
List:       kde-commits
Subject:    [phonon-gstreamer/1.0-porting-for-merge] gstreamer: Unify coding style
From:       Dan Vrátil <dvratil () redhat ! com>
Date:       2014-08-12 14:39:06
Message-ID: E1XHDEA-00073J-RZ () scm ! kde ! org

Git commit 912f09974c18db4e7e3fd66543aa3b5dd7f2b8e6 by Dan Vrátil.
Committed on 12/08/2014 at 09:57.
Pushed by dvratil into branch '1.0-porting-for-merge'.

Unify coding style

Use {} around one-line statements, and remove the space after glib/gst method names
(the space is glib coding style, which does not mix well with our normal C++ style).
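
For example, the two style rules applied together look like this (an illustrative
snippet in the spirit of the changes below, not a line taken from the commit):

    // Before: glib style, with a space before '(' and an unbraced one-line body
    GstElement *queue = gst_element_factory_make ("queue", NULL);
    if (!queue)
        return;

    // After: no space after the function name, braces around the single statement
    GstElement *queue = gst_element_factory_make("queue", NULL);
    if (!queue) {
        return;
    }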

M  +7    -0    gstreamer/abstractrenderer.cpp
M  +10   -6    gstreamer/abstractrenderer.h
M  +8    -6    gstreamer/audiodataoutput.cpp
M  +9    -9    gstreamer/audioeffect.cpp
M  +25   -19   gstreamer/audiooutput.cpp
M  +37   -27   gstreamer/backend.cpp
M  +50   -37   gstreamer/devicemanager.cpp
M  +9    -9    gstreamer/effect.cpp
M  +6    -5    gstreamer/effectmanager.cpp
M  +15   -11   gstreamer/glrenderer.cpp
M  +5    -5    gstreamer/gsthelper.cpp
M  +48   -34   gstreamer/medianode.cpp
M  +56   -36   gstreamer/mediaobject.cpp
M  +46   -32   gstreamer/pipeline.cpp
M  +5    -6    gstreamer/qwidgetvideosink.cpp
M  +6    -3    gstreamer/videographicsobject.cpp
M  +70   -46   gstreamer/videowidget.cpp
M  +13   -11   gstreamer/volumefadereffect.cpp
M  +4    -6    gstreamer/widgetrenderer.cpp
M  +3    -3    gstreamer/x11renderer.cpp

http://commits.kde.org/phonon-gstreamer/912f09974c18db4e7e3fd66543aa3b5dd7f2b8e6

diff --git a/gstreamer/abstractrenderer.cpp b/gstreamer/abstractrenderer.cpp
index 3e885e2..df7e204 100644
--- a/gstreamer/abstractrenderer.cpp
+++ b/gstreamer/abstractrenderer.cpp
@@ -24,6 +24,13 @@ namespace Gstreamer
 {
 
 
+AbstractRenderer::AbstractRenderer(VideoWidget* video)
+        : m_videoWidget(video)
+        , m_videoSink(0)
+{
+}
+
+
 AbstractRenderer::~AbstractRenderer()
 {
     if (m_videoSink) {
diff --git a/gstreamer/abstractrenderer.h b/gstreamer/abstractrenderer.h
index d052d7e..da5c55e 100644
--- a/gstreamer/abstractrenderer.h
+++ b/gstreamer/abstractrenderer.h
@@ -34,17 +34,21 @@ class VideoWidget;
 class AbstractRenderer
 {
 public:
-    AbstractRenderer(VideoWidget *video) :
-          m_videoWidget(video)
-        , m_videoSink(0) { }
+    AbstractRenderer(VideoWidget *video);
     virtual ~AbstractRenderer();
-    virtual GstElement *videoSink() {return m_videoSink;}
+    virtual GstElement *videoSink() {
+        return m_videoSink;
+    }
+
     virtual void aspectRatioChanged(Phonon::VideoWidget::AspectRatio aspectRatio);
     virtual void scaleModeChanged(Phonon::VideoWidget::ScaleMode scaleMode);
     virtual void movieSizeChanged(const QSize &movieSize);
     virtual bool eventFilter(QEvent *) = 0;
-    virtual void handlePaint(QPaintEvent *) {}
-    virtual bool paintsOnWidget() { return true; } // Controls overlays
+    virtual void handlePaint(QPaintEvent *) { }
+
+    virtual bool paintsOnWidget() {  // Controls overlays
+        return true;
+    }
 
 protected:
     VideoWidget *m_videoWidget;
diff --git a/gstreamer/audiodataoutput.cpp b/gstreamer/audiodataoutput.cpp
index 196e525..11ad7b8 100644
--- a/gstreamer/audiodataoutput.cpp
+++ b/gstreamer/audiodataoutput.cpp
@@ -116,13 +116,13 @@ void AudioDataOutput::processBuffer(GstElement*, GstBuffer* buffer, GstPad* pad,
 
     // Copiend locally to avoid multithead problems
     qint32 dataSize = that->m_dataSize;
-    if (dataSize == 0)
+    if (dataSize == 0) {
         return;
+    }
 
     // determine the number of channels
-    GstStructure *structure;
-    const GstCaps *caps = gst_pad_get_current_caps(GST_PAD(pad));
-    structure = gst_caps_get_structure(caps, 0);
+    GstCaps *caps = gst_pad_get_current_caps(GST_PAD(pad));
+    GstStructure *structure = gst_caps_get_structure(caps, 0);
     gst_structure_get_int(structure, "channels", &that->m_channels);
 
     // Let's get the buffers
@@ -145,8 +145,9 @@ void AudioDataOutput::processBuffer(GstElement*, GstBuffer* buffer, GstPad* pad,
     }
 
     // I set the number of channels
-    if (that->m_channelBuffers.size() != that->m_channels)
+    if (that->m_channelBuffers.size() != that->m_channels) {
         that->m_channelBuffers.resize(that->m_channels);
+    }
 
     // check how many emits I will perform
     int nBlockToSend = (that->m_pendingData.size() + gstBufferSize) / (dataSize * that->m_channels);
@@ -174,8 +175,9 @@ void AudioDataOutput::processBuffer(GstElement*, GstBuffer* buffer, GstPad* pad,
             }
         }
 
-        if (that->m_pendingData.capacity() != dataSize)
+        if (that->m_pendingData.capacity() != dataSize) {
             that->m_pendingData.reserve(dataSize);
+        }
 
         that->m_pendingData.resize(0);
     }
diff --git a/gstreamer/audioeffect.cpp b/gstreamer/audioeffect.cpp
index 2c21cc0..6959d79 100644
--- a/gstreamer/audioeffect.cpp
+++ b/gstreamer/audioeffect.cpp
@@ -51,25 +51,25 @@ GstElement* AudioEffect::createEffectBin()
     GstElement *audioBin = gst_bin_new(NULL);
 
     // We need a queue to handle tee-connections from parent node
-    GstElement *queue= gst_element_factory_make ("queue", NULL);
+    GstElement *queue= gst_element_factory_make("queue", NULL);
     gst_bin_add(GST_BIN(audioBin), queue);
 
-    GstElement *mconv= gst_element_factory_make ("audioconvert", NULL);
+    GstElement *mconv= gst_element_factory_make("audioconvert", NULL);
     gst_bin_add(GST_BIN(audioBin), mconv);
 
-    m_effectElement = gst_element_factory_make (qPrintable(m_effectName), NULL);
+    m_effectElement = gst_element_factory_make(qPrintable(m_effectName), NULL);
     gst_bin_add(GST_BIN(audioBin), m_effectElement);
 
     //Link src pad
-    GstPad *srcPad= gst_element_get_static_pad (m_effectElement, "src");
-    gst_element_add_pad (audioBin, gst_ghost_pad_new ("src", srcPad));
-    gst_object_unref (srcPad);
+    GstPad *srcPad= gst_element_get_static_pad(m_effectElement, "src");
+    gst_element_add_pad(audioBin, gst_ghost_pad_new("src", srcPad));
+    gst_object_unref(srcPad);
 
     //Link sink pad
     gst_element_link_many(queue, mconv, m_effectElement, NULL);
-    GstPad *sinkpad = gst_element_get_static_pad (queue, "sink");
-    gst_element_add_pad (audioBin, gst_ghost_pad_new ("sink", sinkpad));
-    gst_object_unref (sinkpad);
+    GstPad *sinkpad = gst_element_get_static_pad(queue, "sink");
+    gst_element_add_pad(audioBin, gst_ghost_pad_new("sink", sinkpad));
+    gst_object_unref(sinkpad);
     return audioBin;
 }
 
diff --git a/gstreamer/audiooutput.cpp b/gstreamer/audiooutput.cpp
index a6d228a..ebe9251 100644
--- a/gstreamer/audiooutput.cpp
+++ b/gstreamer/audiooutput.cpp
@@ -50,11 +50,11 @@ AudioOutput::AudioOutput(Backend *backend, QObject *parent)
     static int count = 0;
     m_name = "AudioOutput" + QString::number(count++);
 
-    m_audioBin = gst_bin_new (NULL);
-    gst_object_ref (GST_OBJECT (m_audioBin));
-    gst_object_ref_sink (GST_OBJECT (m_audioBin));
+    m_audioBin = gst_bin_new(NULL);
+    gst_object_ref(GST_OBJECT (m_audioBin));
+    gst_object_ref_sink(GST_OBJECT (m_audioBin));
 
-    m_conv = gst_element_factory_make ("audioconvert", NULL);
+    m_conv = gst_element_factory_make("audioconvert", NULL);
 
     // Get category from parent
     Phonon::Category category = Phonon::NoCategory;
@@ -62,9 +62,9 @@ AudioOutput::AudioOutput(Backend *backend, QObject *parent)
         category = audioOutput->category();
 
     m_audioSink = m_backend->deviceManager()->createAudioSink(category);
-    m_volumeElement = gst_element_factory_make ("volume", NULL);
-    GstElement *queue = gst_element_factory_make ("queue", NULL);
-    GstElement *audioresample = gst_element_factory_make ("audioresample", NULL);
+    m_volumeElement = gst_element_factory_make("volume", NULL);
+    GstElement *queue = gst_element_factory_make("queue", NULL);
+    GstElement *audioresample = gst_element_factory_make("audioresample", NULL);
 
     if (queue && m_audioBin && m_conv && audioresample && m_audioSink && m_volumeElement) {
         gst_bin_add_many(GST_BIN(m_audioBin), queue, m_conv,
@@ -73,9 +73,9 @@ AudioOutput::AudioOutput(Backend *backend, QObject *parent)
         if (gst_element_link_many(queue, m_conv, audioresample, m_volumeElement,
                                   m_audioSink, NULL)) {
             // Add ghost sink for audiobin
-            GstPad *audiopad = gst_element_get_static_pad (queue, "sink");
-            gst_element_add_pad (m_audioBin, gst_ghost_pad_new ("sink", audiopad));
-            gst_object_unref (audiopad);
+            GstPad *audiopad = gst_element_get_static_pad(queue, "sink");
+            gst_element_add_pad (m_audioBin, gst_ghost_pad_new("sink", audiopad));
+            gst_object_unref(audiopad);
             m_isValid = true; // Initialization ok, accept input
         }
     }
@@ -84,8 +84,8 @@ AudioOutput::AudioOutput(Backend *backend, QObject *parent)
 AudioOutput::~AudioOutput()
 {
     if (m_audioBin) {
-        gst_element_set_state (m_audioBin, GST_STATE_NULL);
-        gst_object_unref (m_audioBin);
+        gst_element_set_state(m_audioBin, GST_STATE_NULL);
+        gst_object_unref(m_audioBin);
     }
 }
 
@@ -101,13 +101,15 @@ int AudioOutput::outputDevice() const
 
 void AudioOutput::setVolume(qreal newVolume)
 {
-    if (newVolume > 2.0 )
+    if (newVolume > 2.0) {
         newVolume = 2.0;
-    else if (newVolume < 0.0)
+    } else if (newVolume < 0.0) {
         newVolume = 0.0;
+    }
 
-    if (newVolume == m_volumeLevel)
+    if (newVolume == m_volumeLevel) {
         return;
+    }
 
     m_volumeLevel = newVolume;
 
@@ -140,19 +142,23 @@ bool AudioOutput::setOutputDevice(const AudioOutputDevice &newDevice)
     }
 
     const QVariant dalProperty = newDevice.property("deviceAccessList");
-    if (!dalProperty.isValid())
+    if (!dalProperty.isValid()) {
         return false;
+    }
     const DeviceAccessList deviceAccessList = dalProperty.value<DeviceAccessList>();
-    if (deviceAccessList.isEmpty())
+    if (deviceAccessList.isEmpty()) {
         return false;
+    }
 
-    if (newDevice.index() == m_device)
+    if (newDevice.index() == m_device) {
         return true;
+    }
 
     if (root()) {
         root()->saveState();
-        if (root()->pipeline()->setState(GST_STATE_READY) == GST_STATE_CHANGE_FAILURE)
+        if (root()->pipeline()->setState(GST_STATE_READY) == GST_STATE_CHANGE_FAILURE) {
             return false;
+        }
     }
 
     // Save previous state
diff --git a/gstreamer/backend.cpp b/gstreamer/backend.cpp
index 0f47262..e6f2e01 100644
--- a/gstreamer/backend.cpp
+++ b/gstreamer/backend.cpp
@@ -90,8 +90,10 @@ Backend::Backend(QObject *parent, const QVariantList &)
     GError *err = 0;
     bool wasInit = gst_init_check(&argc, &argv, &err); //init gstreamer: must be called before any gst-related functions
 
-    if (err)
+    if (err) {
+        error() << err->message;
         g_error_free(err);
+    }
 
 #ifndef QT_NO_PROPERTIES
     setProperty("identifier",     QLatin1String("phonon_gstreamer"));
@@ -103,8 +105,9 @@ Backend::Backend(QObject *parent, const QVariantList &)
 
     // Check if we should enable debug output
     int debugLevel = qgetenv("PHONON_BACKEND_DEBUG").toInt();
-    if (debugLevel > 3) // 3 is maximum
+    if (debugLevel > 3) { // 3 is maximum
         debugLevel = 3;
+    }
     Debug::setMinimumDebugLevel((Debug::DebugLevel)((int) Debug::DEBUG_NONE - 1 - debugLevel));
 
     if (wasInit) {
@@ -124,10 +127,12 @@ Backend::Backend(QObject *parent, const QVariantList &)
 
 Backend::~Backend()
 {
-    if (GlobalSubtitles::self)
+    if (GlobalSubtitles::self) {
         delete GlobalSubtitles::self;
-    if (GlobalAudioChannels::self)
+    }
+    if (GlobalAudioChannels::self) {
         delete GlobalAudioChannels::self;
+    }
     delete m_effectManager;
     delete m_deviceManager;
     PulseSupport::shutdown();
@@ -204,12 +209,12 @@ bool Backend::checkDependencies(bool retry) const
 {
     bool success = false;
     // Verify that gst-plugins-base is installed
-    GstElementFactory *acFactory = gst_element_factory_find ("audioconvert");
+    GstElementFactory *acFactory = gst_element_factory_find("audioconvert");
     if (acFactory) {
         gst_object_unref(acFactory);
         success = true;
         // Check if gst-plugins-good is installed
-        GstElementFactory *csFactory = gst_element_factory_find ("videobalance");
+        GstElementFactory *csFactory = gst_element_factory_find("videobalance");
         if (csFactory) {
             gst_object_unref(csFactory);
         } else {
@@ -217,7 +222,7 @@ bool Backend::checkDependencies(bool retry) const
                 gst_update_registry();
                 checkDependencies(true);
             }
-            warning() << tr("Warning: You do not seem to have the package gstreamer0.10-plugins-good installed.\n"
+            warning() << tr("Warning: You do not seem to have the package gstreamer1.0-plugins-good installed.\n"
                             "          Some video features have been disabled.");
         }
     } else {
@@ -243,12 +248,12 @@ QStringList Backend::availableMimeTypes() const
 
     GstElementFactory *mpegFactory;
     // Add mp3 as a separate mime type as people are likely to look for it.
-    if ((mpegFactory = gst_element_factory_find ("ffmpeg")) ||
-        (mpegFactory = gst_element_factory_find ("mad")) ||
-        (mpegFactory = gst_element_factory_find ("flump3dec"))) {
-        availableMimeTypes << QLatin1String("audio/x-mp3");
-        availableMimeTypes << QLatin1String("audio/x-ape");// ape is available from ffmpeg
-        gst_object_unref(GST_OBJECT(mpegFactory));
+    if ((mpegFactory = gst_element_factory_find("ffmpeg")) ||
+        (mpegFactory = gst_element_factory_find("mad")) ||
+        (mpegFactory = gst_element_factory_find("flump3dec"))) {
+          availableMimeTypes << QLatin1String("audio/x-mp3");
+          availableMimeTypes << QLatin1String("audio/x-ape");// ape is available from ffmpeg
+          gst_object_unref(GST_OBJECT(mpegFactory));
     }
 
     // Iterate over all audio and video decoders and extract mime types from sink caps
@@ -275,15 +280,15 @@ QStringList Backend::availableMimeTypes() const
             for (; static_templates != NULL ; static_templates = static_templates->next) {
                 GstStaticPadTemplate *padTemplate = (GstStaticPadTemplate *) static_templates->data;
                 if (padTemplate && padTemplate->direction == GST_PAD_SINK) {
-                    GstCaps *caps = gst_static_pad_template_get_caps (padTemplate);
+                    GstCaps *caps = gst_static_pad_template_get_caps(padTemplate);
 
                     if (caps) {
-                        for (unsigned int struct_idx = 0; struct_idx < gst_caps_get_size (caps); struct_idx++) {
-
-                            const GstStructure* capsStruct = gst_caps_get_structure (caps, struct_idx);
-                            QString mime = QString::fromUtf8(gst_structure_get_name (capsStruct));
-                            if (!availableMimeTypes.contains(mime))
+                        for (unsigned int struct_idx = 0; struct_idx < gst_caps_get_size(caps); struct_idx++) {
+                            const GstStructure* capsStruct = gst_caps_get_structure(caps, struct_idx);
+                            QString mime = QString::fromUtf8(gst_structure_get_name(capsStruct));
+                            if (!availableMimeTypes.contains(mime)) {
                                 availableMimeTypes.append(mime);
+                            }
                         }
                     }
                 }
@@ -291,14 +296,16 @@ QStringList Backend::availableMimeTypes() const
         }
     }
     g_list_free(factoryList);
-    if (availableMimeTypes.contains("audio/x-vorbis")
-        && availableMimeTypes.contains("application/x-ogm-audio")) {
-        if (!availableMimeTypes.contains("audio/x-vorbis+ogg"))
+    if (availableMimeTypes.contains("audio/x-vorbis") && availableMimeTypes.contains("application/x-ogm-audio")) {
+        if (!availableMimeTypes.contains("audio/x-vorbis+ogg")) {
             availableMimeTypes.append("audio/x-vorbis+ogg");
-        if (!availableMimeTypes.contains("application/ogg"))  /* *.ogg */
+        }
+        if (!availableMimeTypes.contains("application/ogg")) { /* *.ogg */
             availableMimeTypes.append("application/ogg");
-        if (!availableMimeTypes.contains("audio/ogg")) /* *.oga */
+        }
+        if (!availableMimeTypes.contains("audio/ogg")) { /* *.oga */
             availableMimeTypes.append("audio/ogg");
+        }
     }
     availableMimeTypes.sort();
     return availableMimeTypes;
@@ -311,8 +318,9 @@ QList<int> Backend::objectDescriptionIndexes(ObjectDescriptionType type) const
 {
     QList<int> list;
 
-    if (!isValid())
+    if (!isValid()) {
         return list;
+    }
 
     switch (type) {
     case Phonon::AudioOutputDeviceType:
@@ -322,8 +330,9 @@ QList<int> Backend::objectDescriptionIndexes(ObjectDescriptionType type) const
         break;
     case Phonon::EffectType: {
             QList<EffectInfo*> effectList = effectManager()->audioEffects();
-            for (int eff = 0 ; eff < effectList.size() ; ++eff)
+            for (int eff = 0 ; eff < effectList.size() ; ++eff) {
                 list.append(eff);
+            }
             break;
         }
         break;
@@ -346,8 +355,9 @@ QHash<QByteArray, QVariant> Backend::objectDescriptionProperties(ObjectDescripti
 {
     QHash<QByteArray, QVariant> ret;
 
-    if (!isValid())
+    if (!isValid()) {
         return ret;
+    }
 
     switch (type) {
     case Phonon::AudioOutputDeviceType:
diff --git a/gstreamer/devicemanager.cpp b/gstreamer/devicemanager.cpp
index 0c61d41..fb7c04d 100644
--- a/gstreamer/devicemanager.cpp
+++ b/gstreamer/devicemanager.cpp
@@ -49,7 +49,8 @@ namespace Gstreamer
 
 DeviceInfo::DeviceInfo(DeviceManager *manager, const QByteArray &deviceId,
                        quint16 caps, bool isAdvanced)
-        : m_isAdvanced(isAdvanced), m_capabilities(caps)
+        : m_isAdvanced(isAdvanced)
+        , m_capabilities(caps)
 {
     // Get an unique integer id for each device
     static int deviceCounter = 0;
@@ -81,14 +82,16 @@ DeviceInfo::DeviceInfo(DeviceManager *manager, const QByteArray &deviceId,
     }
 
     // A default device should never be advanced
-    if (deviceId == "default")
+    if (deviceId == "default") {
         m_isAdvanced = false;
+    }
 }
 
 void DeviceInfo::useGstElement(GstElement *element, const QByteArray &deviceId)
 {
-    if (!element)
+    if (!element) {
         return;
+    }
 
     gchar *deviceName = NULL;
     if (g_object_class_find_property(G_OBJECT_GET_CLASS(element), "device")){
@@ -202,21 +205,21 @@ DeviceManager::~DeviceManager()
 */
 GstElement *DeviceManager::createGNOMEAudioSink(Category category)
 {
-    GstElement *sink = gst_element_factory_make ("gconfaudiosink", NULL);
+    GstElement *sink = gst_element_factory_make("gconfaudiosink", NULL);
 
     if (sink) {
 
         // set profile property on the gconfaudiosink to "music and movies"
-        if (g_object_class_find_property (G_OBJECT_GET_CLASS (sink), "profile")) {
+        if (g_object_class_find_property(G_OBJECT_GET_CLASS (sink), "profile")) {
             switch (category) {
             case NotificationCategory:
-                g_object_set (G_OBJECT (sink), "profile", 0, NULL); // 0 = 'sounds'
+                g_object_set(G_OBJECT (sink), "profile", 0, NULL); // 0 = 'sounds'
                 break;
             case CommunicationCategory:
-                g_object_set (G_OBJECT (sink), "profile", 2, NULL); // 2 = 'chat'
+                g_object_set(G_OBJECT (sink), "profile", 2, NULL); // 2 = 'chat'
                 break;
             default:
-                g_object_set (G_OBJECT (sink), "profile", 1, NULL); // 1 = 'music and movies'
+                g_object_set(G_OBJECT (sink), "profile", 1, NULL); // 1 = 'music and movies'
                 break;
             }
         }
@@ -227,11 +230,13 @@ GstElement *DeviceManager::createGNOMEAudioSink(Category category)
 
 bool DeviceManager::canOpenDevice(GstElement *element) const
 {
-    if (!element)
+    if (!element) {
         return false;
+    }
 
-    if (gst_element_set_state(element, GST_STATE_READY) == GST_STATE_CHANGE_SUCCESS)
+    if (gst_element_set_state(element, GST_STATE_READY) == GST_STATE_CHANGE_SUCCESS) {
         return true;
+    }
 
     const QList<QByteArray> &list = GstHelper::extractProperties(element, "device");
     foreach (const QByteArray &gstId, list) {
@@ -261,45 +266,44 @@ GstElement *DeviceManager::createAudioSink(Category category)
 {
     GstElement *sink = 0;
 
-    if (m_audioSink == "auto") //this is the default value
-    {
+    if (m_audioSink == "auto") { //this is the default value
         //### TODO : get equivalent KDE settings here
 
         if (!qgetenv("GNOME_DESKTOP_SESSION_ID").isEmpty()) {
             sink = createGNOMEAudioSink(category);
-            if (canOpenDevice(sink))
+            if (canOpenDevice(sink)) {
                 debug() << "AudioOutput using gconf audio sink";
-            else if (sink) {
+            } else if (sink) {
                 gst_object_unref(sink);
                 sink = 0;
             }
         }
 
         if (!sink) {
-            sink = gst_element_factory_make ("alsasink", NULL);
-            if (canOpenDevice(sink))
+            sink = gst_element_factory_make("alsasink", NULL);
+            if (canOpenDevice(sink)) {
                 debug() << "AudioOutput using alsa audio sink";
-            else if (sink) {
+            } else if (sink) {
                 gst_object_unref(sink);
                 sink = 0;
             }
         }
 
         if (!sink) {
-            sink = gst_element_factory_make ("autoaudiosink", NULL);
-            if (canOpenDevice(sink))
+            sink = gst_element_factory_make("autoaudiosink", NULL);
+            if (canOpenDevice(sink)) {
                 debug() << "AudioOutput using auto audio sink";
-            else if (sink) {
+            } else if (sink) {
                 gst_object_unref(sink);
                 sink = 0;
             }
         }
 
         if (!sink) {
-            sink = gst_element_factory_make ("osssink", NULL);
-            if (canOpenDevice(sink))
+            sink = gst_element_factory_make("osssink", NULL);
+            if (canOpenDevice(sink)) {
                 debug() << "AudioOutput using oss audio sink";
-            else if (sink) {
+            } else if (sink) {
                 gst_object_unref(sink);
                 sink = 0;
             }
@@ -307,10 +311,10 @@ GstElement *DeviceManager::createAudioSink(Category category)
     } else if (m_audioSink == "fake") {
         //do nothing as a fakesink will be created by default
     } else if (!m_audioSink.isEmpty()) { //Use a custom sink
-        sink = gst_element_factory_make (m_audioSink, NULL);
-        if (canOpenDevice(sink))
+        sink = gst_element_factory_make(m_audioSink, NULL);
+        if (canOpenDevice(sink)) {
             debug() << "AudioOutput using" << QString::fromUtf8(m_audioSink);
-        else {
+        } else {
             if (sink) {
                 gst_object_unref(sink);
                 sink = 0;
@@ -331,7 +335,7 @@ GstElement *DeviceManager::createAudioSink(Category category)
         if (sink) {
             warning() << "AudioOutput Using fake audio sink";
             //without sync the sink will pull the pipeline as fast as the CPU allows
-            g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);
+            g_object_set(G_OBJECT(sink), "sync", TRUE, NULL);
         }
     }
     Q_ASSERT(sink);
@@ -381,8 +385,9 @@ QList<int> DeviceManager::deviceIds(ObjectDescriptionType type)
 
     QList<int> ids;
     foreach (const DeviceInfo &device, m_devices) {
-        if (device.capabilities() & capability)
+        if (device.capabilities() & capability) {
             ids.append(device.id());
+        }
     }
 
     return ids;
@@ -427,8 +432,9 @@ QHash<QByteArray, QVariant> DeviceManager::deviceProperties(int id)
 const DeviceInfo *DeviceManager::device(int id) const
 {
     for (int i = 0; i < m_devices.size(); i ++) {
-        if (m_devices[i].id() == id)
+        if (m_devices[i].id() == id) {
             return &m_devices[i];
+        }
     }
 
     return NULL;
@@ -458,12 +464,18 @@ void DeviceManager::updateDeviceList()
         GstElementFactory *factory = gst_element_get_factory(audioSink);
         const gchar *factoryName = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
         QByteArray driver; // means sound system
-        if (!g_strcmp0(factoryName, "alsasink"))       driver = "alsa";
-        if (!g_strcmp0(factoryName, "pulsesink"))      driver = "pulse";
-        if (!g_strcmp0(factoryName, "osssink"))        driver = "oss";
-        if (!g_strcmp0(factoryName, "fakesink"))       driver = "fake";
-        if (driver.isEmpty() && !names.isEmpty())
+        if (g_strcmp0(factoryName, "alsasink") == 0) {
+            driver = "alsa";
+        } else if (g_strcmp0(factoryName, "pulsesink") == 0) {
+            driver = "pulse";
+        } else if (g_strcmp0(factoryName, "osssink") == 0) {
+            driver = "oss";
+        } else if (g_strcmp0(factoryName, "fakesink") == 0) {
+            driver = "fake";
+        }
+        if (driver.isEmpty() && !names.isEmpty()) {
             warning() << "Unknown sound system for device" << names.first();
+        }
 
         for (int i = 0; i < names.size(); ++i) {
             DeviceInfo deviceInfo(this, names[i], DeviceInfo::AudioOutput);
@@ -500,7 +512,7 @@ void DeviceManager::updateDeviceList()
 
     // Search for added devices
     for (int i = 0; i < newDeviceList.count(); ++i) {
-        int id = newDeviceList[i].id();
+        const int id = newDeviceList[i].id();
         if (!listContainsDevice(m_devices, id)) {
             // This is a new device, add it
             m_devices.append(newDeviceList[i]);
@@ -512,7 +524,7 @@ void DeviceManager::updateDeviceList()
 
     // Search for removed devices
     for (int i = m_devices.count() - 1; i >= 0; --i) {
-        int id = m_devices[i].id();
+        const int id = m_devices[i].id();
         if (!listContainsDevice(newDeviceList, id)) {
             debug() << "Lost device" << m_devices[i].name();
 
@@ -525,8 +537,9 @@ void DeviceManager::updateDeviceList()
 bool DeviceManager::listContainsDevice(const QList<DeviceInfo> &list, int id)
 {
     foreach (const DeviceInfo &d, list) {
-        if (d.id() == id)
+        if (d.id() == id) {
             return true;
+        }
     }
     return false;
 }
diff --git a/gstreamer/effect.cpp b/gstreamer/effect.cpp
index e791a1c..14df5d0 100644
--- a/gstreamer/effect.cpp
+++ b/gstreamer/effect.cpp
@@ -30,8 +30,8 @@ namespace Phonon
 namespace Gstreamer
 {
 Effect::Effect(Backend *backend, QObject *parent, NodeDescription description)
-        : QObject(parent),
-        MediaNode(backend, description)
+        : QObject(parent)
+        , MediaNode(backend, description)
         , m_effectBin(0)
         , m_effectElement(0)
 {
@@ -42,8 +42,8 @@ void Effect::init()
     m_effectBin = createEffectBin();
     if (m_effectBin) {
         setupEffectParams();
-        gst_object_ref (GST_OBJECT (m_effectBin)); // Take ownership
-        gst_object_ref_sink (GST_OBJECT (m_effectBin));
+        gst_object_ref(GST_OBJECT(m_effectBin)); // Take ownership
+        gst_object_ref_sink(GST_OBJECT(m_effectBin));
         m_isValid = true;
     }
 }
@@ -58,23 +58,23 @@ Effect::~Effect()
 
 void Effect::setupEffectParams()
 {
-
     Q_ASSERT(m_effectElement);
 
     //query and store parameters
     if (m_effectElement) {
         GParamSpec **property_specs;
         guint propertyCount, i;
-        property_specs = g_object_class_list_properties(G_OBJECT_GET_CLASS (m_effectElement), &propertyCount);
+        property_specs = g_object_class_list_properties(G_OBJECT_GET_CLASS(m_effectElement), &propertyCount);
         for (i = 0; i < propertyCount; ++i) {
             GParamSpec *param = property_specs[i];
             if (param->flags & G_PARAM_WRITABLE) {
-                QString propertyName = g_param_spec_get_name (param);
+                QString propertyName = g_param_spec_get_name(param);
 
                 // These properties should not be exposed to the front-end
-                if (propertyName == "qos" || propertyName == "name" || propertyName == "async-handling")
+                if (propertyName == "qos" || propertyName == "name" || propertyName == "async-handling") {
                     continue;
- 
+                }
+
                 switch(param->value_type) {
                     case G_TYPE_UINT:
                         m_parameterList.append(Phonon::EffectParameter(i, propertyName,
diff --git a/gstreamer/effectmanager.cpp b/gstreamer/effectmanager.cpp
index eefed06..fa59e8c 100644
--- a/gstreamer/effectmanager.cpp
+++ b/gstreamer/effectmanager.cpp
@@ -36,14 +36,15 @@ EffectInfo::EffectInfo(const QString &name, const QString &description,
                        const QString &author)
         : m_name(name)
         , m_description(description)
-        , m_author(author) {}
+        , m_author(author)
+{
+}
 
 EffectManager::EffectManager(Backend *backend)
         : QObject(backend)
         , m_backend(backend)
 {
-    GList *factoryList =
-            gst_registry_get_feature_list(gst_registry_get (), GST_TYPE_ELEMENT_FACTORY);
+    GList *factoryList = gst_registry_get_feature_list(gst_registry_get(), GST_TYPE_ELEMENT_FACTORY);
 
     QString name;
     QString klass;
@@ -81,8 +82,8 @@ EffectManager::EffectManager(Backend *backend)
                  || name == QLatin1String("equalizer-10bands")
                  || name == QLatin1String("speed"))
                 {
-                    description = gst_element_factory_get_description (GST_ELEMENT_FACTORY(feature));
-                    author = gst_element_factory_get_author (GST_ELEMENT_FACTORY(feature));
+                    description = gst_element_factory_get_description(GST_ELEMENT_FACTORY(feature));
+                    author = gst_element_factory_get_author(GST_ELEMENT_FACTORY(feature));
                     EffectInfo *effect = new EffectInfo(name, description, author);
                     m_audioEffectList.append(effect);
 
diff --git a/gstreamer/glrenderer.cpp b/gstreamer/glrenderer.cpp
index e31adc7..52f1234 100644
--- a/gstreamer/glrenderer.cpp
+++ b/gstreamer/glrenderer.cpp
@@ -48,8 +48,9 @@
 static void frameRendered()
 {
     static QString displayFps = qgetenv("PHONON_GST_FPS");
-    if (displayFps.isEmpty())
+    if (displayFps.isEmpty()) {
         return;
+    }
 
     static int frames = 0;
     static QTime lastTime = QTime::currentTime();
@@ -80,8 +81,8 @@ GLRenderer::GLRenderer(VideoWidget* videoWidget) :
     m_glWindow = new GLRenderWidgetImplementation(videoWidget, format);
 
     if ((m_videoSink = m_glWindow->createVideoSink())) {    //if ((m_videoSink = m_glWindow->createVideoSink())) {
-        gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
-        gst_object_ref_sink (GST_OBJECT (m_videoSink));
+        gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
+        gst_object_ref_sink(GST_OBJECT(m_videoSink));
 
         QWidgetVideoSinkBase*  sink = reinterpret_cast<QWidgetVideoSinkBase*>(m_videoSink);
         // Let the videosink know which widget to direct frame updates to
@@ -92,7 +93,7 @@ GLRenderer::GLRenderer(VideoWidget* videoWidget) :
 GLRenderer::~GLRenderer()
 {
     if (m_videoSink) {
-        gst_object_unref (GST_OBJECT (m_videoSink));
+        gst_object_unref(GST_OBJECT(m_videoSink));
         m_videoSink = 0;
     }
 }
@@ -104,8 +105,7 @@ bool GLRenderer::eventFilter(QEvent * event)
         NewFrameEvent *frameEvent= static_cast <NewFrameEvent *>(event);
         m_glWindow->setNextFrame(frameEvent->frame, frameEvent->width, frameEvent->height);
         return true;
-    }
-    else if (event->type() == QEvent::Resize) {
+    } else if (event->type() == QEvent::Resize) {
         m_glWindow->setGeometry(m_videoWidget->geometry());
         return true;
     }
@@ -114,22 +114,25 @@ bool GLRenderer::eventFilter(QEvent * event)
 
 GstElement* GLRenderWidgetImplementation::createVideoSink()
 {
-    if (hasYUVSupport())
+    if (hasYUVSupport()) {
         return GST_ELEMENT(g_object_new(get_type_YUV(), NULL));
+    }
     return 0;
 }
 
 void GLRenderWidgetImplementation::setNextFrame(const QByteArray &array, int w, int h)
 {
-    if (m_videoWidget->root()->state() == Phonon::LoadingState)
+    if (m_videoWidget->root()->state() == Phonon::LoadingState) {
         return;
+    }
 
     m_frame = QImage();
 
-    if (hasYUVSupport())
+    if (hasYUVSupport()) {
         updateTexture(array, w, h);
-    else
+    } else {
         m_frame = QImage((uchar *)array.constData(), w, h, QImage::Format_RGB32);
+    }
 
     m_array = array;
     m_width = w;
@@ -189,8 +192,9 @@ static QImage convertFromYUV(const QByteArray &array, int w, int h)
 
 const QImage &GLRenderWidgetImplementation::currentFrame() const
 {
-    if (m_frame.isNull() && !m_array.isNull())
+    if (m_frame.isNull() && !m_array.isNull()) {
         m_frame = convertFromYUV(m_array, m_width, m_height);
+    }
 
     return m_frame;
 }
diff --git a/gstreamer/gsthelper.cpp b/gstreamer/gsthelper.cpp
index b00a130..a02ca66 100644
--- a/gstreamer/gsthelper.cpp
+++ b/gstreamer/gsthelper.cpp
@@ -58,7 +58,7 @@ bool GstHelper::setProperty(GstElement *elem, const char *propertyName, const QB
     Q_ASSERT(elem);
     Q_ASSERT(propertyName && strlen(propertyName));
 
-    if (g_object_class_find_property (G_OBJECT_GET_CLASS(elem), propertyName)) {
+    if (g_object_class_find_property(G_OBJECT_GET_CLASS(elem), propertyName)) {
         g_object_set(G_OBJECT(elem), propertyName, propertyValue.constData(), NULL);
         return true;
     }
@@ -74,9 +74,9 @@ QByteArray GstHelper::property(GstElement *elem, const char *propertyName)
     Q_ASSERT(propertyName && strlen(propertyName));
     QByteArray retVal;
 
-    if (g_object_class_find_property (G_OBJECT_GET_CLASS(elem), propertyName)) {
+    if (g_object_class_find_property(G_OBJECT_GET_CLASS(elem), propertyName)) {
         gchar *value = NULL;
-        g_object_get (G_OBJECT(elem), propertyName, &value, NULL);
+        g_object_get(G_OBJECT(elem), propertyName, &value, NULL);
         retVal = QByteArray(value);
         g_free (value);
     }
@@ -91,7 +91,7 @@ QByteArray GstHelper::name(GstObject *obj)
     Q_ASSERT(obj);
     QByteArray retVal;
     gchar *value = NULL;
-    if ((value = gst_object_get_name (obj))) {
+    if ((value = gst_object_get_name(obj))) {
         retVal = QByteArray(value);
         g_free (value);
     }
@@ -112,7 +112,7 @@ QString GstHelper::stateName(GstState state)
     case GST_STATE_PLAYING:
         return "playing";
     }
-    return "";
+    return QString();
 }
 
 } //namespace Gstreamer
diff --git a/gstreamer/medianode.cpp b/gstreamer/medianode.cpp
index 25635f7..ddf82eb 100644
--- a/gstreamer/medianode.cpp
+++ b/gstreamer/medianode.cpp
@@ -46,15 +46,15 @@ MediaNode::MediaNode(Backend *backend, NodeDescription description) :
     if (description & AudioSource) {
         m_audioTee = gst_element_factory_make("tee", NULL);
         Q_ASSERT(m_audioTee); // Must not ever be null.
-        gst_object_ref (GST_OBJECT (m_audioTee));
-        gst_object_ref_sink (GST_OBJECT (m_audioTee));
+        gst_object_ref(GST_OBJECT(m_audioTee));
+        gst_object_ref_sink(GST_OBJECT(m_audioTee));
     }
 
     if (description & VideoSource) {
         m_videoTee = gst_element_factory_make("tee", NULL);
         Q_ASSERT(m_videoTee); // Must not ever be null.
-        gst_object_ref (GST_OBJECT (m_videoTee));
-        gst_object_ref_sink (GST_OBJECT (m_videoTee));
+        gst_object_ref(GST_OBJECT(m_videoTee));
+        gst_object_ref_sink(GST_OBJECT(m_videoTee));
     }
 }
 
@@ -63,11 +63,13 @@ MediaNode::~MediaNode()
     if (m_videoTee) {
         gst_element_set_state(m_videoTee, GST_STATE_NULL);
         gst_object_unref(m_videoTee);
+        m_videoTee = 0;
     }
 
     if (m_audioTee) {
         gst_element_set_state(m_audioTee, GST_STATE_NULL);
         gst_object_unref(m_audioTee);
+        m_audioTee = 0;
     }
 }
 
@@ -83,19 +85,21 @@ bool MediaNode::buildGraph()
 
     if (success) {
         // connect children recursively
-        for (int i=0; i< m_audioSinkList.size(); ++i) {
+        for (int i = 0; i < m_audioSinkList.size(); ++i) {
             if (MediaNode *node = qobject_cast<MediaNode*>(m_audioSinkList[i])) {
                 node->setRoot(root());
-                if (!node->buildGraph())
+                if (!node->buildGraph()) {
                     success = false;
+                }
             }
         }
 
-        for (int i=0; i < m_videoSinkList.size(); ++i) {
+        for (int i = 0; i < m_videoSinkList.size(); ++i) {
             if (MediaNode *node = qobject_cast<MediaNode*>(m_videoSinkList[i])) {
                 node->setRoot(root());
-                if (!node->buildGraph())
+                if (!node->buildGraph()) {
                     success = false;
+                }
             }
         }
     }
@@ -119,17 +123,19 @@ bool MediaNode::breakGraph()
         prepareToUnlink();
         m_finalized = false;
     }
-    for (int i=0; i<m_audioSinkList.size(); ++i) {
+    for (int i = 0; i < m_audioSinkList.size(); ++i) {
         MediaNode *node = qobject_cast<MediaNode*>(m_audioSinkList[i]);
-        if (!node || !node->breakGraph())
+        if (!node || !node->breakGraph()) {
             return false;
+        }
         node->setRoot(0);
     }
 
-    for (int i=0; i <m_videoSinkList.size(); ++i) {
+    for (int i = 0; i < m_videoSinkList.size(); ++i) {
         MediaNode *node = qobject_cast<MediaNode*>(m_videoSinkList[i]);
-        if (!node || !node->breakGraph())
+        if (!node || !node->breakGraph()) {
             return false;
+        }
         node->setRoot(0);
     }
     unlink();
@@ -192,8 +198,9 @@ bool MediaNode::disconnectNode(QObject *obj)
                 gst_element_release_request_pad(m_audioTee, requestedPad);
                 gst_object_unref(requestedPad);
             }
-            if (GST_ELEMENT_PARENT(sink->audioElement()))
+            if (GST_ELEMENT_PARENT(sink->audioElement())) {
                 gst_bin_remove(GST_BIN(root()->audioGraph()), sink->audioElement());
+            }
             gst_object_unref(sinkPad);
         }
 
@@ -205,8 +212,9 @@ bool MediaNode::disconnectNode(QObject *obj)
                 gst_element_release_request_pad(m_videoTee, requestedPad);
                 gst_object_unref(requestedPad);
             }
-            if (GST_ELEMENT_PARENT(sink->videoElement()))
+            if (GST_ELEMENT_PARENT(sink->videoElement())) {
                 gst_bin_remove(GST_BIN(root()->videoGraph()), sink->videoElement());
+            }
             gst_object_unref(sinkPad);
         }
 
@@ -240,35 +248,37 @@ bool MediaNode::addOutput(MediaNode *output, GstElement *tee)
     bool success = true;
 
     GstElement *sinkElement = 0;
-    if (output->description() & AudioSink)
+    if (output->description() & AudioSink) {
         sinkElement = output->audioElement();
-    else if (output->description() & VideoSink)
+    } else if (output->description() & VideoSink) {
         sinkElement = output->videoElement();
+    }
 
     Q_ASSERT(sinkElement);
 
-    if (!sinkElement)
+    if (!sinkElement) {
         return false;
+    }
 
     GstState state = root()->pipeline()->state();
-    GstPad *srcPad;
-    GstPadTemplate* tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%u");
-    srcPad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
-    GstPad *sinkPad = gst_element_get_static_pad (sinkElement, "sink");
+    GstPadTemplate* tee_src_pad_template = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS (tee), "src_%u");
+    GstPad *srcPad = gst_element_request_pad(tee, tee_src_pad_template, NULL, NULL);
+    GstPad *sinkPad = gst_element_get_static_pad(sinkElement, "sink");
 
     if (!sinkPad) {
         success = false;
     } else if (gst_pad_is_linked(sinkPad)) {
-        gst_object_unref (GST_OBJECT (sinkPad));
-        gst_object_unref (GST_OBJECT (srcPad));
+        gst_object_unref(GST_OBJECT(sinkPad));
+        gst_object_unref(GST_OBJECT(srcPad));
         return true;
     }
 
     if (success) {
-        if (output->description() & AudioSink)
+        if (output->description() & AudioSink) {
             gst_bin_add(GST_BIN(root()->audioGraph()), sinkElement);
-        else if (output->description() & VideoSink)
+        } else if (output->description() & VideoSink) {
             gst_bin_add(GST_BIN(root()->videoGraph()), sinkElement);
+        }
     }
 
     if (success) {
@@ -278,8 +288,8 @@ bool MediaNode::addOutput(MediaNode *output, GstElement *tee)
         gst_element_release_request_pad(tee, srcPad);
     }
 
-    gst_object_unref (GST_OBJECT (srcPad));
-    gst_object_unref (GST_OBJECT (sinkPad));
+    gst_object_unref(GST_OBJECT(srcPad));
+    gst_object_unref(GST_OBJECT(sinkPad));
 
     return success;
 }
@@ -288,15 +298,17 @@ bool MediaNode::linkMediaNodeList(QList<QObject *> &list, GstElement *bin, GstEl
 {
     if (!GST_ELEMENT_PARENT(tee)) {
         gst_bin_add(GST_BIN(bin), tee);
-        if (!gst_element_link_pads(src, "src", tee, "sink"))
+        if (!gst_element_link_pads(src, "src", tee, "sink")) {
             return false;
+        }
         gst_element_set_state(tee, GST_STATE(bin));
     }
     for (int i = 0 ; i < list.size() ; ++i) {
         QObject *sink = list[i];
         if (MediaNode *output = qobject_cast<MediaNode*>(sink)) {
-            if (!addOutput(output, tee))
+            if (!addOutput(output, tee)) {
                 return false;
+            }
         }
     }
     return true;
@@ -307,14 +319,16 @@ bool MediaNode::link()
     // Rewire everything
     if ((description() & AudioSource)) {
         Q_ASSERT(m_audioTee);
-        if (!linkMediaNodeList(m_audioSinkList, root()->audioGraph(), m_audioTee, audioElement()))
+        if (!linkMediaNodeList(m_audioSinkList, root()->audioGraph(), m_audioTee, audioElement())) {
             return false;
+        }
     }
 
     if ((description() & VideoSource)) {
         Q_ASSERT(m_videoTee);
-        if (!linkMediaNodeList(m_videoSinkList, root()->videoGraph(), m_videoTee, videoElement()))
+        if (!linkMediaNodeList(m_videoSinkList, root()->videoGraph(), m_videoTee, videoElement())) {
             return false;
+        }
     }
     return true;
 }
@@ -326,8 +340,8 @@ bool MediaNode::unlink()
         if (GST_ELEMENT_PARENT(m_audioTee) == GST_ELEMENT(root()->audioGraph())) {
            gst_element_set_state(m_audioTee, GST_STATE_NULL);
            gst_bin_remove(GST_BIN(root()->audioGraph()), m_audioTee);
-       }
-        for (int i=0; i<m_audioSinkList.size(); ++i) {
+        }
+        for (int i = 0; i < m_audioSinkList.size(); ++i) {
             QObject *audioSink = m_audioSinkList[i];
             if (MediaNode *output = qobject_cast<MediaNode*>(audioSink)) {
                 GstElement *element = output->audioElement();
@@ -342,7 +356,7 @@ bool MediaNode::unlink()
            gst_element_set_state(m_videoTee, GST_STATE_NULL);
            gst_bin_remove(GST_BIN(root()->videoGraph()), m_videoTee);
         }
-        for (int i=0; i <m_videoSinkList.size(); ++i) {
+        for (int i = 0; i < m_videoSinkList.size(); ++i) {
             QObject *videoSink = m_videoSinkList[i];
             if (MediaNode *vw = qobject_cast<MediaNode*>(videoSink)) {
                 GstElement *element = vw->videoElement();
diff --git a/gstreamer/mediaobject.cpp b/gstreamer/mediaobject.cpp
index c823139..e08338b 100644
--- a/gstreamer/mediaobject.cpp
+++ b/gstreamer/mediaobject.cpp
@@ -141,8 +141,9 @@ MediaObject::~MediaObject()
 void MediaObject::saveState()
 {
     //Only first resumeState is respected
-    if (m_resumeState)
+    if (m_resumeState) {
         return;
+    }
 
     if (m_state == Phonon::PlayingState || m_state == Phonon::PausedState) {
         m_resumeState = true;
@@ -193,8 +194,9 @@ qint64 MediaObject::currentTime() const
 //     debug() << m_resumeState;
 //     debug() << state();
 //     debug() << getPipelinePos();
-    if (m_resumeState)
+    if (m_resumeState) {
         return m_oldPos;
+    }
 
     switch (state()) {
     case Phonon::PausedState:
@@ -307,9 +309,11 @@ void MediaObject::changeSubUri(const Mrl &mrl)
         fontDesc = videoWidgetFont.family() + ' ' + QString::number(videoWidgetFont.pointSize());
     }
     //FIXME: Try to detect common encodings, like libvlc does
-    g_object_set(G_OBJECT(m_pipeline->element()), "suburi", mrl.toEncoded().constData(),
+    g_object_set(G_OBJECT(m_pipeline->element()),
+        "suburi", mrl.toEncoded().constData(),
         "subtitle-font-desc", customFont.isNull() ? fontDesc.toStdString().c_str() : customFont.constData(),
-        "subtitle-encoding", customEncoding.isNull() ? "UTF-8" : customEncoding.constData(), NULL);
+        "subtitle-encoding", customEncoding.isNull() ? "UTF-8" : customEncoding.constData(),
+        NULL);
 }
 
 void MediaObject::autoDetectSubtitle()
@@ -327,7 +331,7 @@ void MediaObject::autoDetectSubtitle()
         absCompleteBaseName.chop(QFileInfo(absCompleteBaseName).suffix().length());
 
         // Looking for a subtitle in the same directory and matching the same name
-        foreach(const QString &ext, exts) {
+        foreach (const QString &ext, exts) {
             if (QFile::exists(absCompleteBaseName + ext)) {
                 changeSubUri(Mrl("file://" + absCompleteBaseName + ext));
                 break;
@@ -348,18 +352,20 @@ void MediaObject::setNextSource(const MediaSource &source)
         // there are no more sources) skip EOS for the current source in order to seamlessly
         // pass to the next source.
         if (source.type() == Phonon::MediaSource::Invalid ||
-            source.type() == Phonon::MediaSource::Empty)
+            source.type() == Phonon::MediaSource::Empty) {
             m_skippingEOS = false;
-        else
+        } else {
             m_skippingEOS = true;
+        }
 
         m_waitingForNextSource = true;
         m_waitingForPreviousSource = false;
         m_skipGapless = false;
         m_pipeline->setSource(source);
         m_aboutToFinishWait.wakeAll();
-    } else
+    } else {
         qDebug() << "Ignoring source as no aboutToFinish handling is in progress.";
+    }
     m_aboutToFinishLock.unlock();
 }
 
@@ -407,24 +413,26 @@ void MediaObject::getAudioChannelInfo(int stream)
 
     gint channelCount = 0;
     g_object_get(G_OBJECT(m_pipeline->element()), "n-audio", &channelCount, NULL);
-    if (channelCount)
+    if (channelCount) {
         GlobalAudioChannels::instance()->add(this, -1, tr("Default"), "");
+    }
     for (gint i = 0; i < channelCount; ++i) {
         GstTagList *tags = 0;
-        g_signal_emit_by_name (G_OBJECT(m_pipeline->element()), "get-audio-tags",
-                               i, &tags);
+        g_signal_emit_by_name(G_OBJECT(m_pipeline->element()), "get-audio-tags", i, &tags);
         if (tags) {
             gchar *tagLangCode = 0;
             gchar *tagCodecName = 0;
-            gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &tagCodecName);
-            gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &tagLangCode);
+            gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &tagCodecName);
+            gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &tagLangCode);
             QString name;
-            if (tagLangCode)
+            if (tagLangCode) {
                 name = QLatin1String(tagLangCode);
-            else
+            } else {
                 name = tr("Unknown");
-            if (tagCodecName)
+            }
+            if (tagCodecName) {
                 name = QString("%1 [%2]").arg(name, QLatin1String(tagCodecName));
+            }
             GlobalAudioChannels::instance()->add(this, i, name);
             g_free(tagLangCode);
             g_free(tagCodecName);
@@ -439,21 +447,22 @@ void MediaObject::getSubtitleInfo(int stream)
 
     gint spuCount = 0; // Sub picture units.
     g_object_get(G_OBJECT(m_pipeline->element()), "n-text", &spuCount, NULL);
-    if (spuCount)
+    if (spuCount) {
         GlobalSubtitles::instance()->add(this, -1, tr("Disable"), "");
+    }
     for (gint i = 0; i < spuCount; ++i) {
         GstTagList *tags = 0;
-        g_signal_emit_by_name (G_OBJECT(m_pipeline->element()), "get-text-tags",
-                               i, &tags);
+        g_signal_emit_by_name(G_OBJECT(m_pipeline->element()), "get-text-tags", i, &tags);
 
         if (tags) {
             gchar *tagLangCode = 0;
-            gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &tagLangCode);
+            gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &tagLangCode);
             QString name;
-            if (tagLangCode)
+            if (tagLangCode) {
                 name = QLatin1String(tagLangCode); // Language code is ISO -> always \
                Latin1
-            else
+            } else {
                 name = tr("Unknown");
+            }
             GlobalSubtitles::instance()->add(this, i, name);
             // tagLangCode was implicat converted to QString, so we can drop
             // the ref.
@@ -466,8 +475,9 @@ void MediaObject::getSubtitleInfo(int stream)
 void MediaObject::setPrefinishMark(qint32 newPrefinishMark)
 {
     m_prefinishMark = newPrefinishMark;
-    if (currentTime() < totalTime() - m_prefinishMark) // not about to finish
+    if (currentTime() < totalTime() - m_prefinishMark) { // not about to finish
         m_prefinishMarkReachedNotEmitted = true;
+    }
 }
 
 void MediaObject::pause()
@@ -577,21 +587,25 @@ void MediaObject::handleStateChange(GstState oldState, GstState newState)
     prevPhononState = translateState(oldState);
     m_state = translateState(newState);
     debug() << "Moving from" << GstHelper::stateName(oldState) << prevPhononState << "to" << GstHelper::stateName(newState) << m_state;
-    if (GST_STATE_TRANSITION(oldState, newState) == GST_STATE_CHANGE_NULL_TO_READY)
+    if (GST_STATE_TRANSITION(oldState, newState) == GST_STATE_CHANGE_NULL_TO_READY) {
         loadingComplete();
+    }
     if (GST_STATE_TRANSITION(oldState, newState) == GST_STATE_CHANGE_READY_TO_PAUSED && m_pendingTitle != 0) {
         _iface_setCurrentTitle(m_pendingTitle);
     }
-    if (newState == GST_STATE_PLAYING)
+    if (newState == GST_STATE_PLAYING) {
         m_tickTimer->start();
-    else
+    } else {
         m_tickTimer->stop();
+    }
 
-    if (newState == GST_STATE_READY)
+    if (newState == GST_STATE_READY) {
         emit tick(0);
+    }
 
-    if (!m_doingEOS)
+    if (!m_doingEOS) {
         emit stateChanged(m_state, prevPhononState);
+    }
 }
 
 void MediaObject::handleEndOfStream()
@@ -793,8 +807,9 @@ void MediaObject::_iface_setCurrentTitle(int title)
         default:
             break;
     }
-    if (m_currentTitle == m_pendingTitle)
+    if (m_currentTitle == m_pendingTitle) {
         m_pendingTitle = 0;
+    }
 }
 
 QList<SubtitleDescription> MediaObject::_iface_availableSubtitles() const
@@ -829,7 +844,7 @@ void MediaObject::_iface_setCurrentSubtitle(const SubtitleDescription &subtitle)
         const int localIndex = GlobalSubtitles::instance()->localIdFor(this, subtitle.index());
         int flags;
 
-        g_object_get (G_OBJECT(m_pipeline->element()), "flags", &flags, NULL);
+        g_object_get(G_OBJECT(m_pipeline->element()), "flags", &flags, NULL);
         if (localIndex == -1) {
             flags &= ~GST_PLAY_FLAG_TEXT;
         } else {
@@ -842,14 +857,16 @@ void MediaObject::_iface_setCurrentSubtitle(const SubtitleDescription &subtitle)
 
 void MediaObject::changeTitle(const QString &format, int title)
 {
-    if ((title < 1) || (title > m_availableTitles))
+    if ((title < 1) || (title > m_availableTitles)) {
         return;
+    }
 
     //let's seek to the beginning of the song
-    GstFormat titleFormat = gst_format_get_by_nick(format.toLocal8Bit().constData());
+    GstFormat titleFormat = gst_format_get_by_nick(qPrintable(format));
-    if (!titleFormat)
+    if (!titleFormat) {
         return;
+    }
 
     debug() << Q_FUNC_INFO << format << title;
     if (gst_element_seek_simple(m_pipeline->element(), titleFormat, GST_SEEK_FLAG_FLUSH, title - 1)) {
@@ -915,8 +932,10 @@ void MediaObject::handleAboutToFinish()
     debug() << "About to finish";
     m_aboutToFinishLock.lock();
     m_handlingAboutToFinish = true;
-    if (!m_waitingForNextSource)
+    if (!m_waitingForNextSource) {
         emit aboutToFinish();
+    }
+
     // As our signal gets emitted queued we need to wait here until either a
     // new source or a timeout is reached.
     // If we got a new source in time -> hooray + gapless
@@ -937,10 +956,11 @@ void MediaObject::handleAboutToFinish()
         debug() << "total time" << totalTime();
         debug() << "current time" << currentTime();
         debug() << "remaining time" << remainingTime();
-        if (totalTime() <= 0 || (remainingTime() - 500 <= 0))
+        if (totalTime() <= 0 || (remainingTime() - 500 <= 0)) {
             timeout = 0;
-        else
+        } else {
             timeout = remainingTime() - 500;
+        }
 
         debug() << "waiting for" << timeout;
         if (m_aboutToFinishWait.wait(&m_aboutToFinishLock, timeout)) {
diff --git a/gstreamer/pipeline.cpp b/gstreamer/pipeline.cpp
index f704ff3..4b760a1 100644
--- a/gstreamer/pipeline.cpp
+++ b/gstreamer/pipeline.cpp
@@ -75,8 +75,8 @@ Pipeline::Pipeline(QObject *parent)
 
     // Set up audio graph
     m_audioGraph = gst_bin_new("audioGraph");
-    gst_object_ref (GST_OBJECT (m_audioGraph));
-    gst_object_ref_sink (GST_OBJECT (m_audioGraph));
+    gst_object_ref(GST_OBJECT(m_audioGraph));
+    gst_object_ref_sink(GST_OBJECT(m_audioGraph));
 
     // Note that these queues are only required for streaming content
     // And should ideally be created on demand as they will disable
@@ -93,16 +93,16 @@ Pipeline::Pipeline(QObject *parent)
     }
 
     gst_bin_add(GST_BIN(m_audioGraph), m_audioPipe);
-    GstPad *audiopad = gst_element_get_static_pad (m_audioPipe, "sink");
-    gst_element_add_pad (m_audioGraph, gst_ghost_pad_new ("sink", audiopad));
-    gst_object_unref (audiopad);
+    GstPad *audiopad = gst_element_get_static_pad(m_audioPipe, "sink");
+    gst_element_add_pad(m_audioGraph, gst_ghost_pad_new("sink", audiopad));
+    gst_object_unref(audiopad);
 
     g_object_set(m_pipeline, "audio-sink", m_audioGraph, NULL);
 
     // Set up video graph
     m_videoGraph = gst_bin_new("videoGraph");
-    gst_object_ref (GST_OBJECT (m_videoGraph));
-    gst_object_ref_sink (GST_OBJECT (m_videoGraph));
+    gst_object_ref(GST_OBJECT(m_videoGraph));
+    gst_object_ref_sink(GST_OBJECT(m_videoGraph));
 
     m_videoPipe = gst_element_factory_make("queue", "videoPipe");
     gst_bin_add(GST_BIN(m_videoGraph), m_videoPipe);
@@ -160,8 +160,9 @@ void Pipeline::setSource(const Phonon::MediaSource &source, bool reset)
         case MediaSource::Url:
         case MediaSource::LocalFile:
             gstUri = source.mrl().toEncoded();
-            if(source.mrl().scheme() == QLatin1String("http"))
+            if(source.mrl().scheme() == QLatin1String("http")) {
                 m_isHttpUrl = true;
+            }
             break;
         case MediaSource::Invalid:
             emit errorMessage("Invalid source specified", Phonon::FatalError);
@@ -172,8 +173,9 @@ void Pipeline::setSource(const Phonon::MediaSource &source, bool reset)
             break;
         case MediaSource::CaptureDevice:
             gstUri = captureDeviceURI(source);
-            if (gstUri.isEmpty())
+            if (gstUri.isEmpty()) {
                 emit errorMessage("Invalid capture device specified", Phonon::FatalError);
+            }
             break;
         case MediaSource::Disc:
             switch(source.discType()) {
@@ -246,9 +248,9 @@ GstStateChangeReturn Pipeline::setState(GstState state)
 void Pipeline::writeToDot(MediaObject *media, const QString &type)
 {
     GstBin *bin = GST_BIN(m_pipeline);
-    if (media)
+    if (media) {
         debug() << media << "Dumping" << QString("%0.dot").arg(type);
-    else {
+    } else {
         debug() << type;
     }
     GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(bin, GST_DEBUG_GRAPH_SHOW_ALL, \
QString("phonon-%0").arg(type).toUtf8().constData()); @@ -291,8 +293,9 @@ gboolean \
Pipeline::cb_duration(GstBus *bus, GstMessage *gstMessage, gpointer dat  \
Q_UNUSED(bus)  Q_UNUSED(gstMessage)
     Pipeline *that = static_cast<Pipeline*>(data);
-    if (that->m_resetting)
+    if (that->m_resetting) {
         return true;
+    }
 
     emit that->durationChanged(that->totalDuration());
     return true;
@@ -354,14 +357,15 @@ gboolean Pipeline::cb_state(GstBus *bus, GstMessage \
*gstMessage, gpointer data)  // Apparently gstreamer sometimes enters the same state \
                twice.
     // FIXME: Sometimes we enter the same state twice. currently not disallowed by \
the state machine  if (that->m_seeking) {
-        if (GST_STATE_TRANSITION(oldState, newState) == \
GST_STATE_CHANGE_PAUSED_TO_PLAYING) +        if (GST_STATE_TRANSITION(oldState, \
newState) == GST_STATE_CHANGE_PAUSED_TO_PLAYING) {  that->m_seeking = false;
+        }
         return true;
     }
     debug() << "State change";
 
-    transitionName = g_strdup_printf ("%s_%s", gst_element_state_get_name \
                (oldState),
-        gst_element_state_get_name (newState));
+    transitionName = g_strdup_printf("%s_%s", gst_element_state_get_name(oldState),
+                                     gst_element_state_get_name(newState));
     GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (that->m_pipeline), \
GST_DEBUG_GRAPH_SHOW_ALL,  QByteArray("phonon-gstreamer.") + \
QByteArray(transitionName));  g_free(transitionName);
@@ -560,8 +564,9 @@ void foreach_tag_function(const GstTagList *list, const gchar \
*tag, gpointer use  
     QString key = QString(tag).toUpper();
     QString currVal = newData->value(key);
-    if (!value.isEmpty() && !(newData->contains(key) && currVal == value))
+    if (!value.isEmpty() && !(newData->contains(key) && currVal == value)) {
         newData->insert(key, value);
+    }
 }
 
 gboolean Pipeline::cb_tag(GstBus *bus, GstMessage *msg, gpointer data)
@@ -650,23 +655,26 @@ gboolean Pipeline::cb_tag(GstBus *bus, GstMessage *msg, \
gpointer data)  }
                 } else {
                     str = that->m_metaData.value("GENRE");
-                    if (!str.isEmpty())
+                    if (!str.isEmpty()) {
                         that->m_metaData.insert("TITLE", str);
-                    else
+                    } else {
                         that->m_metaData.insert("TITLE", "Streaming Data");
+                    }
                 }
                 if (!that->m_metaData.contains("ARTIST")) {
                     str = that->m_metaData.value("LOCATION");
-                    if (!str.isEmpty())
+                    if (!str.isEmpty()) {
                         that->m_metaData.insert("ARTIST", str);
-                    else
+                    } else {
                         that->m_metaData.insert("ARTIST", "Streaming Data");
+                    }
                 }
                 str = that->m_metaData.value("ORGANIZATION");
-                if (!str.isEmpty())
+                if (!str.isEmpty()) {
                     that->m_metaData.insert("ALBUM", str);
-                else
+                } else {
                     that->m_metaData.insert("ALBUM", "Streaming Data");
+                }
             }
 
             emit that->metaDataChanged(that->m_metaData);
@@ -684,8 +692,9 @@ gboolean Pipeline::cb_streamStart(GstBus *bus, GstMessage *msg, \
gpointer data)  g_object_get(that->m_pipeline, "uri", &uri, NULL);
     debug() << "Stream changed to" << uri;
     g_free(uri);
-    if (!that->m_resetting)
+    if (!that->m_resetting) {
         emit that->streamChanged();
+    }
     return true;
 }
 
@@ -712,8 +721,9 @@ void Pipeline::updateNavigation()
         if (res && gst_navigation_query_parse_commands_length(query, &count)) {
             for(guint i = 0; i < count; ++i) {
                 GstNavigationCommand cmd;
-                if (!gst_navigation_query_parse_commands_nth(query, i, &cmd))
+                if (!gst_navigation_query_parse_commands_nth(query, i, &cmd)) {
                     break;
+                }
                 switch (cmd) {
                 case GST_NAVIGATION_COMMAND_DVD_ROOT_MENU:
                     ret << MediaController::RootMenu;
@@ -754,10 +764,12 @@ QList<MediaController::NavigationMenu> \
Pipeline::availableMenus() const  bool Pipeline::seekToMSec(qint64 time)
 {
     m_posAtReset = time;
-    if (m_resetting)
+    if (m_resetting) {
         return true;
-    if (state() == GST_STATE_PLAYING)
+    }
+    if (state() == GST_STATE_PLAYING) {
         m_seeking = true;
+    }
     return gst_element_seek(GST_ELEMENT(m_pipeline), 1.0, GST_FORMAT_TIME,
                      GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET,
                      time * GST_MSECOND, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
@@ -770,14 +782,14 @@ bool Pipeline::isSeekable() const
     gboolean result;
     gint64 start, stop;
     query = gst_query_new_seeking(GST_FORMAT_TIME);
-    result = gst_element_query (GST_ELEMENT(m_pipeline), query);
+    result = gst_element_query(GST_ELEMENT(m_pipeline), query);
     if (result) {
         GstFormat format;
         gst_query_parse_seeking(query, &format, &seekable, &start, &stop);
     } else {
         //TODO: Log failure
     }
-    gst_query_unref (query);
+    gst_query_unref(query);
     return seekable;
 }
 
@@ -847,13 +859,15 @@ void Pipeline::cb_setupSource(GstElement *playbin, GParamSpec \
*param, gpointer d  if (that->m_isStream) {
         that->m_reader = new StreamReader(that->m_currentSource, that);
         that->m_reader->start();
-        if (that->m_reader->streamSize() > 0)
+        if (that->m_reader->streamSize() > 0) {
             g_object_set(phononSrc, "size", that->m_reader->streamSize(), NULL);
+        }
         int streamType = 0;
-        if (that->m_reader->streamSeekable())
+        if (that->m_reader->streamSeekable()) {
             streamType = GST_APP_STREAM_TYPE_SEEKABLE;
-        else
+        } else {
             streamType = GST_APP_STREAM_TYPE_STREAM;
+        }
         g_object_set(phononSrc, "stream-type", streamType, NULL);
         g_object_set(phononSrc, "block", TRUE, NULL);
         g_signal_connect(phononSrc, "need-data", G_CALLBACK(cb_feedAppSrc), \
that->m_reader); @@ -894,7 +908,7 @@ qint64 Pipeline::position() const
     }
 
     gint64 pos = 0;
-    gst_element_query_position (GST_ELEMENT(m_pipeline), GST_FORMAT_TIME, &pos);
+    gst_element_query_position(GST_ELEMENT(m_pipeline), GST_FORMAT_TIME, &pos);
     return (pos / GST_MSECOND);
 }
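
For reference, the audio and video graphs set up above both follow the same GStreamer idiom: put the chain into a GstBin and export the first element's sink pad as a ghost pad, so playbin can link to the bin as if it were a single sink element. In isolation the pattern looks roughly like this (a minimal sketch; the bin, element, and playbin variables are illustrative, not the actual pipeline members):

    GstElement *bin   = gst_bin_new("audioGraph");
    GstElement *queue = gst_element_factory_make("queue", "audioPipe");
    gst_bin_add(GST_BIN(bin), queue);

    // Expose the queue's sink pad as the bin's own "sink" pad.
    GstPad *pad = gst_element_get_static_pad(queue, "sink");
    gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));
    gst_object_unref(pad);

    // playbin now treats the whole bin as its audio sink.
    g_object_set(playbin, "audio-sink", bin, NULL);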
 
diff --git a/gstreamer/qwidgetvideosink.cpp b/gstreamer/qwidgetvideosink.cpp
index 19c9fb3..7122235 100644
--- a/gstreamer/qwidgetvideosink.cpp
+++ b/gstreamer/qwidgetvideosink.cpp
@@ -60,7 +60,7 @@ const char* QWidgetVideoSinkClass<VideoFormat_RGB>::get_name()
 template <VideoFormat FMT>
 gboolean QWidgetVideoSink<FMT>::set_caps(GstBaseSink* sink, GstCaps* caps)
 {
-    GstStructure*       data;
+    GstStructure *data;
     QWidgetVideoSink<FMT> *self = G_TYPE_CHECK_INSTANCE_CAST(sink, \
QWidgetVideoSinkClass<FMT>::get_type(), QWidgetVideoSink<FMT>);  
     data = gst_caps_get_structure(caps, 0);
@@ -83,8 +83,7 @@ GstFlowReturn QWidgetVideoSink<FMT>::render(GstBaseSink* sink, \
GstBuffer* buf)  {
     GstFlowReturn rc = GST_FLOW_OK;
 
-    if (buf != 0)
-    {
+    if (buf != 0) {
         QWidgetVideoSink<FMT> *self = G_TYPE_CHECK_INSTANCE_CAST(sink, \
QWidgetVideoSinkClass<FMT>::get_type(), QWidgetVideoSink<FMT>);  QByteArray frame;
         GstMapInfo info;
@@ -94,9 +93,9 @@ GstFlowReturn QWidgetVideoSink<FMT>::render(GstBaseSink* sink, \
GstBuffer* buf)  gst_buffer_unmap(buf, &info);
         NewFrameEvent *frameEvent = new NewFrameEvent(frame, self->width, \
self->height);  QApplication::postEvent(self->renderWidget, frameEvent);
-    }
-    else
+    } else {
         rc = GST_FLOW_ERROR;
+    }
     return rc;
 }
 
@@ -202,7 +201,7 @@ GType QWidgetVideoSinkClass<FMT>::get_type()
         };
 
         type = g_type_register_static(GST_TYPE_VIDEO_SINK,
-                                     QWidgetVideoSinkClass<FMT>::get_name(),
+                                      QWidgetVideoSinkClass<FMT>::get_name(),
                                       &info,
                                       GTypeFlags(0));
     }
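
The render() change above keeps the GStreamer 1.0 buffer-access pattern: map the buffer read-only, copy the pixels out, unmap, and hand the copy to the GUI thread. Stripped of the sink boilerplate, that core looks roughly like this (a minimal standalone sketch, not the template class itself):

    GstMapInfo info;
    if (gst_buffer_map(buf, &info, GST_MAP_READ)) {
        QByteArray frame(reinterpret_cast<const char *>(info.data), info.size);
        gst_buffer_unmap(buf, &info);
        // post `frame` to the widget on the GUI thread, e.g. via QApplication::postEvent()
    }
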
diff --git a/gstreamer/videographicsobject.cpp b/gstreamer/videographicsobject.cpp
index 37bb4fc..cee9356 100644
--- a/gstreamer/videographicsobject.cpp
+++ b/gstreamer/videographicsobject.cpp
@@ -73,12 +73,14 @@ VideoGraphicsObject::~VideoGraphicsObject()
 void VideoGraphicsObject::renderCallback(GstBuffer *buffer, void *userData)
 {
     // No data, no pointer to this -> failure
-    if (!buffer || !userData)
+    if (!buffer || !userData) {
         return;
+    }
 
     VideoGraphicsObject *that = static_cast<VideoGraphicsObject *>(userData);
-    if (!that)
+    if (!that) {
         return;
+    }
 
     // Frontend holds lock on data
     if (!that->m_mutex.tryLock()) {
@@ -89,8 +91,9 @@ void VideoGraphicsObject::renderCallback(GstBuffer *buffer, void \
*userData)  // At this point we can do stuff with the data, so we take it over.
     gst_buffer_ref(buffer);
     // Unref the old buffer first...
-    if (that->m_buffer)
+    if (that->m_buffer) {
         gst_buffer_unref(that->m_buffer);
+    }
     that->m_buffer = buffer;
 
     VideoFrame *frame = &that->m_frame;
diff --git a/gstreamer/videowidget.cpp b/gstreamer/videowidget.cpp
index 1023654..3172614 100644
--- a/gstreamer/videowidget.cpp
+++ b/gstreamer/videowidget.cpp
@@ -69,19 +69,21 @@ VideoWidget::VideoWidget(Backend *backend, QWidget *parent) :
 VideoWidget::~VideoWidget()
 {
     if (m_videoBin) {
-        gst_element_set_state (m_videoBin, GST_STATE_NULL);
-        gst_object_unref (m_videoBin);
+        gst_element_set_state(m_videoBin, GST_STATE_NULL);
+        gst_object_unref(m_videoBin);
     }
 
-    if (m_renderer)
+    if (m_renderer) {
         delete m_renderer;
+    }
 }
 
 void VideoWidget::updateWindowID()
 {
     X11Renderer *render = dynamic_cast<X11Renderer*>(m_renderer);
-    if (render)
+    if (render) {
         render->setOverlay();
+    }
 }
 
 void Gstreamer::VideoWidget::syncX()
@@ -114,36 +116,37 @@ void VideoWidget::setupVideoBin()
     GstElement *videoSink = m_renderer->videoSink();
     GstPad *videoPad = gst_element_get_static_pad(videoSink, "sink");
     g_signal_connect(videoPad, "notify::caps", G_CALLBACK(cb_capsChanged), this);
+    gst_object_unref(videoPad);
 
     m_videoBin = gst_bin_new (NULL);
     Q_ASSERT(m_videoBin);
-    gst_object_ref (GST_OBJECT (m_videoBin)); //Take ownership
-    gst_object_ref_sink (GST_OBJECT (m_videoBin));
+    gst_object_ref(GST_OBJECT (m_videoBin)); //Take ownership
+    gst_object_ref_sink(GST_OBJECT (m_videoBin));
     QByteArray tegraEnv = qgetenv("TEGRA_GST_OPENMAX");
     if (tegraEnv.isEmpty()) {
         //The videoplug element is the final element before the pluggable videosink
-        m_videoplug = gst_element_factory_make ("identity", NULL);
+        m_videoplug = gst_element_factory_make("identity", NULL);
 
         //Colorspace ensures that the output of the stream matches the input format \
                accepted by our video sink
-        m_colorspace = gst_element_factory_make ("videoconvert", NULL);
+        m_colorspace = gst_element_factory_make("videoconvert", NULL);
 
         //Video scale is used to prepare the correct aspect ratio and scale.
-        GstElement *videoScale = gst_element_factory_make ("videoscale", NULL);
+        GstElement *videoScale = gst_element_factory_make("videoscale", NULL);
 
         //We need a queue to support the tee from parent node
-        GstElement *queue = gst_element_factory_make ("queue", NULL);
+        GstElement *queue = gst_element_factory_make("queue", NULL);
 
         if (queue && m_videoBin && videoScale && m_colorspace && videoSink && \
m_videoplug) {  //Ensure that the bare essentials are prepared
-            gst_bin_add_many (GST_BIN (m_videoBin), queue, m_colorspace, \
m_videoplug, videoScale, videoSink, NULL); +            gst_bin_add_many(GST_BIN \
(m_videoBin), queue, m_colorspace, m_videoplug, videoScale, videoSink, NULL);  bool \
success = false;  //Video balance controls color/sat/hue in the YUV colorspace
-            m_videoBalance = gst_element_factory_make ("videobalance", NULL);
+            m_videoBalance = gst_element_factory_make("videobalance", NULL);
             if (m_videoBalance) {
                 // For video balance to work we have to first ensure that the video \
                is in YUV colorspace,
                 // then hand it off to the videobalance filter before finally \
                converting it back to RGB.
                 // Hence we need a videoFilter to convert the colorspace before and \
                after videobalance
-                GstElement *m_colorspace2 = gst_element_factory_make \
("videoconvert", NULL); +                GstElement *m_colorspace2 = \
                gst_element_factory_make("videoconvert", NULL);
                 gst_bin_add_many(GST_BIN(m_videoBin), m_videoBalance, m_colorspace2, \
                NULL);
                 success = gst_element_link_many(queue, m_colorspace, m_videoBalance, \
m_colorspace2, videoScale, m_videoplug, videoSink, NULL);  } else {
@@ -151,25 +154,27 @@ void VideoWidget::setupVideoBin()
                 success = gst_element_link_many(queue, m_colorspace, videoScale, \
m_videoplug, videoSink, NULL);  }
             if (success) {
-                GstPad *videopad = gst_element_get_static_pad (queue, "sink");
-                gst_element_add_pad (m_videoBin, gst_ghost_pad_new ("sink", \
                videopad));
-                gst_object_unref (videopad);
+                GstPad *videopad = gst_element_get_static_pad(queue, "sink");
+                gst_element_add_pad(m_videoBin, gst_ghost_pad_new("sink", \
videopad)); +                gst_object_unref(videopad);
                 QWidget *parentWidget = qobject_cast<QWidget*>(parent());
-                if (parentWidget)
+                if (parentWidget) {
                     parentWidget->winId();  // Due to some existing issues with \
                alien in 4.4,
-                                        //  we must currently force the creation of \
a parent widget. +                                            // we must currently \
force the creation of a parent widget. +                }
                 m_isValid = true; //initialization ok, accept input
             }
         }
     } else {
-        gst_bin_add_many (GST_BIN (m_videoBin), videoSink, NULL);
-        GstPad *videopad = gst_element_get_static_pad (videoSink,"sink");
-        gst_element_add_pad (m_videoBin, gst_ghost_pad_new ("sink", videopad));
-        gst_object_unref (videopad);
+        gst_bin_add_many(GST_BIN(m_videoBin), videoSink, NULL);
+        GstPad *videopad = gst_element_get_static_pad(videoSink,"sink");
+        gst_element_add_pad(m_videoBin, gst_ghost_pad_new("sink", videopad));
+        gst_object_unref(videopad);
         QWidget *parentWidget = qobject_cast<QWidget*>(parent());
-        if (parentWidget)
+        if (parentWidget) {
             parentWidget->winId();  // Due to some existing issues with alien in \
                4.4,
-                                    //  we must currently force the creation of a \
parent widget. +                                    // we must currently force the \
creation of a parent widget. +        }
         m_isValid = true; //initialization ok, accept input
     }
 }
@@ -189,7 +194,7 @@ void VideoWidget::setVisible(bool val) {
         GstElement *videoSink = m_renderer->videoSink();
         Q_ASSERT(videoSink);
 
-        gst_element_set_state (videoSink, GST_STATE_NULL);
+        gst_element_set_state(videoSink, GST_STATE_NULL);
         gst_bin_remove(GST_BIN(m_videoBin), videoSink);
         delete m_renderer;
         m_renderer = 0;
@@ -209,8 +214,9 @@ void VideoWidget::setVisible(bool val) {
 
 bool VideoWidget::event(QEvent *event)
 {
-    if (m_renderer && m_renderer->eventFilter(event))
+    if (m_renderer && m_renderer->eventFilter(event)) {
         return true;
+    }
     return QWidget::event(event);
 }
 
@@ -221,17 +227,19 @@ Phonon::VideoWidget::AspectRatio VideoWidget::aspectRatio() \
const  
 QSize VideoWidget::sizeHint() const
 {
-    if (!m_movieSize.isEmpty())
+    if (!m_movieSize.isEmpty()) {
         return m_movieSize;
-    else
+    } else {
         return QSize(640, 480);
+    }
 }
 
 void VideoWidget::setAspectRatio(Phonon::VideoWidget::AspectRatio aspectRatio)
 {
     m_aspectRatio = aspectRatio;
-    if (m_renderer)
+    if (m_renderer) {
         m_renderer->aspectRatioChanged(aspectRatio);
+    }
 }
 
 Phonon::VideoWidget::ScaleMode VideoWidget::scaleMode() const
@@ -344,10 +352,11 @@ QImage VideoWidget::snapshot() const
             if (ret && width > 0 && height > 0) {
                 QImage snapimage(width, height, QImage::Format_RGB888);
 
-                for (int i = 0; i < height; ++i)
+                for (int i = 0; i < height; ++i) {
                     memcpy(snapimage.scanLine(i),
                            info.data + i * GST_ROUND_UP_4(width * 3),
                            width * 3);
+                }
                 gst_buffer_unmap(snapbuffer, &info);
                 gst_buffer_unref(snapbuffer);
                 return snapimage;
@@ -390,18 +399,21 @@ void VideoWidget::setBrightness(qreal newValue)
 
    newValue = clampedValue(newValue);
 
-    if (newValue == m_brightness)
+    if (newValue == m_brightness) {
         return;
+    }
 
     m_brightness = newValue;
 
     QByteArray tegraEnv = qgetenv("TEGRA_GST_OPENMAX");
     if (tegraEnv.isEmpty()) {
-        if (m_videoBalance)
+        if (m_videoBalance) {
             g_object_set(G_OBJECT(m_videoBalance), "brightness", newValue, NULL); \
//gstreamer range is [-1, 1] +        }
     } else {
-        if (videoSink)
+        if (videoSink) {
             g_object_set(G_OBJECT(videoSink), "brightness", newValue, NULL); \
//gstreamer range is [-1, 1] +        }
     }
 }
 
@@ -418,17 +430,20 @@ void VideoWidget::setContrast(qreal newValue)
 
     newValue = clampedValue(newValue);
 
-    if (newValue == m_contrast)
+    if (newValue == m_contrast) {
         return;
+    }
 
     m_contrast = newValue;
 
     if (tegraEnv.isEmpty()) {
-        if (m_videoBalance)
+        if (m_videoBalance) {
             g_object_set(G_OBJECT(m_videoBalance), "contrast", (newValue + 1.0), \
NULL); //gstreamer range is [0-2] +        }
     } else {
-       if (videoSink)
+       if (videoSink) {
            g_object_set(G_OBJECT(videoSink), "contrast", (newValue + 1.0), NULL); \
//gstreamer range is [0-2] +       }
     }
 }
 
@@ -439,15 +454,17 @@ qreal VideoWidget::hue() const
 
 void VideoWidget::setHue(qreal newValue)
 {
-    if (newValue == m_hue)
+    if (newValue == m_hue) {
         return;
+    }
 
     newValue = clampedValue(newValue);
 
     m_hue = newValue;
 
-    if (m_videoBalance)
+    if (m_videoBalance) {
         g_object_set(G_OBJECT(m_videoBalance), "hue", newValue, NULL); //gstreamer \
range is [-1, 1] +    }
 }
 
 qreal VideoWidget::saturation() const
@@ -462,18 +479,21 @@ void VideoWidget::setSaturation(qreal newValue)
 
     newValue = clampedValue(newValue);
 
-    if (newValue == m_saturation)
+    if (newValue == m_saturation) {
         return;
+    }
 
     m_saturation = newValue;
 
     QByteArray tegraEnv = qgetenv("TEGRA_GST_OPENMAX");
     if (tegraEnv.isEmpty()) {
-        if (m_videoBalance)
+        if (m_videoBalance) {
             g_object_set(G_OBJECT(m_videoBalance), "saturation", newValue + 1.0, \
NULL); //gstreamer range is [0, 2] +        }
     } else {
-        if (videoSink)
+        if (videoSink) {
             g_object_set(G_OBJECT(videoSink), "saturation", newValue + 1.0, NULL); \
//gstreamer range is [0, 2] +        }
     }
 }
 
@@ -481,14 +501,16 @@ void VideoWidget::setSaturation(qreal newValue)
 void VideoWidget::setMovieSize(const QSize &size)
 {
     debug() << "New video size" << size;
-    if (size == m_movieSize)
+    if (size == m_movieSize) {
         return;
+    }
     m_movieSize = size;
     widget()->updateGeometry();
     widget()->update();
 
-    if (m_renderer)
+    if (m_renderer) {
         m_renderer->movieSizeChanged(m_movieSize);
+    }
 }
 
 void VideoWidget::keyPressEvent(QKeyEvent *event)
@@ -575,8 +597,9 @@ void VideoWidget::cb_capsChanged(GstPad *pad, GParamSpec *spec, \
gpointer data)  //value (see Pipeline destructor for example)
     //g_signal_handler_disconnect(pad, media->capsHandler());
     VideoWidget *that = static_cast<VideoWidget*>(data);
-    if (!GST_PAD_IS_LINKED(pad))
+    if (!GST_PAD_IS_LINKED(pad)) {
         return;
+    }
     GstState videoState;
     gst_element_get_state(that->videoElement(), &videoState, NULL, 1000);
 
@@ -594,10 +617,11 @@ void VideoWidget::cb_capsChanged(GstPad *pad, GParamSpec *spec, \
gpointer data)  
 void VideoWidget::mouseOverActive(bool active)
 {
-    if (active)
+    if (active) {
         setCursor(Qt::PointingHandCursor);
-    else
+    } else {
         setCursor(Qt::ArrowCursor);
+    }
 }
 
 }
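
As the comments in setupVideoBin() explain, the colour controls need a YUV round trip: convert to YUV, run videobalance, convert back, then scale and hand off to the pluggable sink. The chain linked above is, in essence (a minimal sketch with illustrative variable names; the sink here stands in for the renderer's pluggable sink, error handling omitted):

    GstElement *videoBin  = gst_bin_new(NULL);
    GstElement *videoSink = gst_element_factory_make("xvimagesink", NULL); // stand-in sink
    GstElement *queue     = gst_element_factory_make("queue", NULL);
    GstElement *convIn    = gst_element_factory_make("videoconvert", NULL);
    GstElement *balance   = gst_element_factory_make("videobalance", NULL);
    GstElement *convOut   = gst_element_factory_make("videoconvert", NULL);
    GstElement *scale     = gst_element_factory_make("videoscale", NULL);
    GstElement *plug      = gst_element_factory_make("identity", NULL);

    gst_bin_add_many(GST_BIN(videoBin), queue, convIn, balance, convOut,
                     scale, plug, videoSink, NULL);
    if (gst_element_link_many(queue, convIn, balance, convOut,
                              scale, plug, videoSink, NULL)) {
        // brightness/contrast/hue/saturation are plain GObject properties on videobalance
        g_object_set(G_OBJECT(balance), "brightness", 0.0, NULL); // gstreamer range is [-1, 1]
    }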
diff --git a/gstreamer/volumefadereffect.cpp b/gstreamer/volumefadereffect.cpp
index 48af395..40f68c2 100644
--- a/gstreamer/volumefadereffect.cpp
+++ b/gstreamer/volumefadereffect.cpp
@@ -36,9 +36,10 @@ VolumeFaderEffect::VolumeFaderEffect(Backend *backend, QObject \
*parent)  , m_fadeFromVolume(0)
     , m_fadeToVolume(0)
 {
-    m_effectElement = gst_element_factory_make ("volume", NULL);
-    if (m_effectElement)
+    m_effectElement = gst_element_factory_make("volume", NULL);
+    if (m_effectElement) {
         init();
+    }
     m_fadeTimeline = new QTimeLine(1000, this);
     connect(m_fadeTimeline, SIGNAL(valueChanged(qreal)), this, \
SLOT(slotSetVolume(qreal)));  }
@@ -52,31 +53,32 @@ GstElement* VolumeFaderEffect::createEffectBin()
     GstElement *audioBin = gst_bin_new(NULL);
 
     // We need a queue to handle tee-connections from parent node
-    GstElement *queue= gst_element_factory_make ("queue", NULL);
+    GstElement *queue= gst_element_factory_make("queue", NULL);
     gst_bin_add(GST_BIN(audioBin), queue);
 
-    GstElement *mconv= gst_element_factory_make ("audioconvert", NULL);
+    GstElement *mconv= gst_element_factory_make("audioconvert", NULL);
     gst_bin_add(GST_BIN(audioBin), mconv);
     gst_bin_add(GST_BIN(audioBin), m_effectElement);
 
     // Link src pad
-    GstPad *srcPad= gst_element_get_static_pad (m_effectElement, "src");
-    gst_element_add_pad (audioBin, gst_ghost_pad_new ("src", srcPad));
-    gst_object_unref (srcPad);
+    GstPad *srcPad= gst_element_get_static_pad(m_effectElement, "src");
+    gst_element_add_pad(audioBin, gst_ghost_pad_new("src", srcPad));
+    gst_object_unref(srcPad);
 
     // Link sink pad
     gst_element_link_many(queue, mconv, m_effectElement, NULL);
-    GstPad *sinkpad = gst_element_get_static_pad (queue, "sink");
-    gst_element_add_pad (audioBin, gst_ghost_pad_new ("sink", sinkpad));
-    gst_object_unref (sinkpad);
+    GstPad *sinkpad = gst_element_get_static_pad(queue, "sink");
+    gst_element_add_pad(audioBin, gst_ghost_pad_new("sink", sinkpad));
+    gst_object_unref(sinkpad);
     return audioBin;
 }
 
 float VolumeFaderEffect::volume() const
 {
     gdouble val = 1.0;
-    if (m_effectElement)
+    if (m_effectElement) {
         g_object_get(G_OBJECT(m_effectElement), "volume", &val, NULL);
+    }
     return (float)val;
 }
 
diff --git a/gstreamer/widgetrenderer.cpp b/gstreamer/widgetrenderer.cpp
index 1edffc3..0641a41 100644
--- a/gstreamer/widgetrenderer.cpp
+++ b/gstreamer/widgetrenderer.cpp
@@ -40,8 +40,9 @@
 static void frameRendered()
 {
     static QString displayFps = qgetenv("PHONON_GST_FPS");
-    if (displayFps.isEmpty())
+    if (displayFps.isEmpty()) {
         return;
+    }
 
     static int frames = 0;
     static QTime lastTime = QTime::currentTime();
@@ -88,14 +89,11 @@ WidgetRenderer::WidgetRenderer(VideoWidget *videoWidget)
 
 void WidgetRenderer::setNextFrame(const QByteArray &array, int w, int h)
 {
-    if (m_videoWidget->root()->state() == Phonon::LoadingState)
+    if (m_videoWidget->root()->state() == Phonon::LoadingState) {
         return;
-
-    m_frame = QImage();
-    {
-        m_frame = QImage((uchar *)array.constData(), w, h, QImage::Format_RGB32);
     }
 
+    m_frame = QImage((uchar *)array.constData(), w, h, QImage::Format_RGB32);
     m_array = array;
     m_width = w;
     m_height = h;
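
One detail worth noting about the simplified setNextFrame(): the QImage constructor used here wraps the caller's buffer rather than copying the pixels, so the QByteArray stored in m_array is what actually keeps the frame data alive while m_frame is in use. A minimal sketch of that behaviour (illustrative only, not the member code):

    int w = 320, h = 240;
    QByteArray pixels(w * h * 4, '\0');  // backing store; must outlive the image
    QImage frame(reinterpret_cast<const uchar *>(pixels.constData()),
                 w, h, QImage::Format_RGB32);  // wraps the buffer, does not deep-copy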
diff --git a/gstreamer/x11renderer.cpp b/gstreamer/x11renderer.cpp
index 7460247..9c98ceb 100644
--- a/gstreamer/x11renderer.cpp
+++ b/gstreamer/x11renderer.cpp
@@ -81,7 +81,7 @@ X11Renderer::~X11Renderer()
 
 GstElement* X11Renderer::createVideoSink()
 {
-    GstElement *videoSink = gst_element_factory_make ("xvimagesink", NULL);
+    GstElement *videoSink = gst_element_factory_make("xvimagesink", NULL);
     if (videoSink) {
         // Check if the xv sink is usable
         if (gst_element_set_state(videoSink, GST_STATE_READY) != \
GST_STATE_CHANGE_SUCCESS) { @@ -105,8 +105,8 @@ GstElement* \
X11Renderer::createVideoSink()  videoSink = gst_element_factory_make ("ximagesink", \
NULL);  }
 
-    gst_object_ref (GST_OBJECT (videoSink)); //Take ownership
-    gst_object_ref_sink (GST_OBJECT (videoSink));
+    gst_object_ref(GST_OBJECT (videoSink)); //Take ownership
+    gst_object_ref_sink(GST_OBJECT (videoSink));
 
     return videoSink;
 }
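
The ref()/ref_sink() pair here (and on the audio/video graphs earlier) is the usual way to take ownership of a newly created, floating GStreamer object: gst_object_ref_sink() converts the initial floating reference into a normal one, and the additional gst_object_ref() keeps an extra reference for the owner, matching the "Take ownership" comment. A minimal standalone sketch:

    GstElement *sink = gst_element_factory_make("xvimagesink", NULL);
    gst_object_ref(GST_OBJECT(sink));       // extra reference for the owner
    gst_object_ref_sink(GST_OBJECT(sink));  // sink the initial floating reference
    // ... hand the sink to a bin, use it ...
    gst_object_unref(GST_OBJECT(sink));     // each reference taken must eventually be released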

