diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
index 4378b29..cbd3851 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
@@ -160,6 +160,10 @@ MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
     , m_totalBytes(0)
     , m_preservesPitch(false)
     , m_lastQuery(-1)
+#if PLATFORM(BCM_NEXUS)
+    , m_videoDecoder(nullptr)
+    , m_audioDecoder(nullptr)
+#endif
 {
 #if USE(GLIB)
     m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
@@ -312,12 +316,12 @@ void MediaPlayerPrivateGStreamer::commitLoad()
 }
 
 #if PLATFORM(BCM_NEXUS)
-// utility function for bcm nexus seek functionality
-static void findDecoders(GstElement *element, GstElement **videoDecoder, GstElement **audioDecoder)
-{
-    if (!(videoDecoder || audioDecoder))
-        return;
-
+// Find a decoder element whose instance name contains the given string.
+static void findDecoder(GstElement* element, GstElement** decoder, const CString& search)
+{
+    if (!decoder)
+        return;
+
     if (GST_IS_BIN(element)) {
         GstIterator* it = gst_bin_iterate_elements(GST_BIN(element));
         GValue item = G_VALUE_INIT;
@@ -327,8 +331,8 @@ static void findDecoders(GstElement *element, GstElement **videoDecoder, GstElement **audioDecoder)
         case GST_ITERATOR_OK:
         {
             GstElement *next = GST_ELEMENT(g_value_get_object(&item));
-            findDecoders(next, videoDecoder, audioDecoder);
-            done = (!((videoDecoder && !*videoDecoder) || (audioDecoder && !*audioDecoder)));
+            findDecoder(next, decoder, search);
+            done = !(decoder && !*decoder);
             g_value_reset (&item);
             break;
         }
@@ -343,10 +347,9 @@ static void findDecoders(GstElement *element, GstElement **videoDecoder, GstElement **audioDecoder)
         }
         g_value_unset (&item);
         gst_iterator_free(it);
-    } else if (videoDecoder && (GST_IS_VIDEO_DECODER(element) || g_str_has_suffix(G_OBJECT_TYPE_NAME(G_OBJECT(element)), "VideoDecoder")))
-        *videoDecoder = element;
-    else if (audioDecoder && (GST_IS_AUDIO_DECODER(element) || g_str_has_suffix(G_OBJECT_TYPE_NAME(G_OBJECT(element)), "AudioDecoder")))
-        *audioDecoder = element;
+    } else if (decoder && g_strstr_len(GST_ELEMENT_NAME(element), -1, search.data())) {
+        *decoder = element;
+    }
     return;
 }
 #endif
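
Note: findDecoder() matches by element *instance* name (GStreamer auto-names instances after their factory, e.g. "brcmvideodecoder0"), not by plugin or factory name, so the lookup silently fails if an element was given a custom name. The same recursive bin walk can be tried outside WebKit; below is a minimal standalone sketch (the helper name findElementByName is mine, not part of the patch):

    // Recursively walk a GstBin and return the first element whose instance
    // name contains `needle`; the caller owns a ref and must gst_object_unref it.
    #include <gst/gst.h>
    #include <cstring>

    static GstElement* findElementByName(GstElement* root, const char* needle)
    {
        if (!GST_IS_BIN(root)) {
            if (std::strstr(GST_ELEMENT_NAME(root), needle))
                return GST_ELEMENT(gst_object_ref(root));
            return nullptr;
        }

        GstElement* found = nullptr;
        GstIterator* it = gst_bin_iterate_elements(GST_BIN(root));
        GValue item = G_VALUE_INIT;
        bool done = false;
        while (!done) {
            switch (gst_iterator_next(it, &item)) {
            case GST_ITERATOR_OK:
                found = findElementByName(GST_ELEMENT(g_value_get_object(&item)), needle);
                done = (found != nullptr);
                g_value_reset(&item);
                break;
            case GST_ITERATOR_RESYNC:
                gst_iterator_resync(it);
                break;
            default: // GST_ITERATOR_DONE or GST_ITERATOR_ERROR
                done = true;
                break;
            }
        }
        g_value_unset(&item);
        gst_iterator_free(it);
        return found;
    }

Unlike the patch's findDecoder(), this sketch takes a reference on the returned element, which is what would make long-lived caching of the result safe.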
@@ -355,6 +358,7 @@ static void findDecoders(GstElement *element, GstElement **videoDecoder, GstElement **audioDecoder)
 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
 {
 
+    GST_INFO("CachedPosition %s", toString(m_cachedPosition).utf8().data());
     if (m_isEndReached) {
         // Position queries on a null pipeline return 0. If we're at
         // the end of the stream the pipeline is null but we want to
@@ -373,7 +377,6 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
     if (m_lastQuery > -1 && ((now - m_lastQuery) < 0.01) && m_cachedPosition.isValid())
         return m_cachedPosition;
 
-    m_lastQuery = now;
 
     // Position is only available if no async state change is going on and the state is either paused or playing.
     gint64 position = GST_CLOCK_TIME_NONE;
@@ -392,32 +395,87 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
     // Implement getting pts time from broadcom decoder directly for seek functionality.
     // In some cases one stream (audio or video) is shorter than the other and its position doesn't
     // increase anymore. We need to query both decoders (if available) and choose the highest position.
-    GstElement* videoDecoder = nullptr;
-    GstElement* audioDecoder = nullptr;
     GstClockTime videoPosition = GST_CLOCK_TIME_NONE;
     GstClockTime audioPosition = GST_CLOCK_TIME_NONE;
 
-    findDecoders(m_pipeline.get(), &videoDecoder, &audioDecoder);
-
-    GST_TRACE("videoDecoder: %s, audioDecoder: %s", videoDecoder ? GST_ELEMENT_NAME(videoDecoder) : "null", audioDecoder ? GST_ELEMENT_NAME(audioDecoder) : "null");
+    if (!m_audioDecoder) {
+
+        if (!m_videoDecoder) {
+            GstElement* videoDecoder = nullptr;
+            findDecoder(m_pipeline.get(), &videoDecoder, "brcmvideodecoder");
+            if (!videoDecoder) {
+                m_lastQuery = now;
+                gst_query_unref(query);
+                return MediaTime::zeroTime();
+            }
+            m_videoDecoder = videoDecoder;
+        }
 
-    if (!(videoDecoder || audioDecoder))
-        return MediaTime::zeroTime();
-    if (videoDecoder && gst_element_query(videoDecoder, query))
-        gst_query_parse_position(query, 0, (gint64*)&videoPosition);
-    if (audioDecoder) {
-        g_object_set(audioDecoder, "use-audio-position", true, nullptr);
-        if (gst_element_query(audioDecoder, query))
+        if (gst_element_query(m_videoDecoder, query)) {
+            gst_query_parse_position(query, 0, (gint64*)&videoPosition);
+        } /*else {
+            GST_INFO("VideoDecoder is NULL");
+            m_videoDecoder = nullptr;
+        }*/
+
+
+        if (videoPosition == GST_CLOCK_TIME_NONE)
+            videoPosition = 0;
+
+
+        if (!(m_seeking || m_paused)) {
+            if (m_cachedPosition.isValid() && videoPosition != 0) {
+                if ((static_cast<GstClockTime>(videoPosition) > toGstClockTime(m_cachedPosition)) || m_cachedPosition == MediaTime::zeroTime()) {
+                    // Prefer the video position while it is advancing.
+                    position = videoPosition;
+                } else if ((static_cast<GstClockTime>(videoPosition) == toGstClockTime(m_cachedPosition)) &&
+                    ((m_lastQuery > -1 && (now - m_lastQuery) < 2))) { // TODO: 2-second threshold; is there a better trigger for switching to the audio position?
+                    // Position unchanged for less than 2 seconds: keep reporting the cached value.
+                    gst_query_unref(query);
+                    return m_cachedPosition;
+                } else if (m_cachedPosition == m_seekTime) {
+                    // While a seek has not completed yet, report the video position.
+                    if (videoPosition > 0)
+                        position = videoPosition;
+                } else {
+                    GST_INFO("Switch to audio position.");
+                    GstElement* audioDecoder = nullptr;
+                    findDecoder(m_pipeline.get(), &audioDecoder, "brcmaudiodecoder");
+                    if (!audioDecoder) {
+                        m_lastQuery = now;
+                        gst_query_unref(query);
+                        return m_cachedPosition;
+                    }
+                    m_audioDecoder = audioDecoder;
+                    g_object_set(m_audioDecoder, "use-audio-position", true, nullptr);
+                    if (gst_element_query(m_audioDecoder, query))
+                        gst_query_parse_position(query, 0, (gint64*)&audioPosition);
+
+                    if (audioPosition == GST_CLOCK_TIME_NONE)
+                        audioPosition = 0;
+
+                    position = audioPosition;
+                }
+            }
+        } else {
+            // While paused or seeking, report the cached position.
+            position = toGstClockTime(m_cachedPosition);
+        }
+
+    } else {
+        if (gst_element_query(m_audioDecoder, query)) {
             gst_query_parse_position(query, 0, (gint64*)&audioPosition);
+        } /*else {
+            GST_INFO("AudioDecoder is NULL");
+            m_audioDecoder = nullptr;
+        }*/
+
+        if (audioPosition == GST_CLOCK_TIME_NONE)
+            audioPosition = 0;
+        position = audioPosition;
     }
-    if (videoPosition == GST_CLOCK_TIME_NONE)
-        videoPosition = 0;
-    if (audioPosition == GST_CLOCK_TIME_NONE)
-        audioPosition = 0;
-
-    GST_TRACE("videoPosition: %" GST_TIME_FORMAT ", audioPosition: %" GST_TIME_FORMAT, GST_TIME_ARGS(videoPosition), GST_TIME_ARGS(audioPosition));
-
-    position = max(videoPosition, audioPosition);
+
+    GST_INFO("videoPosition: %" GST_TIME_FORMAT ", audioPosition: %" GST_TIME_FORMAT, GST_TIME_ARGS(videoPosition), GST_TIME_ARGS(audioPosition));
 #else
     positionElement = m_pipeline.get();
 #endif
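
Note: every decoder query in the hunk above is the standard GStreamer position-query pattern, which reduces to this (sketch; `decoder` stands for whichever element is being asked):

    gint64 pos = -1; // stays -1 if the query fails
    GstQuery* q = gst_query_new_position(GST_FORMAT_TIME);
    if (gst_element_query(decoder, q))
        gst_query_parse_position(q, nullptr, &pos);
    gst_query_unref(q);

The patch reuses one `query` object across elements and unrefs it on every early return; the unconditional gst_query_unref() further down covers the fall-through paths.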
@@ -425,7 +483,7 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
     gst_query_parse_position(query, 0, &position);
     gst_query_unref(query);
 
-    GST_DEBUG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
+    GST_INFO("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
 
     MediaTime playbackPosition = MediaTime::zeroTime();
     GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
@@ -435,6 +493,7 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
         playbackPosition = m_seekTime;
 
     m_cachedPosition = playbackPosition;
+    m_lastQuery = now;
     return playbackPosition;
 }
 
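
Note: m_lastQuery is now written only at the very end of playbackPosition(), next to m_cachedPosition, so the 10 ms throttle at the top measures time since the last *completed* query rather than since the last attempt. Condensed, the BCM_NEXUS position selection added above behaves like this (summary, not verbatim code):

    // While no audio decoder has been cached (the common case):
    //   playing:  video position advanced, or cache is 0  -> use video position
    //             video position frozen for < 2 s         -> return cached position
    //             cache still equals the seek target      -> use video position
    //             video position frozen for >= 2 s        -> find brcmaudiodecoder,
    //                                                        cache it, use audio position
    //   paused or seeking:                                -> use cached position
    // Once m_audioDecoder is cached, only the audio decoder is ever queried again.

One consequence worth noting: after the first fallback the code never returns to the video position, which matches the stated intent of handling a stream whose shorter track stops advancing.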
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
index 5490a2d..d8d85bc 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
@@ -196,7 +196,10 @@ protected:
     GRefPtr<GstElement> m_source;
     bool m_volumeAndMuteInitialized;
     MediaTime m_previousDuration;
-
+#if PLATFORM(BCM_NEXUS)
+    mutable GstElement* m_audioDecoder;
+    mutable GstElement* m_videoDecoder;
+#endif
     static GstSeekFlags hardwareDependantSeekFlags();
    void readyTimerFired();
 
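
Note: the two caches are `mutable` because playbackPosition() is const. Unlike the GRefPtr members around them they are bare pointers, so the cache holds no reference and is never invalidated; if the pipeline tears down and rebuilds its decoders, the pointers dangle (the commented-out reset branches in the .cpp hunk hint at this). A safer variant (my suggestion, not what the patch ships) would reuse the smart pointer type already used for m_source:

    #if PLATFORM(BCM_NEXUS)
        mutable GRefPtr<GstElement> m_audioDecoder;
        mutable GRefPtr<GstElement> m_videoDecoder;
    #endif

together with taking a reference in findDecoder(), as in the standalone sketch earlier.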
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
index 4ea79d5..375b583 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
@@ -272,6 +272,9 @@ MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
     : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
     , m_player(player)
     , m_fpsSink(nullptr)
+#if PLATFORM(BCM_NEXUS)
+    , m_videoBcmSink(nullptr)
+#endif
     , m_readyState(MediaPlayer::HaveNothing)
     , m_networkState(MediaPlayer::Empty)
     , m_isEndReached(false)
@@ -1274,10 +1277,13 @@ unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
         GST_DEBUG("frames decoded: %llu", decodedFrames);
     }
 #if PLATFORM(BCM_NEXUS)
-    GstElement* videoSink = nullptr;
-    videoSink = findVideoSink(m_pipeline.get());
-    if (videoSink) {
-        g_object_get(videoSink, "frames-rendered", &decodedFrames, nullptr);
+
+    if (!m_videoBcmSink) {
+        GstElement* videoSink = findVideoSink(m_pipeline.get());
+        if (videoSink)
+            m_videoBcmSink = videoSink;
+    } else {
+        g_object_get(m_videoBcmSink, "frames-rendered", &decodedFrames, nullptr);
         GST_DEBUG("frames decoded: %llu", decodedFrames);
     }
 #endif
@@ -1291,10 +1297,12 @@ unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
 
 #if PLATFORM(BCM_NEXUS)
-    GstElement* videoSink = nullptr;
-    videoSink = findVideoSink(m_pipeline.get());
-    if (videoSink) {
-        g_object_get(videoSink, "frames-dropped", &framesDropped, nullptr);
+    if (!m_videoBcmSink) {
+        GstElement* videoSink = findVideoSink(m_pipeline.get());
+        if (videoSink)
+            m_videoBcmSink = videoSink;
+    } else {
+        g_object_get(m_videoBcmSink, "frames-dropped", &framesDropped, nullptr);
         GST_DEBUG("frames dropped: %llu", framesDropped);
     }
 #endif
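
Note: with the if/else split above, the call that first discovers the sink only fills m_videoBcmSink and skips the g_object_get(), so that call still returns whatever the fps-sink path produced. A variant that caches and reads in the same call would look like this (sketch; assumes the Broadcom sink really exposes the "frames-rendered"/"frames-dropped" properties used here):

    if (!m_videoBcmSink)
        m_videoBcmSink = findVideoSink(m_pipeline.get());
    if (m_videoBcmSink) {
        g_object_get(m_videoBcmSink, "frames-rendered", &decodedFrames, nullptr);
        GST_DEBUG("frames decoded: %llu", decodedFrames);
    }

The same shape applies to droppedFrameCount() with "frames-dropped".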
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h
index 9e6fbe9..325beaf 100644
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h
@@ -240,6 +240,9 @@ protected:
     GRefPtr<GstElement> m_pipeline;
     GRefPtr<GstStreamVolume> m_volumeElement;
     GRefPtr<GstElement> m_videoSink;
+#if PLATFORM(BCM_NEXUS)
+    mutable GstElement* m_videoBcmSink;
+#endif
     GRefPtr<GstElement> m_fpsSink;
     MediaPlayer::ReadyState m_readyState;
     mutable MediaPlayer::NetworkState m_networkState;
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
index 573020c..2334855 100644
--- a/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
@@ -626,7 +626,7 @@ void MediaPlayerPrivateGStreamerMSE::updateStates()
         ASSERT_NOT_REACHED();
         break;
     }
-#if PLATFORM(BROADCOM)
+#if PLATFORM(BCM_NEXUS)
     // this code path needs a proper review in case it can be generalized to all platforms.
     bool buffering = !isTimeBuffered(currentMediaTime()) && !playbackPipelineHasFutureData();
 #else
@@ -639,16 +639,13 @@ void MediaPlayerPrivateGStreamerMSE::updateStates()
         notifyPlayerOfMute();
         m_volumeAndMuteInitialized = true;
     }
-
 #if PLATFORM(BCM_NEXUS)
-    if (!isTimeBuffered(currentMediaTime()) && !playbackPipelineHasFutureData()) {
+    if (buffering) {
         m_readyState = MediaPlayer::HaveMetadata;
-    }
-    else
+        GST_DEBUG("[Buffering] Setting ready state to HaveMetadata.");
+    } else
 #endif
-    if (!isTimeBuffered(currentMediaTime()) && !playbackPipelineHasFutureData()) {
-        m_readyState = MediaPlayer::HaveMetadata;
-    } else if (!seeking() && !buffering && !m_paused && m_playbackRate) {
+    if (!seeking() && !buffering && !m_paused && m_playbackRate) {
         GST_DEBUG("[Buffering] Restarting playback.");
         changePipelineState(GST_STATE_PLAYING);
     }
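
Note: after this hunk the BCM_NEXUS ready-state logic collapses to a single decision on the `buffering` flag computed earlier in updateStates() (condensed sketch; the `!buffering` term in the second condition is redundant inside the else branch, but the non-BCM_NEXUS build compiles that condition without the preceding if, so it stays):

    bool buffering = !isTimeBuffered(currentMediaTime()) && !playbackPipelineHasFutureData();
    if (buffering) {
        m_readyState = MediaPlayer::HaveMetadata; // starve the element until data arrives
    } else if (!seeking() && !buffering && !m_paused && m_playbackRate) {
        changePipelineState(GST_STATE_PLAYING);   // resume playback
    }

This also removes the duplicated isTimeBuffered()/playbackPipelineHasFutureData() evaluation that the pre-patch code performed twice per update.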