diff --git a/app/streaming/video/ffmpeg-renderers/drm.cpp b/app/streaming/video/ffmpeg-renderers/drm.cpp index d6a6c8dd..ca234ff4 100644 --- a/app/streaming/video/ffmpeg-renderers/drm.cpp +++ b/app/streaming/video/ffmpeg-renderers/drm.cpp @@ -202,6 +202,11 @@ int DrmRenderer::getRendererAttributes() void DrmRenderer::renderFrame(AVFrame* frame) { + if (frame == nullptr) { + // End of stream - nothing to do for us + return; + } + AVDRMFrameDescriptor* drmFrame = (AVDRMFrameDescriptor*)frame->data[0]; int err; uint32_t primeHandle; diff --git a/app/streaming/video/ffmpeg-renderers/dxva2.cpp b/app/streaming/video/ffmpeg-renderers/dxva2.cpp index 208eccaa..a490c8c2 100644 --- a/app/streaming/video/ffmpeg-renderers/dxva2.cpp +++ b/app/streaming/video/ffmpeg-renderers/dxva2.cpp @@ -812,6 +812,11 @@ int DXVA2Renderer::getDecoderColorspace() void DXVA2Renderer::renderFrame(AVFrame *frame) { + if (frame == nullptr) { + // End of stream - nothing to do for us + return; + } + IDirect3DSurface9* surface = reinterpret_cast<IDirect3DSurface9*>(frame->data[3]); HRESULT hr; diff --git a/app/streaming/video/ffmpeg-renderers/eglvid.cpp b/app/streaming/video/ffmpeg-renderers/eglvid.cpp index d2ab2ddc..9f98281e 100644 --- a/app/streaming/video/ffmpeg-renderers/eglvid.cpp +++ b/app/streaming/video/ffmpeg-renderers/eglvid.cpp @@ -434,6 +434,12 @@ bool EGLRenderer::specialize() { void EGLRenderer::renderFrame(AVFrame* frame) { EGLImage imgs[EGL_MAX_PLANES]; + + if (frame == nullptr) { + // End of stream - nothing to do for us + return; + } + if (frame->hw_frames_ctx != nullptr) { // Find the native read-back format and load the shader if (m_SwPixelFormat == AV_PIX_FMT_NONE) { diff --git a/app/streaming/video/ffmpeg-renderers/mmal.cpp b/app/streaming/video/ffmpeg-renderers/mmal.cpp index cb228e5d..bfea4168 100644 --- a/app/streaming/video/ffmpeg-renderers/mmal.cpp +++ b/app/streaming/video/ffmpeg-renderers/mmal.cpp @@ -147,6 +147,11 @@ bool MmalRenderer::needsTestFrame() void 
MmalRenderer::renderFrame(AVFrame* frame) { + if (frame == nullptr) { + // End of stream - nothing to do for us + return; + } + MMAL_BUFFER_HEADER_T* buffer = (MMAL_BUFFER_HEADER_T*)frame->data[3]; MMAL_STATUS_T status; diff --git a/app/streaming/video/ffmpeg-renderers/pacer/pacer.cpp b/app/streaming/video/ffmpeg-renderers/pacer/pacer.cpp index 574c5eab..dedcaff2 100644 --- a/app/streaming/video/ffmpeg-renderers/pacer/pacer.cpp +++ b/app/streaming/video/ffmpeg-renderers/pacer/pacer.cpp @@ -45,6 +45,10 @@ Pacer::~Pacer() m_RenderQueueNotEmpty.wakeAll(); SDL_WaitThread(m_RenderThread, nullptr); } + else { + // Send a null AVFrame to indicate end of stream on the main thread + m_VsyncRenderer->renderFrame(nullptr); + } // Delete any remaining unconsumed frames while (!m_RenderQueue.isEmpty()) { @@ -106,6 +110,9 @@ int Pacer::renderThread(void* context) me->renderLastFrameAndUnlock(); } + // Send a null AVFrame to indicate end of stream on the render thread + me->m_VsyncRenderer->renderFrame(nullptr); + return 0; } diff --git a/app/streaming/video/ffmpeg-renderers/sdlvid.cpp b/app/streaming/video/ffmpeg-renderers/sdlvid.cpp index 43e40f5a..ed23b711 100644 --- a/app/streaming/video/ffmpeg-renderers/sdlvid.cpp +++ b/app/streaming/video/ffmpeg-renderers/sdlvid.cpp @@ -259,6 +259,11 @@ void SdlRenderer::renderFrame(AVFrame* frame) int err; AVFrame* swFrame = nullptr; + if (frame == nullptr) { + // End of stream - nothing to do for us + return; + } + if (frame->hw_frames_ctx != nullptr) { // If we are acting as the frontend for a hardware // accelerated decoder, we'll need to read the frame diff --git a/app/streaming/video/ffmpeg-renderers/vaapi.cpp b/app/streaming/video/ffmpeg-renderers/vaapi.cpp index 526c2d81..54d3b697 100644 --- a/app/streaming/video/ffmpeg-renderers/vaapi.cpp +++ b/app/streaming/video/ffmpeg-renderers/vaapi.cpp @@ -391,6 +391,11 @@ int VAAPIRenderer::getDecoderColorspace() void VAAPIRenderer::renderFrame(AVFrame* frame) { + if (frame == nullptr) { + // 
End of stream - nothing to do for us + return; + } + VASurfaceID surface = (VASurfaceID)(uintptr_t)frame->data[3]; AVHWDeviceContext* deviceContext = (AVHWDeviceContext*)m_HwContext->data; AVVAAPIDeviceContext* vaDeviceContext = (AVVAAPIDeviceContext*)deviceContext->hwctx; diff --git a/app/streaming/video/ffmpeg-renderers/vdpau.cpp b/app/streaming/video/ffmpeg-renderers/vdpau.cpp index 5b05572c..a903d890 100644 --- a/app/streaming/video/ffmpeg-renderers/vdpau.cpp +++ b/app/streaming/video/ffmpeg-renderers/vdpau.cpp @@ -302,6 +302,11 @@ int VDPAURenderer::getDecoderColorspace() void VDPAURenderer::renderFrame(AVFrame* frame) { + if (frame == nullptr) { + // End of stream - nothing to do for us + return; + } + VdpStatus status; VdpVideoSurface videoSurface = (VdpVideoSurface)(uintptr_t)frame->data[3]; diff --git a/app/streaming/video/ffmpeg-renderers/vt.mm b/app/streaming/video/ffmpeg-renderers/vt.mm index c23b758e..c140112e 100644 --- a/app/streaming/video/ffmpeg-renderers/vt.mm +++ b/app/streaming/video/ffmpeg-renderers/vt.mm @@ -158,6 +158,11 @@ public: // Caller frees frame after we return virtual void renderFrame(AVFrame* frame) override { + if (frame == nullptr) { + // End of stream - nothing to do for us + return; + } + OSStatus status; CVPixelBufferRef pixBuf = reinterpret_cast<CVPixelBufferRef>(frame->data[3]);