diff --git a/app/streaming/video/ffmpeg-renderers/drm.cpp b/app/streaming/video/ffmpeg-renderers/drm.cpp index 85777764..cbc84917 100644 --- a/app/streaming/video/ffmpeg-renderers/drm.cpp +++ b/app/streaming/video/ffmpeg-renderers/drm.cpp @@ -26,8 +26,9 @@ extern "C" { #include -DrmRenderer::DrmRenderer() - : m_HwContext(nullptr), +DrmRenderer::DrmRenderer(IFFmpegRenderer *backendRenderer) + : m_BackendRenderer(backendRenderer), + m_HwContext(nullptr), m_DrmFd(-1), m_SdlOwnsDrmFd(false), m_SupportsDirectRendering(false), @@ -186,7 +187,10 @@ bool DrmRenderer::initialize(PDECODER_PARAMETERS params) // stuff, since we have EGLRenderer and SDLRenderer that we can use // for indirect rendering. Our FFmpeg renderer selection code will // handle the case where those also fail to render the test frame. - const bool DIRECT_RENDERING_INIT_FAILED = true; + // If we are just acting as a frontend renderer (m_BackendRenderer + // == nullptr), we want to fail if we can't render directly since + // that's the whole point it's trying to use us for. + const bool DIRECT_RENDERING_INIT_FAILED = (m_BackendRenderer == nullptr); // If we're not sharing the DRM FD with SDL, that means we don't // have DRM master, so we can't call drmModeSetPlane(). We can @@ -402,10 +406,26 @@ bool DrmRenderer::initialize(PDECODER_PARAMETERS params) return true; } -enum AVPixelFormat DrmRenderer::getPreferredPixelFormat(int) +enum AVPixelFormat DrmRenderer::getPreferredPixelFormat(int videoFormat) { - // DRM PRIME buffers - return AV_PIX_FMT_DRM_PRIME; + // DRM PRIME buffers, or whatever the backend renderer wants + if (m_BackendRenderer != nullptr) { + return m_BackendRenderer->getPreferredPixelFormat(videoFormat); + } + else { + return AV_PIX_FMT_DRM_PRIME; + } +} + +bool DrmRenderer::isPixelFormatSupported(int videoFormat, AVPixelFormat pixelFormat) { + // Pass through the backend renderer if we have one. Otherwise we use + // the default behavior which only supports the preferred format. 
+ if (m_BackendRenderer != nullptr) { + return m_BackendRenderer->isPixelFormatSupported(videoFormat, pixelFormat); + } + else { + return pixelFormat == getPreferredPixelFormat(videoFormat); + } } int DrmRenderer::getRendererAttributes() @@ -447,14 +467,30 @@ void DrmRenderer::setHdrMode(bool enabled) void DrmRenderer::renderFrame(AVFrame* frame) { + AVDRMFrameDescriptor mappedFrame; + AVDRMFrameDescriptor* drmFrame; + if (frame == nullptr) { // End of stream - nothing to do for us return; } - AVDRMFrameDescriptor* drmFrame = (AVDRMFrameDescriptor*)frame->data[0]; + // If we are acting as the frontend renderer, we'll need to have the backend + // map this frame into a DRM PRIME descriptor that we can render. + if (m_BackendRenderer != nullptr) { + if (!m_BackendRenderer->mapDrmPrimeFrame(frame, &mappedFrame)) { + return; + } + + drmFrame = &mappedFrame; + } + else { + // If we're the backend renderer, the frame should already have it. + SDL_assert(frame->format == AV_PIX_FMT_DRM_PRIME); + drmFrame = (AVDRMFrameDescriptor*)frame->data[0]; + } + int err; - uint32_t primeHandle; uint32_t handles[4] = {}; uint32_t pitches[4] = {}; uint32_t offsets[4] = {}; @@ -470,29 +506,33 @@ void DrmRenderer::renderFrame(AVFrame* frame) StreamUtils::scaleSourceToDestinationSurface(&src, &dst); - // Convert the FD in the AVDRMFrameDescriptor to a PRIME handle - // that can be used in drmModeAddFB2() - SDL_assert(drmFrame->nb_objects == 1); - err = drmPrimeFDToHandle(m_DrmFd, drmFrame->objects[0].fd, &primeHandle); - if (err < 0) { - SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, - "drmPrimeFDToHandle() failed: %d", - errno); - return; - } - - // Pass along the modifiers to DRM if there are some in the descriptor - if (drmFrame->objects[0].format_modifier != DRM_FORMAT_MOD_INVALID) { - flags |= DRM_MODE_FB_MODIFIERS; - } - + // DRM requires composed layers rather than separate layers per plane SDL_assert(drmFrame->nb_layers == 1); - SDL_assert(drmFrame->layers[0].nb_planes == 2); - for (int i = 0; i < 
drmFrame->layers[0].nb_planes; i++) { - handles[i] = primeHandle; - pitches[i] = drmFrame->layers[0].planes[i].pitch; - offsets[i] = drmFrame->layers[0].planes[i].offset; - modifiers[i] = drmFrame->objects[0].format_modifier; + + const auto &layer = drmFrame->layers[0]; + for (int i = 0; i < layer.nb_planes; i++) { + const auto &object = drmFrame->objects[layer.planes[i].object_index]; + + err = drmPrimeFDToHandle(m_DrmFd, object.fd, &handles[i]); + if (err < 0) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "drmPrimeFDToHandle() failed: %d", + errno); + if (m_BackendRenderer != nullptr) { + SDL_assert(drmFrame == &mappedFrame); + m_BackendRenderer->unmapDrmPrimeFrame(drmFrame); + } + return; + } + + pitches[i] = layer.planes[i].pitch; + offsets[i] = layer.planes[i].offset; + modifiers[i] = object.format_modifier; + + // Pass along the modifiers to DRM if there are some in the descriptor + if (modifiers[i] != DRM_FORMAT_MOD_INVALID) { + flags |= DRM_MODE_FB_MODIFIERS; + } } // Remember the last FB object we created so we can free it @@ -506,6 +546,12 @@ void DrmRenderer::renderFrame(AVFrame* frame) handles, pitches, offsets, (flags & DRM_MODE_FB_MODIFIERS) ? modifiers : NULL, &m_CurrentFbId, flags); + + if (m_BackendRenderer != nullptr) { + SDL_assert(drmFrame == &mappedFrame); + m_BackendRenderer->unmapDrmPrimeFrame(drmFrame); + } + if (err < 0) { SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "drmModeAddFB2WithModifiers() failed: %d", @@ -599,6 +645,27 @@ bool DrmRenderer::needsTestFrame() return true; } +bool DrmRenderer::testRenderFrame(AVFrame* frame) { + // If we have a backend renderer, we must make sure it can + // successfully export DRM PRIME frames. + if (m_BackendRenderer != nullptr) { + AVDRMFrameDescriptor drmDescriptor; + + // We shouldn't get here unless the backend at least claims + // it can export DRM PRIME frames. 
+ SDL_assert(m_BackendRenderer->canExportDrmPrime()); + + if (!m_BackendRenderer->mapDrmPrimeFrame(frame, &drmDescriptor)) { + // It can't, so we can't use this renderer. + return false; + } + + m_BackendRenderer->unmapDrmPrimeFrame(&drmDescriptor); + } + + return true; +} + bool DrmRenderer::isDirectRenderingSupported() { return m_SupportsDirectRendering; diff --git a/app/streaming/video/ffmpeg-renderers/drm.h b/app/streaming/video/ffmpeg-renderers/drm.h index 104175ca..f19f31b5 100644 --- a/app/streaming/video/ffmpeg-renderers/drm.h +++ b/app/streaming/video/ffmpeg-renderers/drm.h @@ -43,14 +43,16 @@ namespace DrmDefs class DrmRenderer : public IFFmpegRenderer { public: - DrmRenderer(); + DrmRenderer(IFFmpegRenderer *backendRenderer = nullptr); virtual ~DrmRenderer() override; virtual bool initialize(PDECODER_PARAMETERS params) override; virtual bool prepareDecoderContext(AVCodecContext* context, AVDictionary** options) override; virtual void renderFrame(AVFrame* frame) override; virtual enum AVPixelFormat getPreferredPixelFormat(int videoFormat) override; + virtual bool isPixelFormatSupported(int videoFormat, AVPixelFormat pixelFormat) override; virtual int getRendererAttributes() override; virtual bool needsTestFrame() override; + virtual bool testRenderFrame(AVFrame* frame) override; virtual bool isDirectRenderingSupported() override; virtual void setHdrMode(bool enabled) override; #ifdef HAVE_EGL @@ -65,6 +67,7 @@ private: const char* getDrmColorEncodingValue(AVFrame* frame); const char* getDrmColorRangeValue(AVFrame* frame); + IFFmpegRenderer* m_BackendRenderer; AVBufferRef* m_HwContext; int m_DrmFd; bool m_SdlOwnsDrmFd; diff --git a/app/streaming/video/ffmpeg-renderers/renderer.h b/app/streaming/video/ffmpeg-renderers/renderer.h index ae746b11..262b9772 100644 --- a/app/streaming/video/ffmpeg-renderers/renderer.h +++ b/app/streaming/video/ffmpeg-renderers/renderer.h @@ -7,6 +7,10 @@ extern "C" { #include + +#ifdef HAVE_DRM +#include +#endif } #ifdef 
HAVE_EGL @@ -184,4 +188,17 @@ public: // Free the ressources allocated during the last `exportEGLImages` call virtual void freeEGLImages(EGLDisplay, EGLImage[EGL_MAX_PLANES]) {} #endif + +#if HAVE_DRM + // By default we can't do DRM PRIME export + virtual bool canExportDrmPrime() { + return false; + } + + virtual bool mapDrmPrimeFrame(AVFrame*, AVDRMFrameDescriptor*) { + return false; + } + + virtual void unmapDrmPrimeFrame(AVDRMFrameDescriptor*) {} +#endif }; diff --git a/app/streaming/video/ffmpeg-renderers/vaapi.cpp b/app/streaming/video/ffmpeg-renderers/vaapi.cpp index 1c171c38..e2eb526b 100644 --- a/app/streaming/video/ffmpeg-renderers/vaapi.cpp +++ b/app/streaming/video/ffmpeg-renderers/vaapi.cpp @@ -698,3 +698,69 @@ VAAPIRenderer::freeEGLImages(EGLDisplay dpy, EGLImage images[EGL_MAX_PLANES]) { } #endif + +#ifdef HAVE_DRM + +bool VAAPIRenderer::canExportDrmPrime() +{ + // Our DRM renderer requires composed layers + return canExportSurfaceHandle(VA_EXPORT_SURFACE_COMPOSED_LAYERS); +} + +bool VAAPIRenderer::mapDrmPrimeFrame(AVFrame* frame, AVDRMFrameDescriptor* drmDescriptor) +{ + auto hwFrameCtx = (AVHWFramesContext*)frame->hw_frames_ctx->data; + AVVAAPIDeviceContext* vaDeviceContext = (AVVAAPIDeviceContext*)hwFrameCtx->device_ctx->hwctx; + VASurfaceID vaSurfaceId = (VASurfaceID)(uintptr_t)frame->data[3]; + VADRMPRIMESurfaceDescriptor vaDrmPrimeDescriptor; + + VAStatus st = vaExportSurfaceHandle(vaDeviceContext->display, + vaSurfaceId, + VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2, + VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_COMPOSED_LAYERS, + &vaDrmPrimeDescriptor); + if (st != VA_STATUS_SUCCESS) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "vaExportSurfaceHandle() failed: %d", st); + return false; + } + + st = vaSyncSurface(vaDeviceContext->display, vaSurfaceId); + if (st != VA_STATUS_SUCCESS) { + SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, + "vaSyncSurface() failed: %d", st); + for (uint32_t i = 0; i < vaDrmPrimeDescriptor.num_objects; i++) { + 
close(vaDrmPrimeDescriptor.objects[i].fd); + } + return false; + } + + // Map our VADRMPRIMESurfaceDescriptor to the AVDRMFrameDescriptor our caller wants + drmDescriptor->nb_objects = vaDrmPrimeDescriptor.num_objects; + for (uint32_t i = 0; i < vaDrmPrimeDescriptor.num_objects; i++) { + drmDescriptor->objects[i].fd = vaDrmPrimeDescriptor.objects[i].fd; + drmDescriptor->objects[i].size = vaDrmPrimeDescriptor.objects[i].size; + drmDescriptor->objects[i].format_modifier = vaDrmPrimeDescriptor.objects[i].drm_format_modifier; + } + drmDescriptor->nb_layers = vaDrmPrimeDescriptor.num_layers; + for (uint32_t i = 0; i < vaDrmPrimeDescriptor.num_layers; i++) { + drmDescriptor->layers[i].format = vaDrmPrimeDescriptor.layers[i].drm_format; + drmDescriptor->layers[i].nb_planes = vaDrmPrimeDescriptor.layers[i].num_planes; + for (uint32_t j = 0; j < vaDrmPrimeDescriptor.layers[i].num_planes; j++) { + drmDescriptor->layers[i].planes[j].object_index = vaDrmPrimeDescriptor.layers[i].object_index[j]; + drmDescriptor->layers[i].planes[j].offset = vaDrmPrimeDescriptor.layers[i].offset[j]; + drmDescriptor->layers[i].planes[j].pitch = vaDrmPrimeDescriptor.layers[i].pitch[j]; + } + } + + return true; +} + +void VAAPIRenderer::unmapDrmPrimeFrame(AVDRMFrameDescriptor* drmDescriptor) +{ + for (int i = 0; i < drmDescriptor->nb_objects; i++) { + close(drmDescriptor->objects[i].fd); + } +} + +#endif diff --git a/app/streaming/video/ffmpeg-renderers/vaapi.h b/app/streaming/video/ffmpeg-renderers/vaapi.h index 53e7e21e..b1933fac 100644 --- a/app/streaming/video/ffmpeg-renderers/vaapi.h +++ b/app/streaming/video/ffmpeg-renderers/vaapi.h @@ -50,6 +50,12 @@ public: virtual void freeEGLImages(EGLDisplay dpy, EGLImage[EGL_MAX_PLANES]) override; #endif +#if HAVE_DRM + virtual bool canExportDrmPrime() override; + virtual bool mapDrmPrimeFrame(AVFrame* frame, AVDRMFrameDescriptor* drmDescriptor) override; + virtual void unmapDrmPrimeFrame(AVDRMFrameDescriptor* drmDescriptor) override; +#endif + 
private: VADisplay openDisplay(SDL_Window* window); bool canExportSurfaceHandle(int layerTypeFlag); diff --git a/app/streaming/video/ffmpeg.cpp b/app/streaming/video/ffmpeg.cpp index aa91f9cd..764695a1 100644 --- a/app/streaming/video/ffmpeg.cpp +++ b/app/streaming/video/ffmpeg.cpp @@ -225,9 +225,24 @@ void FFmpegVideoDecoder::reset() } } -bool FFmpegVideoDecoder::createFrontendRenderer(PDECODER_PARAMETERS params, bool eglOnly) +bool FFmpegVideoDecoder::createFrontendRenderer(PDECODER_PARAMETERS params, bool useAlternateFrontend) { - if (eglOnly) { + if (useAlternateFrontend) { +#ifdef HAVE_DRM + // If we're trying to stream HDR, we need to use the DRM renderer in direct + // rendering mode so it can set the HDR metadata on the display. EGL does + // not currently support this (and even if it did, Mesa and Wayland don't + // currently have protocols to actually get that metadata to the display). + if (params->videoFormat == VIDEO_FORMAT_H265_MAIN10 && m_BackendRenderer->canExportDrmPrime()) { + m_FrontendRenderer = new DrmRenderer(m_BackendRenderer); + if (m_FrontendRenderer->initialize(params)) { + return true; + } + delete m_FrontendRenderer; + m_FrontendRenderer = nullptr; + } +#endif + #ifdef HAVE_EGL if (m_BackendRenderer->canExportEGL()) { m_FrontendRenderer = new EGLRenderer(m_BackendRenderer); @@ -258,13 +273,13 @@ bool FFmpegVideoDecoder::createFrontendRenderer(PDECODER_PARAMETERS params, bool return true; } -bool FFmpegVideoDecoder::completeInitialization(const AVCodec* decoder, PDECODER_PARAMETERS params, bool testFrame, bool eglOnly) +bool FFmpegVideoDecoder::completeInitialization(const AVCodec* decoder, PDECODER_PARAMETERS params, bool testFrame, bool useAlternateFrontend) { // In test-only mode, we should only see test frames SDL_assert(!m_TestOnly || testFrame); // Create the frontend renderer based on the capabilities of the backend renderer - if (!createFrontendRenderer(params, eglOnly)) { + if (!createFrontendRenderer(params, 
useAlternateFrontend)) { return false; } @@ -632,7 +647,7 @@ bool FFmpegVideoDecoder::tryInitializeRenderer(const AVCodec* decoder, { m_HwDecodeCfg = hwConfig; - // i == 0 - Indirect via EGL frontend with zero-copy DMA-BUF passing + // i == 0 - Indirect via EGL or DRM frontend with zero-copy DMA-BUF passing // i == 1 - Direct rendering or indirect via SDL read-back #ifdef HAVE_EGL for (int i = 0; i < 2; i++) { @@ -642,7 +657,7 @@ bool FFmpegVideoDecoder::tryInitializeRenderer(const AVCodec* decoder, SDL_assert(m_BackendRenderer == nullptr); if ((m_BackendRenderer = createRendererFunc()) != nullptr && m_BackendRenderer->initialize(params) && - completeInitialization(decoder, params, m_TestOnly || m_BackendRenderer->needsTestFrame(), i == 0 /* EGL */)) { + completeInitialization(decoder, params, m_TestOnly || m_BackendRenderer->needsTestFrame(), i == 0 /* EGL/DRM */)) { if (m_TestOnly) { // This decoder is only for testing capabilities, so don't bother // creating a usable renderer @@ -654,7 +669,7 @@ bool FFmpegVideoDecoder::tryInitializeRenderer(const AVCodec* decoder, reset(); if ((m_BackendRenderer = createRendererFunc()) != nullptr && m_BackendRenderer->initialize(params) && - completeInitialization(decoder, params, false, i == 0 /* EGL */)) { + completeInitialization(decoder, params, false, i == 0 /* EGL/DRM */)) { return true; } else { diff --git a/app/streaming/video/ffmpeg.h b/app/streaming/video/ffmpeg.h index ec89b27c..a983569c 100644 --- a/app/streaming/video/ffmpeg.h +++ b/app/streaming/video/ffmpeg.h @@ -29,7 +29,7 @@ public: virtual IFFmpegRenderer* getBackendRenderer(); private: - bool completeInitialization(const AVCodec* decoder, PDECODER_PARAMETERS params, bool testFrame, bool eglOnly); + bool completeInitialization(const AVCodec* decoder, PDECODER_PARAMETERS params, bool testFrame, bool useAlternateFrontend); void stringifyVideoStats(VIDEO_STATS& stats, char* output); @@ -37,7 +37,7 @@ private: void addVideoStats(VIDEO_STATS& src, VIDEO_STATS& 
dst); - bool createFrontendRenderer(PDECODER_PARAMETERS params, bool eglOnly); + bool createFrontendRenderer(PDECODER_PARAMETERS params, bool useAlternateFrontend); bool tryInitializeRendererForDecoderByName(const char* decoderName, PDECODER_PARAMETERS params);