MythTV master
mythopenglvideo.cpp
1// std
2#include <utility>
3
4// Qt
5#include <QPen>
6
7// MythTV
11#include "mythavutil.h"
15#include "tv.h"
16
17// FFmpeg
18extern "C" {
19#include "libavutil/stereo3d.h"
20}
21
22#define LOC QString("GLVid: ")
23// static constexpr int8_t MAX_VIDEO_TEXTURES { 10 }; // YV12 Kernel deinterlacer + 1
24
35MythOpenGLVideo::MythOpenGLVideo(MythRenderOpenGL* Render, MythVideoColourSpace* ColourSpace,
36 MythVideoBounds* Bounds, const MythVideoProfilePtr& VideoProfile, const QString& Profile)
37 : MythVideoGPU(Render, ColourSpace, Bounds, VideoProfile, Profile),
38 m_openglRender(Render)
39{
41 {
42 LOG(VB_GENERAL, LOG_ERR, LOC + "Fatal error");
43 return;
44 }
45
47 if (m_openglRender->isOpenGLES())
48 m_gles = m_openglRender->format().majorVersion();
49
50 // Set OpenGL feature support
51 m_features = m_openglRender->GetFeatures();
52 m_extraFeatures = m_openglRender->GetExtraFeatures();
53 m_valid = true;
54
55 m_chromaUpsamplingFilter = gCoreContext->GetBoolSetting("ChromaUpsamplingFilter", true);
56 LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Chroma upsampling filter %1")
57 .arg(m_chromaUpsamplingFilter ? "enabled" : "disabled"));
58}
59
60MythOpenGLVideo::~MythOpenGLVideo()
61{
62 if (!m_openglRender)
63 return;
64
67 delete m_toneMap;
69}
70
71void MythOpenGLVideo::ColourSpaceUpdate(bool PrimariesChanged)
72{
74
75 // if input/output type are unset - we haven't created the shaders yet
76 if (PrimariesChanged && (m_outputType != FMT_NONE))
77 {
78 LOG(VB_GENERAL, LOG_INFO, LOC + "Primaries conversion changed - recreating shaders");
80 }
81
82 float colourgamma = m_videoColourSpace->GetColourGamma();
83 float displaygamma = 1.0F / m_videoColourSpace->GetDisplayGamma();
84 QMatrix4x4 primary = m_videoColourSpace->GetPrimaryMatrix();
85 for (size_t i = Progressive; i < ShaderCount; ++i)
86 {
88 m_openglRender->SetShaderProgramParams(m_shaders[i], primary, "m_primaryMatrix");
89 if (m_shaders[i])
90 {
91 m_shaders[i]->setUniformValue("m_colourGamma", colourgamma);
92 m_shaders[i]->setUniformValue("m_displayGamma", displaygamma);
93 }
94 }
95}
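// Illustrative note (not part of the original file): with typical 2.2 gamma content and a 2.2
// display gamma, the uniforms set above would be roughly m_colourGamma = 2.2 and
// m_displayGamma = 1/2.2 ~= 0.4545, i.e. the shader linearises with the source gamma and then
// re-encodes with the inverse of the display gamma.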
96
97void MythOpenGLVideo::UpdateShaderParameters()
98{
99 if (m_inputTextureSize.isEmpty())
100 return;
101
103 bool rect = m_textureTarget == QOpenGLTexture::TargetRectangle;
104 GLfloat lineheight = rect ? 1.0F : 1.0F / m_inputTextureSize.height();
105 GLfloat maxheight = rect ? m_videoDispDim.height() : m_videoDispDim.height() /
106 static_cast<GLfloat>(m_inputTextureSize.height());
107 GLfloat fieldsize = rect ? 0.5F : m_inputTextureSize.height() / 2.0F;
108 QVector4D parameters(lineheight, /* lineheight */
109 static_cast<GLfloat>(m_inputTextureSize.width()), /* 'Y' select */
110 maxheight - lineheight, /* maxheight */
111 fieldsize /* fieldsize */);
112
113 for (size_t i = Progressive; i < ShaderCount; ++i)
114 {
115 if (m_shaders[i])
116 {
118 m_shaders[i]->setUniformValue("m_frameData", parameters);
119 if (BicubicUpsize == i)
120 {
121 QVector2D size { rect ? 1.0F : static_cast<GLfloat>(m_videoDim.width()),
122 rect ? 1.0F : static_cast<GLfloat>(m_videoDim.height()) };
123 m_shaders[i]->setUniformValue("m_textureSize", size);
124 }
125 }
126 }
127}
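// Illustrative worked example (not part of the original file): for a 1920x1080 frame held in a
// plain Target2D texture allocated at exactly 1920x1080, the vector packed above would be
// approximately (0.000926, 1920.0, 0.999074, 540.0):
//   lineheight = 1/1080, 'Y' select = texture width, maxheight = 1080/1080 - lineheight,
//   fieldsize = 1080/2
// so the shaders can step exactly one source line and clamp sampling to the last valid line.
// Rectangle textures use unnormalised coordinates, hence the separate 'rect' values above.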
128
130{
134}
135
137{
138 // If switching off/from basic deinterlacing, then we need to delete and
139 // recreate the input textures and sometimes the shaders as well - so start
140 // from scratch
142 {
143 // Note. Textures will be created with linear filtering - which matches
144 // no resizing - which should be the case for the basic deinterlacer - and
145 // the call to SetupFrameFormat will reset resizing anyway
146 LOG(VB_PLAYBACK, LOG_INFO, LOC + "Removing single field textures");
147 // revert to YUY2 if preferred
148 if ((m_inputType == FMT_YV12) && (m_profile == "opengl"))
151 emit OutputChanged(m_videoDim, m_videoDim, -1.0F);
152 }
155 m_deinterlacer2x = false;
156}
157
158bool MythOpenGLVideo::AddDeinterlacer(const MythVideoFrame* Frame, FrameScanType Scan,
159 MythDeintType Filter /* = DEINT_SHADER */,
160 bool CreateReferences /* = true */)
161{
162 if (!Frame)
163 return false;
164
165 // do we want an opengl shader?
166 // shaders trump CPU deinterlacers if selected and driver deinterlacers will only
167 // be available under restricted circumstances
168 // N.B. there should in theory be no situation in which shader deinterlacing is not
169 // available for software formats, hence there should be no need to fallback to cpu
170
171 if (!is_interlaced(Scan) || Frame->m_alreadyDeinterlaced)
172 {
174 return false;
175 }
176
177 m_deinterlacer2x = true;
178 MythDeintType deinterlacer = Frame->GetDoubleRateOption(Filter);
179 MythDeintType other = Frame->GetDoubleRateOption(DEINT_DRIVER);
180 if (other) // another double rate deinterlacer is enabled
181 {
183 return false;
184 }
185
186 if (!deinterlacer)
187 {
188 m_deinterlacer2x = false;
189 deinterlacer = Frame->GetSingleRateOption(Filter);
190 other = Frame->GetSingleRateOption(DEINT_DRIVER);
191 if (!deinterlacer || other) // no shader deinterlacer needed
192 {
194 return false;
195 }
196 }
197
198 // if we get this far, we cannot use driver deinterlacers, shader deints
199 // are preferred over cpu, we have a deinterlacer but don't actually care whether
200 // it is single or double rate
201 if (m_deinterlacer == deinterlacer || (m_fallbackDeinterlacer && (m_fallbackDeinterlacer == deinterlacer)))
202 return true;
203
204 // Lock
206
207 // delete old reference textures
210
211 // For basic deinterlacing of software frames, we now create 2 sets of field
212 // based textures - which is the same approach taken by the CPU based onefield/bob
213 // deinterlacer and the EGL basic deinterlacer. The advantages of this
214 // approach are:-
215 // - no dependent texturing in the samplers (it is just a basic YUV to RGB conversion
216 // in the shader)
217 // - better quality (the onefield shader line doubles but does not have the
218 // implicit interpolation/smoothing of using separate textures directly,
219 // which leads to 'blockiness').
220 // - as we are not sampling other fields, there is no need to use an intermediate
221 // framebuffer to ensure accurate sampling - so we can skip the resize stage.
222 //
223 // YUYV formats are currently not supported as they do not work correctly - force YV12 instead.
224
225 if (deinterlacer == DEINT_BASIC && MythVideoFrame::YUVFormat(m_inputType))
226 {
227 if (m_outputType == FMT_YUY2)
228 {
229 LOG(VB_GENERAL, LOG_INFO, LOC + "Forcing OpenGL YV12 for basic deinterlacer");
230 m_outputType = FMT_YV12;
231 }
233 QSize size(m_videoDim.width(), m_videoDim.height() >> 1);
234 std::vector<QSize> sizes;
235 sizes.emplace_back(size);
236 // N.B. If we are currently resizing, it will be turned off for this
237 // deinterlacer, so the default linear texture filtering is OK.
238 m_inputTextures = MythVideoTextureOpenGL::CreateTextures(m_openglRender, m_inputType, m_outputType, sizes);
239 // nextTextures will hold the other field
240 m_nextTextures = MythVideoTextureOpenGL::CreateTextures(m_openglRender, m_inputType, m_outputType, sizes);
241 LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created %1 single field textures")
242 .arg(m_inputTextures.size() * 2));
243 // Con MythVideoBounds into displaying the field only
244 emit OutputChanged(m_videoDim, size, -1.0F);
245 }
246
247 // sanity check max texture units. Should only be an issue on old hardware (e.g. Pi)
248 int max = m_openglRender->GetMaxTextureUnits();
249 uint refstocreate = ((deinterlacer == DEINT_HIGH) && CreateReferences) ? 2 : 0;
250 int totaltextures = static_cast<int>(MythVideoFrame::GetNumPlanes(m_outputType)) * static_cast<int>(refstocreate + 1);
251 if (totaltextures > max)
252 {
253 m_fallbackDeinterlacer = deinterlacer;
254 LOG(VB_GENERAL, LOG_WARNING, LOC + QString("Insufficient texture units for deinterlacer '%1' (%2 < %3)")
255 .arg(MythVideoFrame::DeinterlacerName(deinterlacer | DEINT_SHADER, m_deinterlacer2x)).arg(max).arg(totaltextures));
256 deinterlacer = DEINT_BASIC;
257 LOG(VB_GENERAL, LOG_WARNING, LOC + QString("Falling back to '%1'")
258 .arg(MythVideoFrame::DeinterlacerName(deinterlacer | DEINT_SHADER, m_deinterlacer2x)));
259 }
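// Illustrative arithmetic (not part of the original file): for the kernel deinterlacer with
// reference frames on a 3-plane YV12 output, totaltextures = 3 * (2 + 1) = 9. A GPU exposing
// only 8 fragment texture units (the GLES2 minimum, seen on older Raspberry Pi class hardware)
// would take this branch and drop back to the basic deinterlacer.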
260
261 // create new deinterlacers - the old ones will be deleted
262 if (!(CreateVideoShader(InterlacedBot, deinterlacer) && CreateVideoShader(InterlacedTop, deinterlacer)))
263 return false;
264
265 // create the correct number of reference textures
266 if (refstocreate)
267 {
268 std::vector<QSize> sizes;
269 sizes.emplace_back(m_videoDim);
272 // ensure we use GL_NEAREST if resizing is already active and needed
273 if ((m_resizing & Sampling) == Sampling)
274 {
277 }
278 }
279
280 // ensure they work correctly
281 UpdateColourSpace(false);
283 m_deinterlacer = deinterlacer;
284
285 LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created deinterlacer '%1' (%2->%3)")
289 return true;
290}
291
299bool MythOpenGLVideo::CreateVideoShader(VideoShaderType Type, MythDeintType Deint)
300{
301 if (!m_openglRender || !(m_features & QOpenGLFunctions::Shaders))
302 return false;
303
304 // delete the old
305 if (m_shaders[Type])
306 m_openglRender->DeleteShaderProgram(m_shaders[Type]);
307 m_shaders[Type] = nullptr;
308
309 QStringList defines;
310 QString vertex = DefaultVertexShader;
311 QString fragment;
312 int cost = 1;
313
314 if (m_textureTarget == GL_TEXTURE_EXTERNAL_OES)
315 defines << "EXTOES";
316
317 if ((Default == Type) || (BicubicUpsize == Type) || (!MythVideoFrame::YUVFormat(m_outputType)))
318 {
319 QString glsldefines;
320 for (const QString& define : std::as_const(defines))
321 glsldefines += QString("#define MYTHTV_%1\n").arg(define);
322 fragment = glsldefines + YUVFragmentExtensions + ((BicubicUpsize == Type) ? BicubicShader : RGBFragmentShader);
323
324#if CONFIG_MEDIACODEC
326 vertex = MediaCodecVertexShader;
327#endif
328 }
329 // no interlaced shaders yet (i.e. interlaced chroma upsampling - not deinterlacers)
330 else
331 {
332 fragment = YUVFragmentShader;
333 QString extensions = YUVFragmentExtensions;
334 QString glsldefines;
335
336 // Any software frames that are not 8bit need to use unsigned integer
337 // samplers with GLES3.x - which need more modern shaders
339 {
340 static const QString glsl300("#version 300 es\n");
341 fragment = GLSL300YUVFragmentShader;
342 extensions = GLSL300YUVFragmentExtensions;
343 vertex = glsl300 + GLSL300VertexShader;
344 glsldefines.append(glsl300);
345 }
346
347 bool kernel = false;
348 bool topfield = InterlacedTop == Type;
349 bool progressive = (Progressive == Type) || (Deint == DEINT_NONE);
350 if (MythVideoFrame::FormatIs420(m_outputType) || MythVideoFrame::FormatIs422(m_outputType) || MythVideoFrame::FormatIs444(m_outputType))
351 {
352 defines << "YV12";
353 cost = 3;
354 }
355 else if (MythVideoFrame::FormatIsNV12(m_outputType))
356 {
357 defines << "NV12";
358 cost = 2;
359 }
360 else if (FMT_YUY2 == m_outputType)
361 {
362 defines << "YUY2";
363 }
364
365#if CONFIG_VIDEOTOOLBOX
366 // N.B. Rectangular texture support is only currently used for VideoToolBox
367 // video frames which are NV12. Do not use rectangular textures for the 'default'
368 // shaders as it breaks video resizing and would require changes to our
369 // FramebufferObject code.
370 if ((m_textureTarget == QOpenGLTexture::TargetRectangle) && (Default != Type))
371 defines << "RECTS";
372#endif
373 if (!progressive)
374 {
375 bool basic = Deint == DEINT_BASIC && MythVideoFrame::YUVFormat(m_inputType);
376 // Chroma upsampling filter
378 m_chromaUpsamplingFilter && !basic)
379 {
380 defines << "CUE";
381 }
382
383 // field
384 if (topfield && !basic)
385 defines << "TOPFIELD";
386
387 switch (Deint)
388 {
389 case DEINT_BASIC:
390 if (!basic)
391 {
392 cost *= 2;
393 defines << "ONEFIELD";
394 }
395 break;
396 case DEINT_MEDIUM: cost *= 5; defines << "LINEARBLEND"; break;
397 case DEINT_HIGH: cost *= 15; defines << "KERNEL"; kernel = true; break;
398 default: break;
399 }
400 }
401
402 // Start building the new fragment shader
403 // We do this in code otherwise the shader string becomes monolithic
404
405 // 'expand' calls to sampleYUV for multiple planes
406 // do this before we add the samplers
407 int count = static_cast<int>(MythVideoFrame::GetNumPlanes(m_outputType));
408 for (int i = (kernel ? 2 : 0); (i >= 0) && count; i--)
409 {
410 QString find = QString("s_texture%1").arg(i);
411 QStringList replacelist;
412 for (int j = (i * count); j < ((i + 1) * count); ++j)
413 replacelist << QString("s_texture%1").arg(j);
414 fragment.replace(find, replacelist.join(", "));
415 }
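// Illustrative example (not part of the original file): for a 3-plane YV12 output with the
// kernel deinterlacer, the descending loop above rewrites the shader call sites as
//   "s_texture2" -> "s_texture6, s_texture7, s_texture8"
//   "s_texture1" -> "s_texture3, s_texture4, s_texture5"
//   "s_texture0" -> "s_texture0, s_texture1, s_texture2"
// Working from the highest index down ensures names inserted by a later replacement are never
// rewritten again. A minimal standalone equivalent using the same Qt calls (texcoord name is
// illustrative only):
//   QString s("sampleYUV(s_texture0, v_texcoord0)");
//   s.replace("s_texture0", QStringList{"s_texture0", "s_texture1", "s_texture2"}.join(", "));
//   // s == "sampleYUV(s_texture0, s_texture1, s_texture2, v_texcoord0)"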
416
417 // 'expand' calls to the kernel function
418 if (kernel && count)
419 {
420 for (int i = 1 ; i >= 0; i--)
421 {
422 QString find1 = QString("sampler2D kernelTex%1").arg(i);
423 QString find2 = QString("kernelTex%1").arg(i);
424 QStringList replacelist1;
425 QStringList replacelist2;
426 for (int j = 0; j < count; ++j)
427 {
428 replacelist1 << QString("sampler2D kernelTexture%1%2").arg(i).arg(j);
429 replacelist2 << QString("kernelTexture%1%2").arg(i).arg(j);
430 }
431 fragment.replace(find1, replacelist1.join(", "));
432 fragment.replace(find2, replacelist2.join(", "));
433 }
434 }
435
436 // Retrieve colour mapping defines
437 defines << m_videoColourSpace->GetColourMappingDefines();
438
439 // Add defines
440 for (const QString& define : std::as_const(defines))
441 glsldefines += QString("#define MYTHTV_%1\n").arg(define);
442
443 // Add the required samplers
444 int start = 0;
445 int end = count;
446 if (kernel)
447 {
448 end *= 3;
449 if (topfield)
450 start += count;
451 else
452 end -= count;
453 }
454 QString glslsamplers;
455 for (int i = start; i < end; ++i)
456 glslsamplers += QString("uniform sampler2D s_texture%1;\n").arg(i);
457
458 // construct the final shader string
459 fragment = glsldefines + extensions + glslsamplers + fragment;
460 }
461
462 m_shaderCost[Type] = cost;
463 QOpenGLShaderProgram *program = m_openglRender->CreateShaderProgram(vertex, fragment);
464 if (!program)
465 return false;
466
467 m_shaders[Type] = program;
468 return true;
469}
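// Illustrative example (not part of the original file): for a progressive 3-plane YV12 frame on
// desktop GL, the string assembled above would begin roughly
//   #define MYTHTV_YV12
//   <YUVFragmentExtensions>
//   uniform sampler2D s_texture0;
//   uniform sampler2D s_texture1;
//   uniform sampler2D s_texture2;
//   <YUVFragmentShader body with the expanded sampler arguments>
// i.e. defines, then extensions, then samplers, then the shared fragment body.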
470
472 QSize Size, GLenum TextureTarget)
473{
474 QString texnew { "2D" };
475 if (TextureTarget == QOpenGLTexture::TargetRectangle)
476 texnew = "Rect";
477 else if (TextureTarget == GL_TEXTURE_EXTERNAL_OES)
478 texnew = "OES";
479
480 QString texold { "2D" };
481 if (m_textureTarget == QOpenGLTexture::TargetRectangle)
482 texold = "Rect";
483 else if (m_textureTarget == GL_TEXTURE_EXTERNAL_OES)
484 texold = "OES";
485
486 LOG(VB_GENERAL, LOG_INFO, LOC +
487 QString("New frame format: %1:%2 %3x%4 (Tex: %5) -> %6:%7 %8x%9 (Tex: %10)")
490 QString::number(m_videoDim.width()),
491 QString::number(m_videoDim.height()),
492 texold,
495 QString::number(Size.width()),
496 QString::number(Size.height()))
497 .arg(texnew));
498
500
501 m_inputType = InputType;
502 m_outputType = OutputType;
503 m_textureTarget = TextureTarget;
504 m_videoDim = Size;
505
506 // This is only currently needed for RGBA32 frames from composed DRM_PRIME
507 // textures that may be half height for simple bob deinterlacing
509 emit OutputChanged(m_videoDim, m_videoDim, -1.0F);
510
511 if (!MythVideoFrame::HardwareFormat(InputType))
512 {
513 std::vector<QSize> sizes;
514 sizes.push_back(Size);
516 if (m_inputTextures.empty())
517 {
518 LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to create input textures");
519 return false;
520 }
521
522 m_inputTextureSize = m_inputTextures[0]->m_totalSize;
523 LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created %1 input textures for '%2'")
524 .arg(m_inputTextures.size()).arg(GetProfile()));
525 }
526 else
527 {
528 m_inputTextureSize = Size;
529 }
530
531 // Create shaders
533 return false;
534
535 UpdateColourSpace(false);
537 return true;
538}
539
540void MythOpenGLVideo::ResetFrameFormat()
541{
542 for (auto & shader : m_shaders)
543 if (shader)
544 m_openglRender->DeleteShaderProgram(shader);
545 m_shaders.fill(nullptr);
546 m_shaderCost.fill(1);
550 m_textureTarget = QOpenGLTexture::Target2D;
554 m_frameBuffer = nullptr;
555 m_frameBufferTexture = nullptr;
556
558}
559
561void MythOpenGLVideo::PrepareFrame(MythVideoFrame* Frame, FrameScanType Scan)
562{
563 if (Frame->m_type == FMT_NONE)
564 return;
565
566 // Hardware frames are retrieved/updated in PrepareFrame but we need to
567 // reset software frames now if necessary
569 {
571 {
572 LOG(VB_PLAYBACK, LOG_INFO, LOC + "Resetting input format");
574 }
575 return;
576 }
577
578 // Sanitise frame
579 if ((Frame->m_width < 1) || (Frame->m_height < 1) || !Frame->m_buffer)
580 {
581 LOG(VB_GENERAL, LOG_ERR, LOC + "Invalid software frame");
582 return;
583 }
584
585 // Can we render this frame format
586 if (!MythVideoFrame::YUVFormat(Frame->m_type))
587 {
588 LOG(VB_GENERAL, LOG_ERR, LOC + "Frame format is not supported");
589 return;
590 }
591
592 // lock
594
595 // check for input changes
596 if ((Frame->m_width != m_videoDim.width()) ||
597 (Frame->m_height != m_videoDim.height()) ||
598 (Frame->m_type != m_inputType))
599 {
600 VideoFrameType frametype = Frame->m_type;
601 if ((frametype == FMT_YV12) && (m_profile == "opengl"))
602 frametype = FMT_YUY2;
603 QSize size(Frame->m_width, Frame->m_height);
604 if (!SetupFrameFormat(Frame->m_type, frametype, size, QOpenGLTexture::Target2D))
605 return;
606 }
607
608 // Setup deinterlacing if required
609 AddDeinterlacer(Frame, Scan);
610
611 if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
612 m_openglRender->logDebugMarker(LOC + "UPDATE_FRAME_START");
613
615
616 // Rotate textures if necessary
617 bool current = true;
619 {
620 if (!m_nextTextures.empty() && !m_prevTextures.empty())
621 {
622 if (qAbs(Frame->m_frameCounter - m_discontinuityCounter) > 1)
624 std::vector<MythVideoTextureOpenGL*> temp = m_prevTextures;
627 m_nextTextures = temp;
628 current = false;
629 }
630 }
631
632 m_discontinuityCounter = Frame->m_frameCounter;
633
635 {
636 // first field. Fake the pitches
637 FramePitches pitches = Frame->m_pitches;
638 Frame->m_pitches[0] = Frame->m_pitches[0] << 1;
639 Frame->m_pitches[1] = Frame->m_pitches[1] << 1;
640 Frame->m_pitches[2] = Frame->m_pitches[2] << 1;
642 // second field. Fake the offsets as well.
643 FrameOffsets offsets = Frame->m_offsets;
644 Frame->m_offsets[0] = Frame->m_offsets[0] + pitches[0];
645 Frame->m_offsets[1] = Frame->m_offsets[1] + pitches[1];
646 Frame->m_offsets[2] = Frame->m_offsets[2] + pitches[2];
648 Frame->m_pitches = pitches;
649 Frame->m_offsets = offsets;
650 }
651 else
652 {
654 }
655
656 if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
657 m_openglRender->logDebugMarker(LOC + "UPDATE_FRAME_END");
658}
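// Illustrative sketch (not part of the original file): the doubled pitches and shifted offsets
// above make each texture upload read every other line of the interleaved frame, so the two
// uploads receive the top and bottom fields as contiguous half-height images. A hypothetical
// standalone equivalent for a single plane follows; the function name is illustrative only.
#include <cstdint>
#include <cstring>

// Copy one field (Top: lines 0,2,4,..., otherwise lines 1,3,5,...) into a packed buffer.
static void ExtractField(const uint8_t* FramePtr, uint8_t* FieldPtr,
                         int Width, int Height, int Pitch, bool Top)
{
    const uint8_t* src = FramePtr + (Top ? 0 : Pitch);                    // faked offset: +1 line
    for (int y = 0; y < Height / 2; ++y)
        std::memcpy(FieldPtr + y * Width, src + (y * 2) * Pitch, Width);  // faked pitch: x2
}
// MythOpenGLVideo avoids any copy by letting the texture upload walk the frame with the
// doubled pitches, but the addressing is the same.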
659
660void MythOpenGLVideo::RenderFrame(MythVideoFrame* Frame, bool TopFieldFirst, FrameScanType Scan,
661 StereoscopicMode StereoOverride, bool DrawBorder)
662{
663 if (!m_openglRender)
664 return;
665
667
668 if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
669 m_openglRender->logDebugMarker(LOC + "RENDER_FRAME_START");
670
671 // Set required input textures for the last stage
672 // ProcessFrame is always called first, which will create/destroy software
673 // textures as needed
674 bool hwframes = false;
675 bool useframebufferimage = false;
676
677 // for tiled renderers (e.g. Pi), enable scissoring to try and improve performance
678 // when not full screen and avoid the resize stage unless absolutely necessary
679 bool tiled = m_extraFeatures & kGLTiled;
680
681 // We lose the pause frame when seeking and using VDPAU/VAAPI/NVDEC direct rendering.
682 // As a workaround, we always use the resize stage so the last displayed frame
683 // should be retained in the Framebuffer used for resizing. If there is
684 // nothing to display, then fallback to this framebuffer.
685 // N.B. this is now strictly necessary with v4l2 and DRM PRIME direct rendering
686 // but ignore it for now for performance reasons
687 VideoResizing resize;
688 if (Frame)
690 else
692
693 std::vector<MythVideoTextureOpenGL*> inputtextures = m_inputTextures;
694 if (inputtextures.empty())
695 {
696 // This is experimental support for direct rendering to a framebuffer (e.g. DRM).
697 // It may be removed or refactored (e.g. pass presentation details through to
698 // the interop).
699 if (Frame)
700 {
701 Frame->m_displayed = false;
702 Frame->m_srcRect = m_videoRect;
703 Frame->m_dstRect = m_displayVideoRect;
704 }
705
706 // Pull in any hardware frames
707 inputtextures = MythVideoTextureOpenGL::Retrieve(m_openglRender, m_videoColourSpace, Frame, Scan);
708
709 if (Frame && Frame->m_displayed)
710 return;
711
712 if (!inputtextures.empty())
713 {
714 hwframes = true;
715 QSize newsize = inputtextures[0]->m_size;
716 VideoFrameType newsourcetype = inputtextures[0]->m_frameType;
717 VideoFrameType newtargettype = inputtextures[0]->m_frameFormat;
718 GLenum newtargettexture = inputtextures[0]->m_target;
719 if ((m_outputType != newtargettype) || (m_textureTarget != newtargettexture) ||
720 (m_inputType != newsourcetype) || (newsize != m_inputTextureSize))
721 {
722 SetupFrameFormat(newsourcetype, newtargettype, newsize, newtargettexture);
723 }
724
725#if CONFIG_MEDIACODEC
726 // Set the texture transform for mediacodec
728 {
729 if (inputtextures[0]->m_transform && m_shaders[Default])
730 {
732 m_shaders[Default]->setUniformValue("u_transform", *inputtextures[0]->m_transform);
733 }
734 }
735#endif
736 // Enable deinterlacing for NVDEC, VTB and VAAPI DRM if VPP is not available
737 if (inputtextures[0]->m_allowGLSLDeint)
738 AddDeinterlacer(Frame, Scan, DEINT_SHADER | DEINT_CPU, false); // pickup shader or cpu prefs
739 }
740 else
741 {
742 if ((resize == Framebuffer) && m_frameBuffer && m_frameBufferTexture)
743 {
744 LOG(VB_PLAYBACK, LOG_DEBUG, "Using existing framebuffer");
745 useframebufferimage = true;
746 }
747 else
748 {
749 LOG(VB_PLAYBACK, LOG_DEBUG, LOC + "Nothing to display");
750 // if this is live tv startup and the window rect has changed we
751 // must set the viewport
753 return;
754 }
755 }
756 }
757
758 // Determine which shader to use. This helps optimise the resize check.
759 bool deinterlacing = false;
760 bool basicdeinterlacing = false;
761 bool yuvoutput = MythVideoFrame::YUVFormat(m_outputType);
762 VideoShaderType program = yuvoutput ? Progressive : Default;
764 {
765 if (Scan == kScan_Interlaced)
766 {
767 program = TopFieldFirst ? InterlacedTop : InterlacedBot;
768 deinterlacing = true;
769 }
770 else if (Scan == kScan_Intr2ndField)
771 {
772 program = TopFieldFirst ? InterlacedBot : InterlacedTop;
773 deinterlacing = true;
774 }
775
776 // select the correct field for the basic deinterlacer
778 {
779 basicdeinterlacing = true;
780 if (program == InterlacedBot)
781 inputtextures = m_nextTextures;
782 }
783 }
784
785 // Set deinterlacer type for debug OSD
786 if (deinterlacing && Frame)
787 {
788 Frame->m_deinterlaceInuse = m_deinterlacer | DEINT_SHADER;
789 Frame->m_deinterlaceInuse2x = m_deinterlacer2x;
790 }
791
792 // Tonemapping can only render to a texture
793 if (m_toneMap)
794 resize |= ToneMap;
795
796 // Decide whether to use render to texture - for performance or quality
797 if (yuvoutput && !resize)
798 {
799 // ensure deinterlacing works correctly when down scaling in height
800 // N.B. not needed for the basic deinterlacer
801 if (deinterlacing && !basicdeinterlacing && (m_videoDispDim.height() > m_displayVideoRect.height()))
802 resize |= Deinterlacer;
803
804 // NB GL_NEAREST introduces some 'minor' chroma sampling errors
805 // for the following 2 cases. For YUY2 this may be better handled in the
806 // shader. For GLES3.0 10bit textures - Vulkan is probably the better solution.
807
808 // UYVY packed pixels must be sampled exactly with GL_NEAREST
809 if (FMT_YUY2 == m_outputType)
810 resize |= Sampling;
811 // unsigned integer texture formats need GL_NEAREST sampling
813 resize |= Sampling;
814
815 // don't enable resizing if the cost of a framebuffer switch may be
816 // prohibitive (e.g. Raspberry Pi/tiled renderers) or for basic deinterlacing,
817 // where we are trying to simplify/optimise rendering (and the framebuffer
818 // sizing gets confused by the change to m_videoDispDim)
819 if (!resize && !tiled && !basicdeinterlacing)
820 {
821 // improve performance. This is an educated guess on the relative cost
822 // of render to texture versus straight rendering.
823 int totexture = m_videoDispDim.width() * m_videoDispDim.height() * m_shaderCost[program];
824 int blitcost = m_displayVideoRect.width() * m_displayVideoRect.height() * m_shaderCost[Default];
825 int noresizecost = m_displayVideoRect.width() * m_displayVideoRect.height() * m_shaderCost[program];
826 if ((totexture + blitcost) < noresizecost)
827 resize |= Performance;
828 }
829 }
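// Illustrative arithmetic (not part of the original file): kernel-deinterlacing a 1920x1080
// YV12 frame (shader cost 3 * 15 = 45) into a 3840x2160 window gives
//   totexture + blitcost ~= 1920*1080*45 + 3840*2160*1 ~= 101.6M
//   noresizecost         ~= 3840*2160*45               ~= 373.2M
// so the Performance flag is set and the frame is deinterlaced at source resolution before
// being scaled, rather than running the expensive shader once per display pixel.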
830
831 // Bicubic upsizing - test this after all other resize options have been checked
832 // to ensure it is not the only flag set
833 if (m_bicubicUpsize)
834 SetupBicubic(resize);
835
836 // We don't need an extra stage prior to bicubic if the frame is already RGB (e.g. VDPAU, MediaCodec)
837 // So bypass if we only set resize for bicubic.
838 bool needresize = resize && (!MythVideoFrame::FormatIsRGB(m_outputType) || (resize != Bicubic));
839
840 // set software frame filtering if resizing has changed
841 if (!needresize && m_resizing)
842 {
843 // remove framebuffer
844 if (m_frameBufferTexture)
845 {
846 m_openglRender->DeleteTexture(m_frameBufferTexture);
847 m_frameBufferTexture = nullptr;
848 }
849 if (m_frameBuffer)
850 {
851 m_openglRender->DeleteFramebuffer(m_frameBuffer);
852 m_frameBuffer = nullptr;
853 }
854 // set filtering
859 LOG(VB_PLAYBACK, LOG_INFO, LOC + "Disabled resizing");
860 }
861 else if (!m_resizing && needresize)
862 {
863 // framebuffer will be created as needed below
864 QOpenGLTexture::Filter filter = ((resize & Sampling) == Sampling) ? QOpenGLTexture::Nearest : QOpenGLTexture::Linear;
868 m_resizing = resize;
869 LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Resizing from %1x%2 to %3x%4 for %5")
870 .arg(m_videoDispDim.width()).arg(m_videoDispDim.height())
871 .arg(m_displayVideoRect.width()).arg(m_displayVideoRect.height())
872 .arg(VideoResizeToString(resize)));
873 }
874
875 // check hardware frames have the correct filtering
876 if (hwframes)
877 {
878 QOpenGLTexture::Filter filter = (resize.testFlag(Sampling)) ? QOpenGLTexture::Nearest : QOpenGLTexture::Linear;
879 if (inputtextures[0]->m_filter != filter)
880 MythVideoTextureOpenGL::SetTextureFilters(m_openglRender, inputtextures, filter);
881 }
882
883 // texture coordinates
884 QRect trect(m_videoRect);
885
886 if (needresize)
887 {
888 MythVideoTextureOpenGL* nexttexture = nullptr;
889
890 // only render to the framebuffer if there is something to update
891 if (useframebufferimage)
892 {
893 if (m_toneMap)
894 {
895 nexttexture = m_toneMap->GetTexture();
896 trect = QRect(QPoint(0, 0), m_displayVideoRect.size());
897 }
898 else
899 {
900 nexttexture = m_frameBufferTexture;
901 }
902 }
903 else if (m_toneMap)
904 {
905 if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
906 m_openglRender->logDebugMarker(LOC + "RENDER_TO_TEXTURE");
907 nexttexture = m_toneMap->Map(inputtextures, m_displayVideoRect.size());
908 trect = QRect(QPoint(0, 0), m_displayVideoRect.size());
909 }
910 else
911 {
912 // render to texture stage
913 if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
914 m_openglRender->logDebugMarker(LOC + "RENDER_TO_TEXTURE");
915
916 // we need a framebuffer and associated texture
917 if (!m_frameBuffer)
918 {
919 if (auto [fbo, tex] = MythVideoTextureOpenGL::CreateVideoFrameBuffer(m_openglRender, m_outputType, m_videoDispDim);
920 (fbo != nullptr) && (tex != nullptr))
921 {
922 delete m_frameBuffer;
923 delete m_frameBufferTexture;
924 m_frameBuffer = fbo;
925 m_frameBufferTexture = tex;
926 m_openglRender->SetTextureFilters(m_frameBufferTexture, QOpenGLTexture::Linear);
927 }
928 }
929
930 if (!m_frameBuffer || !m_frameBufferTexture)
931 return;
932
933 // coordinates
934 QRect vrect(QPoint(0, 0), m_videoDispDim);
935 QRect trect2 = vrect;
936 if (FMT_YUY2 == m_outputType)
937 trect2.setWidth(m_videoDispDim.width() >> 1);
938
939 // framebuffer
942
943 // bind correct textures
944 std::vector<MythGLTexture*> textures {};
945 BindTextures(deinterlacing, inputtextures, textures);
946
947 // render
948 m_openglRender->DrawBitmap(textures, m_frameBuffer, trect2, vrect,
949 m_shaders[program], 0);
950 nexttexture = m_frameBufferTexture;
951 }
952
953 // reset for next stage
954 inputtextures.clear();
955 inputtextures.push_back(nexttexture);
956 program = Default;
957 deinterlacing = false;
958 }
959
960 // Use the bicubic shader if necessary
961 if (resize.testFlag(Bicubic))
962 program = BicubicUpsize;
963
964 // render to default framebuffer/screen
965 if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
966 m_openglRender->logDebugMarker(LOC + "RENDER_TO_SCREEN");
967
968 // discard stereoscopic fields
969 StereoscopicMode stereo = StereoOverride;
970 m_lastStereo = Frame ? Frame->m_stereo3D : m_lastStereo;
971 // N.B. kStereoscopicModeSideBySideDiscard is a proxy here for discard of all types
972 if ((stereo == kStereoscopicModeAuto) &&
974 (m_lastStereo != AV_STEREO3D_2D))
975 {
976 if (m_lastStereo == AV_STEREO3D_SIDEBYSIDE)
977 stereo = kStereoscopicModeSideBySideDiscard;
978 else if (m_lastStereo == AV_STEREO3D_TOPBOTTOM)
979 stereo = kStereoscopicModeTopAndBottomDiscard;
980 }
981
982 if (kStereoscopicModeSideBySideDiscard == stereo)
983 trect = QRect(trect.left() >> 1, trect.top(), trect.width() >> 1, trect.height());
984 else if (kStereoscopicModeTopAndBottomDiscard == stereo)
985 trect = QRect(trect.left(), trect.top() >> 1, trect.width(), trect.height() >> 1);
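// Illustrative example (not part of the original file): a 3840x1080 side-by-side 3D frame with
// discard selected keeps only the left half, so trect becomes (0, 0, 1920x1080) and that single
// eye is stretched to the full display rectangle; top-and-bottom sources keep the top half in
// the same way.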
986
987 // bind default framebuffer
990
991 // PiP border
992 if (DrawBorder)
993 {
994 QRect piprect = m_displayVideoRect.adjusted(-10, -10, +10, +10);
995 static const QPen kNopen(Qt::NoPen);
996 static const QBrush kRedBrush(QBrush(QColor(127, 0, 0, 255)));
997 m_openglRender->DrawRect(nullptr, piprect, kRedBrush, kNopen, 255);
998 }
999
1000 // bind correct textures
1001 std::vector<MythGLTexture*> textures;
1002 BindTextures(deinterlacing, inputtextures, textures);
1003
1004 // rotation
1005 if (Frame)
1006 m_lastRotation = Frame->m_rotation;
1007
1008 // apply scissoring
1009 if (tiled)
1010 {
1011 // N.B. It's not obvious whether this helps
1012 m_openglRender->glEnable(GL_SCISSOR_TEST);
1013 m_openglRender->glScissor(m_displayVideoRect.left() - 1, m_displayVideoRect.top() - 1,
1014 m_displayVideoRect.width() + 2, m_displayVideoRect.height() + 2);
1015 }
1016
1017 // draw
1018 m_openglRender->DrawBitmap(textures, nullptr, trect, m_displayVideoRect,
1019 m_shaders[program], m_lastRotation);
1020
1021 // disable scissoring
1022 if (tiled)
1023 m_openglRender->glDisable(GL_SCISSOR_TEST);
1024
1025 if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
1026 m_openglRender->logDebugMarker(LOC + "RENDER_FRAME_END");
1027}
1028
1030void MythOpenGLVideo::ResetTextures()
1031{
1032 for (auto & texture : m_inputTextures)
1033 texture->m_valid = false;
1034 for (auto & texture : m_prevTextures)
1035 texture->m_valid = false;
1036 for (auto & texture : m_nextTextures)
1037 texture->m_valid = false;
1038}
1039
1040void MythOpenGLVideo::BindTextures(bool Deinterlacing, std::vector<MythVideoTextureOpenGL*>& Current,
1041 std::vector<MythGLTexture*>& Textures)
1042{
1043 if (Deinterlacing && !MythVideoFrame::HardwareFormat(m_inputType))
1044 {
1045 if ((m_nextTextures.size() == Current.size()) && (m_prevTextures.size() == Current.size()))
1046 {
1047 // if we are using reference frames, we want the current frame in the middle
1048 // but next will be the first valid, followed by current...
1049 size_t count = Current.size();
1050 std::vector<MythVideoTextureOpenGL*>& current = Current[0]->m_valid ? Current : m_nextTextures;
1051 std::vector<MythVideoTextureOpenGL*>& prev = m_prevTextures[0]->m_valid ? m_prevTextures : current;
1052
1053 for (uint i = 0; i < count; ++i)
1054 Textures.push_back(reinterpret_cast<MythGLTexture*>(prev[i]));
1055 for (uint i = 0; i < count; ++i)
1056 Textures.push_back(reinterpret_cast<MythGLTexture*>(current[i]));
1057 for (uint i = 0; i < count; ++i)
1058 Textures.push_back(reinterpret_cast<MythGLTexture*>(m_nextTextures[i]));
1059 return;
1060 }
1061 }
1062
1063 std::transform(Current.cbegin(), Current.cend(), std::back_inserter(Textures),
1064 [](MythVideoTextureOpenGL* Tex) { return reinterpret_cast<MythGLTexture*>(Tex); });
1065}
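// Illustrative note (not part of the original file): for the kernel deinterlacer with 3-plane
// YV12 references, the loops above bind the planes in previous, current, next order, so the
// shader's expanded samplers see s_texture0..2 = previous frame, s_texture3..5 = current frame
// and s_texture6..8 = next frame; until the reference sets are populated (e.g. just after a
// seek), the first valid set is substituted so the kernel always samples real data.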
1066
1067QString MythOpenGLVideo::TypeToProfile(VideoFrameType Type)
1068{
1069 if (MythVideoFrame::HardwareFormat(Type))
1070 return "opengl-hw";
1071
1072 switch (Type)
1073 {
1074 case FMT_YUY2: return "opengl"; // compatibility with old profiles
1075 case FMT_YV12: return "opengl-yv12";
1076 case FMT_NV12: return "opengl-nv12";
1077 default: break;
1078 }
1079 return "opengl";
1080}
1081
1082void MythOpenGLVideo::SetupBicubic(VideoResizing& Resize)
1083{
1084 if (((m_videoDispDim.width() < m_displayVideoRect.width()) ||
1085 (m_videoDispDim.height() < m_displayVideoRect.height())))
1086 {
1087 if (m_shaders[BicubicUpsize] == nullptr)
1088 {
1089 if (!CreateVideoShader(BicubicUpsize))
1090 {
1091 LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to create bicubic shader. Disabling");
1092 m_bicubicUpsize = false;
1093 }
1094 else
1095 {
1097 LOG(VB_PLAYBACK, LOG_INFO, LOC + "Created bicubic sampler");
1098 }
1099 }
1100
1102 Resize |= Bicubic;
1103 }
1104 else
1105 {
1106 if (m_shaders[BicubicUpsize] != nullptr)
1107 {
1108 LOG(VB_PLAYBACK, LOG_INFO, LOC + "Disabling bicubic sampler");
1109 delete m_shaders[BicubicUpsize];
1110 m_shaders[BicubicUpsize] = nullptr;
1111 }
1112 }
1113}