MythTV  master
mythopenglvideo.cpp
1 // MythTV
2 #include "mythcontext.h"
3 #include "tv.h"
5 #include "mythavutil.h"
7 #include "mythopengltonemap.h"
8 #include "mythopenglvideo.h"
9 
10 // std
11 #include <utility>
12 
13 #define LOC QString("GLVid: ")
14 #define MAX_VIDEO_TEXTURES 10 // YV12 Kernel deinterlacer + 1
15 
26 MythOpenGLVideo::MythOpenGLVideo(MythRenderOpenGL *Render, VideoColourSpace *ColourSpace,
27  QSize VideoDim, QSize VideoDispDim,
28  QRect DisplayVisibleRect, QRect DisplayVideoRect, QRect VideoRect,
29  bool ViewportControl, QString Profile)
30  : m_profile(std::move(Profile)),
31  m_render(Render),
32  m_videoDispDim(VideoDispDim),
33  m_videoDim(VideoDim),
34  m_masterViewportSize(DisplayVisibleRect.size()),
35  m_displayVideoRect(DisplayVideoRect),
36  m_videoRect(VideoRect),
37  m_videoColourSpace(ColourSpace),
38  m_viewportControl(ViewportControl),
39  m_inputTextureSize(m_videoDim)
40 {
41  if (!m_render || !m_videoColourSpace)
42  return;
43 
44  OpenGLLocker ctx_lock(m_render);
45  m_render->IncrRef();
46  if (m_render->isOpenGLES())
47  m_gles = m_render->format().majorVersion();
48 
51 
52  // Set OpenGL feature support
53  m_features = m_render->GetFeatures();
54  m_extraFeatures = m_render->GetExtraFeatures();
55  m_valid = true;
56 
57  m_chromaUpsamplingFilter = gCoreContext->GetBoolSetting("ChromaUpsamplingFilter", true);
58  LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Chroma upsampling filter %1")
59  .arg(m_chromaUpsamplingFilter ? "enabled" : "disabled"));
60 }
61 
62 MythOpenGLVideo::~MythOpenGLVideo()
63 {
66 
67  if (!m_render)
68  return;
69 
72  delete m_toneMap;
74  m_render->DecrRef();
75 }
76 
77 bool MythOpenGLVideo::IsValid(void) const
78 {
79  return m_valid;
80 }
81 
82 void MythOpenGLVideo::UpdateColourSpace(bool PrimariesChanged)
83 {
84  OpenGLLocker locker(m_render);
85 
86  // if input/output type are unset - we haven't created the shaders yet
87  if (PrimariesChanged && (m_outputType != FMT_NONE))
88  {
89  LOG(VB_GENERAL, LOG_INFO, LOC + "Primaries conversion changed - recreating shaders");
91  }
92 
93  float colourgamma = m_videoColourSpace->GetColourGamma();
94  float displaygamma = 1.0F / m_videoColourSpace->GetDisplayGamma();
95  QMatrix4x4 primary = m_videoColourSpace->GetPrimaryMatrix();
96  for (int i = Progressive; i < ShaderCount; ++i)
97  {
99  m_render->SetShaderProgramParams(m_shaders[i], primary, "m_primaryMatrix");
100  if (m_shaders[i])
101  {
102  m_shaders[i]->setUniformValue("m_colourGamma", colourgamma);
103  m_shaders[i]->setUniformValue("m_displayGamma", displaygamma);
104  }
105  }
106 }
107 
108 void MythOpenGLVideo::UpdateShaderParameters(void)
109 {
110  if (m_inputTextureSize.isEmpty())
111  return;
112 
113  OpenGLLocker locker(m_render);
114  bool rect = m_textureTarget == QOpenGLTexture::TargetRectangle;
115  GLfloat lineheight = rect ? 1.0F : 1.0F / m_inputTextureSize.height();
116  GLfloat maxheight = rect ? m_videoDispDim.height() : m_videoDispDim.height() / static_cast<GLfloat>(m_inputTextureSize.height());
117  GLfloat fieldsize = rect ? 0.5F : m_inputTextureSize.height() / 2.0F;
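 // e.g. for a 1920x1088 Target2D texture displaying 1080 lines, this gives
 // lineheight = 1/1088, maxheight = 1080/1088 and fieldsize = 544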
118  QVector4D parameters(lineheight, /* lineheight */
119  static_cast<GLfloat>(m_inputTextureSize.width()), /* 'Y' select */
120  maxheight - lineheight, /* maxheight */
121  fieldsize /* fieldsize */);
122 
123  for (int i = Progressive; i < ShaderCount; ++i)
124  {
125  if (m_shaders[i])
126  {
128  m_shaders[i]->setUniformValue("m_frameData", parameters);
129  }
130  }
131 }
132 
133 void MythOpenGLVideo::SetMasterViewport(QSize Size)
134 {
135  m_masterViewportSize = Size;
136 }
137 
138 void MythOpenGLVideo::SetVideoDimensions(const QSize &VideoDim, const QSize &VideoDispDim)
139 {
140  m_videoDim = VideoDim;
141  m_videoDispDim = VideoDispDim;
142 }
143 
144 void MythOpenGLVideo::SetVideoRects(const QRect &DisplayVideoRect, const QRect &VideoRect)
145 {
146  m_displayVideoRect = DisplayVideoRect;
147  m_videoRect = VideoRect;
148 }
149 
150 void MythOpenGLVideo::SetViewportRect(const QRect &DisplayVisibleRect)
151 {
152  SetMasterViewport(DisplayVisibleRect.size());
153 }
154 
155 QString MythOpenGLVideo::GetProfile(void) const
156 {
157  if (format_is_hw(m_inputType))
158  return TypeToProfile(m_inputType);
159  return TypeToProfile(m_outputType);
160 }
161 
162 void MythOpenGLVideo::SetProfile(const QString &Profile)
163 {
164  m_profile = Profile;
165 }
166 
167 QSize MythOpenGLVideo::GetVideoSize(void) const
168 {
169  return m_videoDim;
170 }
171 
172 void MythOpenGLVideo::CleanupDeinterlacers(void)
173 {
174  // If switching off/from basic deinterlacing, then we need to delete and
175  // recreate the input textures and sometimes the shaders as well - so start
176  // from scratch
178  {
179  // Note. Textures will be created with linear filtering - which matches
180  // no resizing - which should be the case for the basic deinterlacer - and
181  // the call to SetupFrameFormat will reset resizing anyway
182  LOG(VB_PLAYBACK, LOG_INFO, LOC + "Removing single field textures");
183  // revert to YUY2 if preferred
184  if ((m_inputType == FMT_YV12) && (m_profile == "opengl"))
187  emit OutputChanged(m_videoDim, m_videoDim, -1.0F);
188  }
191  m_deinterlacer2x = false;
192 }
193 
194 bool MythOpenGLVideo::AddDeinterlacer(const VideoFrame *Frame, FrameScanType Scan,
195  MythDeintType Filter /* = DEINT_SHADER */,
196  bool CreateReferences /* = true */)
197 {
198  if (!Frame)
199  return false;
200 
201  // do we want an opengl shader?
202  // shaders trump CPU deinterlacers if selected and driver deinterlacers will only
203  // be available under restricted circumstances
204  // N.B. there should in theory be no situation in which shader deinterlacing is not
205  // available for software formats, hence there should be no need to fallback to cpu
206 
207  if (!is_interlaced(Scan) || Frame->already_deinterlaced)
208  {
210  return false;
211  }
212 
213  m_deinterlacer2x = true;
214  MythDeintType deinterlacer = GetDoubleRateOption(Frame, Filter);
215  MythDeintType other = GetDoubleRateOption(Frame, DEINT_DRIVER);
216  if (other) // another double rate deinterlacer is enabled
217  {
219  return false;
220  }
221 
222  if (!deinterlacer)
223  {
224  m_deinterlacer2x = false;
225  deinterlacer = GetSingleRateOption(Frame, Filter);
226  other = GetSingleRateOption(Frame, DEINT_DRIVER);
227  if (!deinterlacer || other) // no shader deinterlacer needed
228  {
230  return false;
231  }
232  }
233 
234  // if we get this far, we cannot use driver deinterlacers, shader deints
235  // are preferred over cpu, we have a deinterlacer but don't actually care whether
236  // it is single or double rate
237  if (m_deinterlacer == deinterlacer || (m_fallbackDeinterlacer && (m_fallbackDeinterlacer == deinterlacer)))
238  return true;
239 
240  // Lock
241  OpenGLLocker ctx_lock(m_render);
242 
243  // delete old reference textures
246 
247  // For basic deinterlacing of software frames, we now create 2 sets of field
248  // based textures - which is the same approach taken by the CPU based onefield/bob
249  // deinterlacer and the EGL basic deinterlacer. The advantages of this
250  // approach are:-
251  // - no dependent texturing in the samplers (it is just a basic YUV to RGB conversion
252  // in the shader)
253  // - better quality (the onefield shader line doubles but does not have the
254  // implicit interpolation/smoothing of using separate textures directly,
255  // which leads to 'blockiness').
256  // - as we are not sampling other fields, there is no need to use an intermediate
257  // framebuffer to ensure accurate sampling - so we can skip the resize stage.
258  //
259  // YUYV formats are currently not supported as they do not work correctly - force YV12 instead.
260 
261  if (deinterlacer == DEINT_BASIC && format_is_yuv(m_inputType))
262  {
263  if (m_outputType == FMT_YUY2)
264  {
265  LOG(VB_GENERAL, LOG_INFO, LOC + "Forcing OpenGL YV12 for basic deinterlacer");
266  m_outputType = FMT_YV12;
267  }
269  QSize size(m_videoDim.width(), m_videoDim.height() >> 1);
270  vector<QSize> sizes;
271  sizes.emplace_back(size);
272  // N.B. If we are currently resizing, it will be turned off for this
273  // deinterlacer, so the default linear texture filtering is OK.
275  // nextTextures will hold the other field
277  LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created %1 single field textures")
278  .arg(m_inputTextures.size() * 2));
279  // Con VideoOutWindow into displaying the field only
280  emit OutputChanged(m_videoDim, size, -1.0F);
281  }
282 
283  // sanity check max texture units. Should only be an issue on old hardware (e.g. Pi)
284  int max = m_render->GetMaxTextureUnits();
285  uint refstocreate = ((deinterlacer == DEINT_HIGH) && CreateReferences) ? 2 : 0;
286  int totaltextures = static_cast<int>(planes(m_outputType)) * static_cast<int>(refstocreate + 1);
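 // e.g. kernel (DEINT_HIGH) deinterlacing of YV12 needs 3 planes for the current
 // frame plus 2 reference frames = 9 textures (hence MAX_VIDEO_TEXTURES of 10)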
287  if (totaltextures > max)
288  {
289  m_fallbackDeinterlacer = deinterlacer;
290  LOG(VB_GENERAL, LOG_WARNING, LOC + QString("Insufficient texture units for deinterlacer '%1' (%2 < %3)")
291  .arg(DeinterlacerName(deinterlacer | DEINT_SHADER, m_deinterlacer2x)).arg(max).arg(totaltextures));
292  deinterlacer = DEINT_BASIC;
293  LOG(VB_GENERAL, LOG_WARNING, LOC + QString("Falling back to '%1'")
294  .arg(DeinterlacerName(deinterlacer | DEINT_SHADER, m_deinterlacer2x)));
295  }
296 
297  // create new deinterlacers - the old ones will be deleted
298  if (!(CreateVideoShader(InterlacedBot, deinterlacer) && CreateVideoShader(InterlacedTop, deinterlacer)))
299  return false;
300 
301  // create the correct number of reference textures
302  if (refstocreate)
303  {
304  vector<QSize> sizes;
305  sizes.emplace_back(QSize(m_videoDim));
308  // ensure we use GL_NEAREST if resizing is already active
309  if (m_resizing)
310  {
313  }
314  }
315 
316  // ensure they work correctly
317  UpdateColourSpace(false);
319  m_deinterlacer = deinterlacer;
320 
321  LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created deinterlacer '%1' (%2->%3)")
324  return true;
325 }
326 
334 bool MythOpenGLVideo::CreateVideoShader(VideoShaderType Type, MythDeintType Deint)
335 {
336  if (!m_render || !(m_features & QOpenGLFunctions::Shaders))
337  return false;
338 
339  // delete the old
340  if (m_shaders[Type])
341  m_render->DeleteShaderProgram(m_shaders[Type]);
342  m_shaders[Type] = nullptr;
343 
344  QStringList defines;
345  QString vertex = DefaultVertexShader;
346  QString fragment;
347  int cost = 1;
348 
349  if (m_textureTarget == GL_TEXTURE_EXTERNAL_OES)
350  defines << "EXTOES";
351 
352  if ((Default == Type) || (!format_is_yuv(m_outputType)))
353  {
354  QString glsldefines;
355  for (const QString& define : qAsConst(defines))
356  glsldefines += QString("#define MYTHTV_%1\n").arg(define);
357  fragment = glsldefines + YUVFragmentExtensions + RGBFragmentShader;
358 
359 #ifdef USING_MEDIACODEC
361  vertex = MediaCodecVertexShader;
362 #endif
363  }
364  // no interlaced shaders yet (i.e. interlaced chroma upsampling - not deinterlacers)
365  else
366  {
367  fragment = YUVFragmentShader;
368  QString extensions = YUVFragmentExtensions;
369  QString glsldefines;
370 
371  // Any software frames that are not 8bit need to use unsigned integer
372  // samplers with GLES3.x - which need more modern shaders
373  if ((m_gles > 2) && (ColorDepth(m_inputType) > 8))
374  {
375  static const QString glsl300("#version 300 es\n");
376  fragment = GLSL300YUVFragmentShader;
377  extensions = GLSL300YUVFragmentExtensions;
378  vertex = glsl300 + GLSL300VertexShader;
379  glsldefines.append(glsl300);
380  }
381 
382  bool kernel = false;
383  bool topfield = InterlacedTop == Type;
384  bool progressive = (Progressive == Type) || (Deint == DEINT_NONE);
385  if (format_is_420(m_outputType) || format_is_422(m_outputType) || format_is_444(m_outputType))
386  {
387  defines << "YV12";
388  cost = 3;
389  }
390  else if (format_is_nv12(m_outputType))
391  {
392  defines << "NV12";
393  cost = 2;
394  }
395  else if (FMT_YUY2 == m_outputType)
396  {
397  defines << "YUY2";
398  }
399 
400 #ifdef USING_VTB
401  // N.B. Rectangular texture support is only currently used for VideoToolBox
402  // video frames which are NV12. Do not use rectangular textures for the 'default'
403  // shaders as it breaks video resizing and would require changes to our
404  // FramebufferObject code.
405  if ((m_textureTarget == QOpenGLTexture::TargetRectangle) && (Default != Type))
406  defines << "RECTS";
407 #endif
408  if (!progressive)
409  {
410  bool basic = Deint == DEINT_BASIC && format_is_yuv(m_inputType);
411  // Chroma upsampling filter
412  if ((format_is_420(m_outputType) || format_is_nv12(m_outputType)) &&
413  m_chromaUpsamplingFilter && !basic)
414  {
415  defines << "CUE";
416  }
417 
418  // field
419  if (topfield && !basic)
420  defines << "TOPFIELD";
421 
422  switch (Deint)
423  {
424  case DEINT_BASIC:
425  if (!basic)
426  {
427  cost *= 2;
428  defines << "ONEFIELD";
429  }
430  break;
431  case DEINT_MEDIUM: cost *= 5; defines << "LINEARBLEND"; break;
432  case DEINT_HIGH: cost *= 15; defines << "KERNEL"; kernel = true; break;
433  default: break;
434  }
435  }
436 
437  // Start building the new fragment shader
438  // We do this in code otherwise the shader string becomes monolithic
439 
440  // 'expand' calls to sampleYUV for multiple planes
441  // do this before we add the samplers
442  int count = static_cast<int>(planes(m_outputType));
443  for (int i = (kernel ? 2 : 0); (i >= 0) && count; i--)
444  {
445  QString find = QString("s_texture%1").arg(i);
446  QStringList replacelist;
447  for (int j = (i * count); j < ((i + 1) * count); ++j)
448  replacelist << QString("s_texture%1").arg(j);
449  fragment.replace(find, replacelist.join(", "));
450  }
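 // e.g. with 3 planes, "s_texture0" in the shader source becomes
 // "s_texture0, s_texture1, s_texture2"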
451 
452  // 'expand' calls to the kernel function
453  if (kernel && count)
454  {
455  for (int i = 1 ; i >= 0; i--)
456  {
457  QString find1 = QString("sampler2D kernelTex%1").arg(i);
458  QString find2 = QString("kernelTex%1").arg(i);
459  QStringList replacelist1;
460  QStringList replacelist2;
461  for (int j = 0; j < count; ++j)
462  {
463  replacelist1 << QString("sampler2D kernelTexture%1%2").arg(i).arg(j);
464  replacelist2 << QString("kernelTexture%1%2").arg(i).arg(j);
465  }
466  fragment.replace(find1, replacelist1.join(", "));
467  fragment.replace(find2, replacelist2.join(", "));
468  }
469  }
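 // e.g. with 3 planes, "kernelTex1" becomes "kernelTexture10, kernelTexture11, kernelTexture12"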
470 
471  // Retrieve colour mapping defines
472  defines << m_videoColourSpace->GetColourMappingDefines();
473 
474  // Add defines
475  for (const QString& define : qAsConst(defines))
476  glsldefines += QString("#define MYTHTV_%1\n").arg(define);
477 
478  // Add the required samplers
479  int start = 0;
480  int end = count;
481  if (kernel)
482  {
483  end *= 3;
484  if (topfield)
485  start += count;
486  else
487  end -= count;
488  }
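 // e.g. a YV12 kernel shader (count 3): end becomes 9, so the top field declares
 // s_texture3..s_texture8 and the bottom field s_texture0..s_texture5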
489  QString glslsamplers;
490  for (int i = start; i < end; ++i)
491  glslsamplers += QString("uniform sampler2D s_texture%1;\n").arg(i);
492 
493  // construct the final shader string
494  fragment = glsldefines + extensions + glslsamplers + fragment;
495  }
496 
497  m_shaderCost[Type] = cost;
498  QOpenGLShaderProgram *program = m_render->CreateShaderProgram(vertex, fragment);
499  if (!program)
500  return false;
501 
502  m_shaders[Type] = program;
503  return true;
504 }
505 
506 bool MythOpenGLVideo::SetupFrameFormat(VideoFrameType InputType, VideoFrameType OutputType,
507  QSize Size, GLenum TextureTarget)
508 {
509  QString texnew = (TextureTarget == QOpenGLTexture::TargetRectangle) ? "Rect" :
510  (TextureTarget == GL_TEXTURE_EXTERNAL_OES) ? "OES" : "2D";
511  QString texold = (m_textureTarget == QOpenGLTexture::TargetRectangle) ? "Rect" :
512  (m_textureTarget == GL_TEXTURE_EXTERNAL_OES) ? "OES" : "2D";
513  LOG(VB_GENERAL, LOG_WARNING, LOC +
514  QString("New frame format: %1:%2 %3x%4 (Tex: %5) -> %6:%7 %8x%9 (Tex: %10)")
515  .arg(format_description(m_inputType)).arg(format_description(m_outputType))
516  .arg(m_videoDim.width()).arg(m_videoDim.height()).arg(texold)
517  .arg(format_description(InputType)).arg(format_description(OutputType))
518  .arg(Size.width()).arg(Size.height()).arg(texnew));
519 
521 
522  m_inputType = InputType;
523  m_outputType = OutputType;
524  m_textureTarget = TextureTarget;
525  m_videoDim = Size;
526 
527  // This is only currently needed for RGBA32 frames from composed DRM_PRIME
528  // textures that may be half height for simple bob deinterlacing
530  emit OutputChanged(m_videoDim, m_videoDim, -1.0F);
531 
532  if (!format_is_hw(InputType))
533  {
534  vector<QSize> sizes;
535  sizes.push_back(Size);
536  m_inputTextures = MythVideoTexture::CreateTextures(m_render, m_inputType, m_outputType, sizes);
537  if (m_inputTextures.empty())
538  {
539  LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to create input textures");
540  return false;
541  }
542 
543  m_inputTextureSize = m_inputTextures[0]->m_totalSize;
544  LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created %1 input textures for '%2'")
545  .arg(m_inputTextures.size()).arg(GetProfile()));
546  }
547  else
548  {
549  m_inputTextureSize = Size;
550  }
551 
552  // Create shaders
553  if (!CreateVideoShader(Default) || !CreateVideoShader(Progressive))
554  return false;
555 
556  UpdateColourSpace(false);
557  UpdateShaderParameters();
558  return true;
559 }
560 
561 void MythOpenGLVideo::ResetFrameFormat(void)
562 {
563  for (auto & shader : m_shaders)
564  if (shader)
565  m_render->DeleteShaderProgram(shader);
566  memset(m_shaders, 0, sizeof(m_shaders));
567  memset(m_shaderCost, 1, sizeof(m_shaderCost));
573  m_textureTarget = QOpenGLTexture::Target2D;
574  m_inputTextureSize = QSize();
579  m_frameBuffer = nullptr;
580  m_frameBufferTexture = nullptr;
581  // textures are created with Linear filtering - which matches no resize
582  m_resizing = None;
583 }
584 
586 void MythOpenGLVideo::ProcessFrame(VideoFrame *Frame, FrameScanType Scan)
587 {
588  if (Frame->codec == FMT_NONE)
589  return;
590 
591  // Hardware frames are retrieved/updated in PrepareFrame but we need to
592  // reset software frames now if necessary
593  if (format_is_hw(Frame->codec))
594  {
596  {
597  LOG(VB_PLAYBACK, LOG_INFO, LOC + "Resetting input format");
599  }
600  return;
601  }
602 
603  // Sanitise frame
604  if ((Frame->width < 1) || (Frame->height < 1) || !Frame->buf)
605  {
606  LOG(VB_GENERAL, LOG_ERR, LOC + "Invalid software frame");
607  return;
608  }
609 
610  // Can we render this frame format
611  if (!format_is_yuv(Frame->codec))
612  {
613  LOG(VB_GENERAL, LOG_ERR, LOC + "Frame format is not supported");
614  return;
615  }
616 
617  // lock
618  OpenGLLocker ctx_lock(m_render);
619 
620  // check for input changes
621  if ((Frame->width != m_videoDim.width()) ||
622  (Frame->height != m_videoDim.height()) ||
623  (Frame->codec != m_inputType))
624  {
625  VideoFrameType frametype = Frame->codec;
626  if ((frametype == FMT_YV12) && (m_profile == "opengl"))
627  frametype = FMT_YUY2;
628  QSize size(Frame->width, Frame->height);
629  if (!SetupFrameFormat(Frame->codec, frametype, size, QOpenGLTexture::Target2D))
630  return;
631  }
632 
633  // Setup deinterlacing if required
634  AddDeinterlacer(Frame, Scan);
635 
636  if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
637  m_render->logDebugMarker(LOC + "UPDATE_FRAME_START");
638 
640 
641  // Rotate textures if necessary
642  bool current = true;
643  if (m_deinterlacer == DEINT_HIGH)
644  {
645  if (!m_nextTextures.empty() && !m_prevTextures.empty())
646  {
647  if (abs(Frame->frameCounter - m_discontinuityCounter) > 1)
648  ResetTextures();
649  vector<MythVideoTexture*> temp = m_prevTextures;
650  m_prevTextures = m_inputTextures;
651  m_inputTextures = m_nextTextures;
652  m_nextTextures = temp;
653  current = false;
654  }
655  }
656 
657  m_discontinuityCounter = Frame->frameCounter;
658 
659  if (format_is_yuv(m_inputType) && (m_deinterlacer == DEINT_BASIC))
660  {
661  // first field. Fake the pitches
662  int pitches[3];
663  memcpy(pitches, Frame->pitches, sizeof(int) * 3);
664  Frame->pitches[0] = Frame->pitches[0] << 1;
665  Frame->pitches[1] = Frame->pitches[1] << 1;
666  Frame->pitches[2] = Frame->pitches[2] << 1;
668  // second field. Fake the offsets as well.
669  int offsets[3];
670  memcpy(offsets, Frame->offsets, sizeof(int) * 3);
671  Frame->offsets[0] = Frame->offsets[0] + pitches[0];
672  Frame->offsets[1] = Frame->offsets[1] + pitches[1];
673  Frame->offsets[2] = Frame->offsets[2] + pitches[2];
675  memcpy(Frame->pitches, pitches, sizeof(int) * 3);
676  memcpy(Frame->offsets, offsets, sizeof(int) * 3);
677  }
678  else
679  {
681  }
682 
683  if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
684  m_render->logDebugMarker(LOC + "UPDATE_FRAME_END");
685 }
686 
687 void MythOpenGLVideo::PrepareFrame(VideoFrame *Frame, bool TopFieldFirst, FrameScanType Scan,
688  StereoscopicMode Stereo, bool DrawBorder)
689 {
690  if (!m_render)
691  return;
692 
693  OpenGLLocker locker(m_render);
694 
695  if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
696  m_render->logDebugMarker(LOC + "PREP_FRAME_START");
697 
698  // Set required input textures for the last stage
699  // ProcessFrame is always called first, which will create/destroy software
700  // textures as needed
701  bool hwframes = false;
702  bool useframebufferimage = false;
703 
704  // for tiled renderers (e.g. Pi), enable scissoring to try and improve performance
705  // when not full screen and avoid the resize stage unless absolutely necessary
706  bool tiled = m_extraFeatures & kGLTiled;
707 
708  // We lose the pause frame when seeking and using VDPAU/VAAPI/NVDEC direct rendering.
709  // As a workaround, we always use the resize stage so the last displayed frame
710  // should be retained in the Framebuffer used for resizing. If there is
711  // nothing to display, then fallback to this framebuffer.
712  // N.B. this is now strictly necessary with v4l2 and DRM PRIME direct rendering
713  // but ignore now for performance reasons
714  VideoResizing resize = Frame ? (format_is_hwframes(Frame->codec) ? Framebuffer : None) :
715  (m_frameBufferTexture ? Framebuffer : None);
716 
717  vector<MythVideoTexture*> inputtextures = m_inputTextures;
718  if (inputtextures.empty())
719  {
720  // Pull in any hardware frames
721  inputtextures = MythVideoTexture::Retrieve(m_render, m_videoColourSpace, Frame, Scan);
722  if (!inputtextures.empty())
723  {
724  hwframes = true;
725  QSize newsize = inputtextures[0]->m_size;
726  VideoFrameType newsourcetype = inputtextures[0]->m_frameType;
727  VideoFrameType newtargettype = inputtextures[0]->m_frameFormat;
728  GLenum newtargettexture = inputtextures[0]->m_target;
729  if ((m_outputType != newtargettype) || (m_textureTarget != newtargettexture) ||
730  (m_inputType != newsourcetype) || (newsize != m_inputTextureSize))
731  {
732  SetupFrameFormat(newsourcetype, newtargettype, newsize, newtargettexture);
733  }
734 
735 #ifdef USING_MEDIACODEC
736  // Set the texture transform for mediacodec
737  if (FMT_MEDIACODEC == m_inputType)
738  {
739  if (inputtextures[0]->m_transform && m_shaders[Default])
740  {
742  m_shaders[Default]->setUniformValue("u_transform", *inputtextures[0]->m_transform);
743  }
744  }
745 #endif
746  // Enable deinterlacing for NVDEC, VTB and VAAPI DRM if VPP is not available
747  if (inputtextures[0]->m_allowGLSLDeint)
748  AddDeinterlacer(Frame, Scan, DEINT_SHADER | DEINT_CPU, false); // pickup shader or cpu prefs
749  }
750  else
751  {
752  if ((resize == Framebuffer) && m_frameBuffer && m_frameBufferTexture)
753  {
754  LOG(VB_PLAYBACK, LOG_DEBUG, "Using existing framebuffer");
755  useframebufferimage = true;
756  }
757  else
758  {
759  LOG(VB_PLAYBACK, LOG_DEBUG, LOC + "Nothing to display");
760  // if this is live tv startup and the window rect has changed we
761  // must set the viewport
762  m_render->SetViewPort(QRect(QPoint(), m_masterViewportSize));
763  return;
764  }
765  }
766  }
767 
768  // Determine which shader to use. This helps optimise the resize check.
769  bool deinterlacing = false;
770  bool basicdeinterlacing = false;
771  VideoShaderType program = format_is_yuv(m_outputType) ? Progressive : Default;
772  if (m_deinterlacer != DEINT_NONE)
773  {
774  if (Scan == kScan_Interlaced)
775  {
776  program = TopFieldFirst ? InterlacedTop : InterlacedBot;
777  deinterlacing = true;
778  }
779  else if (Scan == kScan_Intr2ndField)
780  {
781  program = TopFieldFirst ? InterlacedBot : InterlacedTop;
782  deinterlacing = true;
783  }
784 
785  // select the correct field for the basic deinterlacer
786  if (deinterlacing && m_deinterlacer == DEINT_BASIC && format_is_yuv(m_inputType))
787  {
788  basicdeinterlacing = true;
789  if (program == InterlacedBot)
790  inputtextures = m_nextTextures;
791  }
792  }
793 
794  // Set deinterlacer type for debug OSD
795  if (deinterlacing && Frame)
796  {
797  Frame->deinterlace_inuse = m_deinterlacer | DEINT_SHADER;
798  Frame->deinterlace_inuse2x = m_deinterlacer2x;
799  }
800 
801  // Tonemapping can only render to a texture
802  if (m_toneMap)
803  resize |= ToneMap;
804 
805  // Decide whether to use render to texture - for performance or quality
806  if (format_is_yuv(m_outputType) && !resize)
807  {
808  // ensure deinterlacing works correctly when down scaling in height
809  // N.B. not needed for the basic deinterlacer
810  if (deinterlacing && !basicdeinterlacing && (m_videoDispDim.height() > m_displayVideoRect.height()))
811  resize |= Deinterlacer;
812  // UYVY packed pixels must be sampled exactly
813  if (FMT_YUY2 == m_outputType)
814  resize |= Sampling;
815  // unsigned integer texture formats need GL_NEAREST sampling
816  if ((m_gles > 2) && (ColorDepth(m_inputType) > 8))
817  resize |= Sampling;
818 
819  // don't enable resizing if the cost of a framebuffer switch may be
820  // prohibitive (e.g. Raspberry Pi/tiled renderers) or for basic deinterlacing,
821  // where we are trying to simplify/optimise rendering (and the framebuffer
822  // sizing gets confused by the change to m_videoDispDim)
823  if (!resize && !tiled && !basicdeinterlacing)
824  {
825  // improve performance. This is an educated guess on the relative cost
826  // of render to texture versus straight rendering.
827  int totexture = m_videoDispDim.width() * m_videoDispDim.height() * m_shaderCost[program];
828  int blitcost = m_displayVideoRect.width() * m_displayVideoRect.height() * m_shaderCost[Default];
829  int noresizecost = m_displayVideoRect.width() * m_displayVideoRect.height() * m_shaderCost[program];
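 // e.g. kernel deinterlacing YV12 (shader cost 45) from 720x576 up to 1920x1080:
 // totexture + blitcost is ~20.7M versus ~93.3M without resizing, so resizing wins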
830  if ((totexture + blitcost) < noresizecost)
831  resize |= Performance;
832  }
833  }
834 
835  // set software frame filtering if resizing has changed
836  if (!resize && m_resizing)
837  {
838  // remove framebuffer
839  if (m_frameBufferTexture)
840  {
841  m_render->DeleteTexture(reinterpret_cast<MythGLTexture*>(m_frameBufferTexture));
842  m_frameBufferTexture = nullptr;
843  }
844  if (m_frameBuffer)
845  {
846  m_render->DeleteFramebuffer(m_frameBuffer);
847  m_frameBuffer = nullptr;
848  }
849  // set filtering
853  m_resizing = None;
854  LOG(VB_PLAYBACK, LOG_INFO, LOC + "Disabled resizing");
855  }
856  else if (!m_resizing && resize)
857  {
858  // framebuffer will be created as needed below
859  QOpenGLTexture::Filter filter = m_toneMap ? QOpenGLTexture::Linear : QOpenGLTexture::Nearest;
863  m_resizing = resize;
864  LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Resizing from %1x%2 to %3x%4 for %5")
865  .arg(m_videoDispDim.width()).arg(m_videoDispDim.height())
866  .arg(m_displayVideoRect.width()).arg(m_displayVideoRect.height())
867  .arg(VideoResizeToString(resize)));
868  }
869 
870  // check hardware frames have the correct filtering
871  if (hwframes)
872  {
873  QOpenGLTexture::Filter filter = (resize && !m_toneMap) ? QOpenGLTexture::Nearest : QOpenGLTexture::Linear;
874  if (inputtextures[0]->m_filter != filter)
875  MythVideoTexture::SetTextureFilters(m_render, inputtextures, filter);
876  }
877 
878  // texture coordinates
879  QRect trect(m_videoRect);
880 
881  if (resize)
882  {
883  MythVideoTexture* nexttexture = nullptr;
884 
885  // only render to the framebuffer if there is something to update
886  if (useframebufferimage)
887  {
888  if (m_toneMap)
889  {
890  nexttexture = m_toneMap->GetTexture();
891  trect = QRect(QPoint(0, 0), m_displayVideoRect.size());
892  }
893  else
894  {
895  nexttexture = m_frameBufferTexture;
896  }
897  }
898  else if (m_toneMap)
899  {
900  if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
901  m_render->logDebugMarker(LOC + "RENDER_TO_TEXTURE");
902  nexttexture = m_toneMap->Map(inputtextures, m_displayVideoRect.size());
903  trect = QRect(QPoint(0, 0), m_displayVideoRect.size());
904  }
905  else
906  {
907  // render to texture stage
908  if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
909  m_render->logDebugMarker(LOC + "RENDER_TO_TEXTURE");
910 
911  // we need a framebuffer
912  if (!m_frameBuffer)
913  {
915  if (!m_frameBuffer)
916  return;
917  }
918 
919  // and its associated texture
921  {
922  m_frameBufferTexture = reinterpret_cast<MythVideoTexture*>(m_render->CreateFramebufferTexture(m_frameBuffer));
924  return;
925  m_render->SetTextureFilters(m_frameBufferTexture, QOpenGLTexture::Linear);
926  }
927 
928  // coordinates
929  QRect vrect(QPoint(0, 0), m_videoDispDim);
930  QRect trect2 = vrect;
931  if (FMT_YUY2 == m_outputType)
932  trect2.setWidth(m_videoDispDim.width() >> 1);
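 // (YUY2 packs two pixels per texel, so the texture is only half the frame width)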
933 
934  // framebuffer
935  m_render->BindFramebuffer(m_frameBuffer);
936  m_render->SetViewPort(vrect);
937 
938  // bind correct textures
939  MythGLTexture* textures[MAX_VIDEO_TEXTURES];
940  uint numtextures = 0;
941  BindTextures(deinterlacing, inputtextures, &textures[0], numtextures);
942 
943  // render
944  m_render->DrawBitmap(textures, numtextures, m_frameBuffer,
945  trect2, vrect, m_shaders[program], 0);
946  nexttexture = m_frameBufferTexture;
947  }
948 
949  // reset for next stage
950  inputtextures.clear();
951  inputtextures.push_back(nexttexture);
952  program = Default;
953  deinterlacing = false;
954  }
955 
956  // render to default framebuffer/screen
957  if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
958  m_render->logDebugMarker(LOC + "RENDER_TO_SCREEN");
959 
960  // discard stereoscopic fields
961  if (kStereoscopicModeSideBySideDiscard == Stereo)
962  trect = QRect(trect.left() >> 1, trect.top(), trect.width() >> 1, trect.height());
963  else if (kStereoscopicModeTopAndBottomDiscard == Stereo)
964  trect = QRect(trect.left(), trect.top() >> 1, trect.width(), trect.height() >> 1);
965 
966  // bind default framebuffer
967  m_render->BindFramebuffer(nullptr);
968  m_render->SetViewPort(QRect(QPoint(), m_masterViewportSize));
969 
970  // PiP border
971  if (DrawBorder)
972  {
973  QRect piprect = m_displayVideoRect.adjusted(-10, -10, +10, +10);
974  static const QPen kNopen(Qt::NoPen);
975  static const QBrush kRedBrush(QBrush(QColor(127, 0, 0, 255)));
976  m_render->DrawRect(nullptr, piprect, kRedBrush, kNopen, 255);
977  }
978 
979  // bind correct textures
980  MythGLTexture* textures[MAX_VIDEO_TEXTURES];
981  uint numtextures = 0;
982  BindTextures(deinterlacing, inputtextures, &textures[0], numtextures);
983 
984  // rotation
985  if (Frame)
986  m_lastRotation = Frame->rotation;
987 
988  // apply scissoring
989  if (tiled)
990  {
991  // N.B. It's not obvious whether this helps
992  m_render->glEnable(GL_SCISSOR_TEST);
993  m_render->glScissor(m_displayVideoRect.left() - 1, m_displayVideoRect.top() - 1,
994  m_displayVideoRect.width() + 2, m_displayVideoRect.height() + 2);
995  }
996 
997  // draw
998  m_render->DrawBitmap(textures, numtextures, nullptr, trect,
999  m_displayVideoRect, m_shaders[program], m_lastRotation);
1000 
1001  // disable scissoring
1002  if (tiled)
1003  m_render->glDisable(GL_SCISSOR_TEST);
1004 
1005  if (VERBOSE_LEVEL_CHECK(VB_GPU, LOG_INFO))
1006  m_render->logDebugMarker(LOC + "PREP_FRAME_END");
1007 }
1008 
1010 void MythOpenGLVideo::ResetTextures(void)
1011 {
1012  for (auto & texture : m_inputTextures)
1013  texture->m_valid = false;
1014  for (auto & texture : m_prevTextures)
1015  texture->m_valid = false;
1016  for (auto & texture : m_nextTextures)
1017  texture->m_valid = false;
1018 }
1019 
1020 void MythOpenGLVideo::BindTextures(bool Deinterlacing, vector<MythVideoTexture*> &Current,
1021  MythGLTexture **Textures, uint &TextureCount)
1022 {
1023  bool usecurrent = true;
1024  if (Deinterlacing)
1025  {
1026  if (m_deinterlacer == DEINT_BASIC)
1027  {
1028  usecurrent = true;
1029  }
1030  else if ((m_nextTextures.size() == Current.size()) && (m_prevTextures.size() == Current.size()))
1031  {
1032  // if we are using reference frames, we want the current frame in the middle
1033  // but next will be the first valid, followed by current...
1034  usecurrent = false;
1035  size_t count = Current.size();
1036  vector<MythVideoTexture*> &current = Current[0]->m_valid ? Current : m_nextTextures;
1037  vector<MythVideoTexture*> &prev = m_prevTextures[0]->m_valid ? m_prevTextures : current;
1038 
1039  for (uint i = 0; i < count; ++i)
1040  Textures[TextureCount++] = reinterpret_cast<MythGLTexture*>(prev[i]);
1041  for (uint i = 0; i < count; ++i)
1042  Textures[TextureCount++] = reinterpret_cast<MythGLTexture*>(current[i]);
1043  for (uint i = 0; i < count; ++i)
1044  Textures[TextureCount++] = reinterpret_cast<MythGLTexture*>(m_nextTextures[i]);
1045  }
1046  }
1047 
1048  if (usecurrent)
1049  for (auto & texture : Current)
1050  Textures[TextureCount++] = reinterpret_cast<MythGLTexture*>(texture);
1051 }
1052 
1053 QString MythOpenGLVideo::TypeToProfile(VideoFrameType Type)
1054 {
1055  if (format_is_hw(Type))
1056  return "opengl-hw";
1057 
1058  switch (Type)
1059  {
1060  case FMT_YUY2: return "opengl"; // compatibility with old profiles
1061  case FMT_YV12: return "opengl-yv12";
1062  case FMT_NV12: return "opengl-nv12";
1063  default: break;
1064  }
1065  return "opengl";
1066 }
1067 
1068 QString MythOpenGLVideo::VideoResizeToString(VideoResizing Resize)
1069 {
1070  QStringList reasons;
1071  if ((Resize & Deinterlacer) != 0U) reasons << "Deinterlacer";
1072  if ((Resize & Sampling) != 0U) reasons << "Sampling";
1073  if ((Resize & Performance) != 0U) reasons << "Performance";
1074  if ((Resize & Framebuffer) != 0U) reasons << "Framebuffer";
1075  return reasons.join(",");
1076 }
1077 
1078 QOpenGLFramebufferObject* MythOpenGLVideo::CreateVideoFrameBuffer(VideoFrameType OutputType, QSize Size)
1079 {
1080  // Use a 16bit float framebuffer if necessary and available (not GLES2) to maintain precision.
1081  // The depth check will pick up all software formats as well as NVDEC, VideoToolBox and VAAPI DRM.
1082  // VAAPI GLXPixmap and GLXCopy are currently not 10/12bit aware and VDPAU has no 10bit support -
1083  // and all return RGB formats anyway. The MediaCoded texture format is an unknown but resizing will
1084  // never be enabled as it returns an RGB frame - so if MediaCodec uses a 16bit texture, precision
1085  // will be preserved.
1086  bool sixteenbitfb = m_extraFeatures & kGL16BitFBO;
1087  bool sixteenbitvid = ColorDepth(OutputType) > 8;
1088  if (sixteenbitfb && sixteenbitvid)
1089  LOG(VB_PLAYBACK, LOG_INFO, LOC + "Requesting 16bit framebuffer texture");
1090  return m_render->CreateFramebuffer(Size, sixteenbitfb && sixteenbitvid);
1091 }