mythcodeccontext.cpp
// Copyright (c) 2017-19 MythTV Developers <mythtv-dev@mythtv.org>
//
// This is part of MythTV (https://www.mythtv.org)
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//

#include "mythcorecontext.h"
#include "mythlogging.h"
#include "mythmainwindow.h"
#include "mythopenglinterop.h"
#include "avformatdecoder.h"

#ifdef USING_VAAPI
#include "mythvaapicontext.h"
#endif
#ifdef USING_VDPAU
#include "mythvdpauhelper.h"
#include "mythvdpaucontext.h"
#endif
#ifdef USING_NVDEC
#include "mythnvdeccontext.h"
#endif
#ifdef USING_VTB
#include "mythvtbcontext.h"
#endif
#ifdef USING_MEDIACODEC
#include "mythmediacodeccontext.h"
#endif
#ifdef USING_V4L2
#include "mythv4l2m2mcontext.h"
#endif
#ifdef USING_MMAL
#include "mythmmalcontext.h"
#endif
#ifdef USING_EGL
#include "mythdrmprimecontext.h"
#endif
#include "mythcodeccontext.h"

extern "C" {
#include "libavutil/pixdesc.h"
}

#define LOC QString("MythCodecContext: ")

QAtomicInt MythCodecContext::s_hwFramesContextCount(0);

MythCodecContext::MythCodecContext(DecoderBase *Parent, MythCodecID CodecID)
  : m_parent(Parent),
    m_codecID(CodecID)
{
}

MythCodecContext *MythCodecContext::CreateContext(DecoderBase *Parent, MythCodecID Codec)
{
    MythCodecContext *mctx = nullptr;
#ifdef USING_VAAPI
    if (codec_is_vaapi(Codec) || codec_is_vaapi_dec(Codec))
        mctx = new MythVAAPIContext(Parent, Codec);
#endif
#ifdef USING_VDPAU
    if (codec_is_vdpau_hw(Codec) || codec_is_vdpau_dechw(Codec))
        mctx = new MythVDPAUContext(Parent, Codec);
#endif
#ifdef USING_NVDEC
    if (codec_is_nvdec_dec(Codec) || codec_is_nvdec(Codec))
        mctx = new MythNVDECContext(Parent, Codec);
#endif
#ifdef USING_VTB
    if (codec_is_vtb_dec(Codec) || codec_is_vtb(Codec))
        mctx = new MythVTBContext(Parent, Codec);
#endif
#ifdef USING_MEDIACODEC
    if (codec_is_mediacodec(Codec) || codec_is_mediacodec_dec(Codec))
        mctx = new MythMediaCodecContext(Parent, Codec);
#endif
#ifdef USING_V4L2
    if (codec_is_v4l2_dec(Codec) || codec_is_v4l2(Codec))
        mctx = new MythV4L2M2MContext(Parent, Codec);
#endif
#ifdef USING_MMAL
    if (codec_is_mmal_dec(Codec) || codec_is_mmal(Codec))
        mctx = new MythMMALContext(Parent, Codec);
#endif
#ifdef USING_EGL
    if (codec_is_drmprime(Codec))
        mctx = new MythDRMPRIMEContext(Parent, Codec);
#endif
    Q_UNUSED(Codec);

    if (!mctx)
        mctx = new MythCodecContext(Parent, Codec);
    return mctx;
}

QStringList MythCodecContext::GetDecoderDescription(void)
{
    QStringList decoders;

#ifdef USING_VAAPI
    MythVAAPIContext::GetDecoderList(decoders);
#endif
#ifdef USING_MEDIACODEC
    MythMediaCodecContext::GetDecoderList(decoders);
#endif
    return decoders;
}

void MythCodecContext::GetDecoders(RenderOptions &Opts)
{
#ifdef USING_VDPAU
    // Only enable VDPAU support if it is actually present
    if (MythVDPAUHelper::HaveVDPAU())
    {
        Opts.decoders->append("vdpau");
        (*Opts.equiv_decoders)["vdpau"].append("dummy");
        Opts.decoders->append("vdpau-dec");
        (*Opts.equiv_decoders)["vdpau-dec"].append("dummy");
    }
#endif
#ifdef USING_DXVA2
    Opts.decoders->append("dxva2");
    (*Opts.equiv_decoders)["dxva2"].append("dummy");
#endif

#ifdef USING_VAAPI
    // Only enable VAAPI if it is actually present and isn't actually VDPAU
    if (!MythVAAPIContext::HaveVAAPI().isEmpty())
    {
        Opts.decoders->append("vaapi");
        (*Opts.equiv_decoders)["vaapi"].append("dummy");
        Opts.decoders->append("vaapi-dec");
        (*Opts.equiv_decoders)["vaapi-dec"].append("dummy");
    }
#endif
#ifdef USING_NVDEC
    // Only enable NVDec support if it is actually present
    if (MythNVDECContext::HaveNVDEC())
    {
        Opts.decoders->append("nvdec");
        (*Opts.equiv_decoders)["nvdec"].append("dummy");
        Opts.decoders->append("nvdec-dec");
        (*Opts.equiv_decoders)["nvdec-dec"].append("dummy");
    }
#endif
#ifdef USING_MEDIACODEC
    if (MythMediaCodecContext::HaveMediaCodec())
    {
        Opts.decoders->append("mediacodec");
        (*Opts.equiv_decoders)["mediacodec"].append("dummy");
        Opts.decoders->append("mediacodec-dec");
        (*Opts.equiv_decoders)["mediacodec-dec"].append("dummy");
    }
#endif
#ifdef USING_VTB
    Opts.decoders->append("vtb");
    Opts.decoders->append("vtb-dec");
    (*Opts.equiv_decoders)["vtb"].append("dummy");
    (*Opts.equiv_decoders)["vtb-dec"].append("dummy");
#endif
#ifdef USING_V4L2
    if (MythV4L2M2MContext::HaveV4L2Codecs())
    {
#ifdef USING_V4L2PRIME
        Opts.decoders->append("v4l2");
        (*Opts.equiv_decoders)["v4l2"].append("dummy");
#endif
        Opts.decoders->append("v4l2-dec");
        (*Opts.equiv_decoders)["v4l2-dec"].append("dummy");
    }
#endif
#ifdef USING_EGL
    if (MythDRMPRIMEContext::HavePrimeDecoders())
    {
        Opts.decoders->append("drmprime");
        (*Opts.equiv_decoders)["drmprime"].append("dummy");
    }
#endif
#ifdef USING_MMAL
    Opts.decoders->append("mmal-dec");
    (*Opts.equiv_decoders)["mmal-dec"].append("dummy");
    if (MythMMALContext::HaveMMAL())
    {
        Opts.decoders->append("mmal");
        (*Opts.equiv_decoders)["mmal"].append("dummy");
    }
#endif
}
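
/* Example: an illustrative sketch of how a caller might populate a RenderOptions
   instance before calling GetDecoders. Only the 'decoders' and 'equiv_decoders'
   members used above are shown; RenderOptions is defined elsewhere and the local
   variable names here are hypothetical.

   \code
   QStringList decoders;
   QMap<QString,QStringList> equiv_decoders;
   RenderOptions options {};
   options.decoders       = &decoders;
   options.equiv_decoders = &equiv_decoders;
   MythCodecContext::GetDecoders(options);
   \endcode
*/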

MythCodecID MythCodecContext::FindDecoder(const QString &Decoder, AVStream *Stream,
                                          AVCodecContext **Context, AVCodec **Codec)
{
    MythCodecID result = kCodec_NONE;
    uint streamtype = mpeg_version((*Context)->codec_id);

#ifdef USING_VDPAU
    result = MythVDPAUContext::GetSupportedCodec(Context, Codec, Decoder, streamtype);
    if (codec_is_vdpau_hw(result) || codec_is_vdpau_dechw(result))
        return result;
#endif
#ifdef USING_VAAPI
    result = MythVAAPIContext::GetSupportedCodec(Context, Codec, Decoder, streamtype);
    if (codec_is_vaapi(result) || codec_is_vaapi_dec(result))
        return result;
#endif
#ifdef USING_VTB
    (void)Stream;
    result = MythVTBContext::GetSupportedCodec(Context, Codec, Decoder, streamtype);
    if (codec_is_vtb(result) || codec_is_vtb_dec(result))
        return result;
#endif
#ifdef USING_DXVA2
    result = VideoOutputD3D::GetBestSupportedCodec(width, height, Decoder, streamtype, false);
    if (codec_is_dxva2(result))
        return result;
#endif
#ifdef USING_MEDIACODEC
    result = MythMediaCodecContext::GetBestSupportedCodec(Context, Codec, Decoder, Stream, streamtype);
    if (codec_is_mediacodec(result) || codec_is_mediacodec_dec(result))
        return result;
#endif
#ifdef USING_NVDEC
    result = MythNVDECContext::GetSupportedCodec(Context, Codec, Decoder, Stream, streamtype);
    if (codec_is_nvdec(result) || codec_is_nvdec_dec(result))
        return result;
#endif
#ifdef USING_V4L2
    result = MythV4L2M2MContext::GetSupportedCodec(Context, Codec, Decoder, Stream, streamtype);
    if (codec_is_v4l2_dec(result) || codec_is_v4l2(result))
        return result;
#endif
#ifdef USING_MMAL
    result = MythMMALContext::GetSupportedCodec(Context, Codec, Decoder, Stream, streamtype);
    if (codec_is_mmal_dec(result) || codec_is_mmal(result))
        return result;
#endif
#ifdef USING_EGL
    result = MythDRMPRIMEContext::GetSupportedCodec(Context, Codec, Decoder, Stream, streamtype);
    if (codec_is_drmprime(result))
        return result;
#endif

    return kCodec_NONE;
}
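
/* Example: a rough sketch of the expected probe-then-create sequence in a decoder
   such as AvFormatDecoder. The variable names ('decodername', 'stream', 'context',
   'codec', 'm_mythCodecCtx') are hypothetical and error handling is omitted.

   \code
   MythCodecID codecid = MythCodecContext::FindDecoder(decodername, stream, &context, &codec);
   m_mythCodecCtx = MythCodecContext::CreateContext(this, codecid);
   m_mythCodecCtx->InitVideoCodec(context, selectedstream, directrendering);
   \endcode
*/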

void MythCodecContext::InitVideoCodec(AVCodecContext *Context,
                                      bool SelectedStream, bool &DirectRendering)
{
    const AVCodec *codec1 = Context->codec;
    if (codec1 && codec1->capabilities & AV_CODEC_CAP_DR1)
    {
        // Context->flags |= CODEC_FLAG_EMU_EDGE;
    }
    else
    {
        if (SelectedStream)
            DirectRendering = false;
        LOG(VB_PLAYBACK, LOG_INFO, LOC +
            QString("Using software scaling to convert pixel format %1 for "
                    "codec %2").arg(av_get_pix_fmt_name(Context->pix_fmt))
                .arg(ff_codec_id_string(Context->codec_id)));
    }
}

//! \brief A generic hardware buffer initialisation method when using AVHWFramesContext.
int MythCodecContext::GetBuffer(struct AVCodecContext *Context, AVFrame *Frame, int Flags)
{
    auto *avfd = static_cast<AvFormatDecoder*>(Context->opaque);
    VideoFrame *videoframe = avfd->GetPlayer()->GetNextVideoFrame();

    // set fields required for directrendering
    for (int i = 0; i < 4; i++)
    {
        Frame->data[i] = nullptr;
        Frame->linesize[i] = 0;
    }
    Frame->opaque = videoframe;
    videoframe->pix_fmt = Context->pix_fmt;
    Frame->reordered_opaque = Context->reordered_opaque;

    int ret = avcodec_default_get_buffer2(Context, Frame, Flags);
    if (ret < 0)
        return ret;

    // set the underlying pixel format. Set here rather than guessing later.
    if (Frame->hw_frames_ctx)
    {
        auto *context = reinterpret_cast<AVHWFramesContext*>(Frame->hw_frames_ctx->data);
        if (context)
            videoframe->sw_pix_fmt = context->sw_format;
    }

    // VAAPI 'fixes' 10/12/16bit colour values. Irrelevant for VDPAU.
    videoframe->colorshifted = 1;

    // avcodec_default_get_buffer2 will retrieve an AVBufferRef from the pool of
    // hardware surfaces stored within AVHWFramesContext. The pointer to the surface is stored
    // in Frame->data[3]. Store this in VideoFrame::buf for the interop class to use.
    videoframe->buf = Frame->data[3];
    // Frame->buf[0] also contains a reference to the buffer. Take an additional reference to this
    // buffer to retain the surface until it has been displayed (otherwise it is
    // reused once the decoder is finished with it).
    videoframe->priv[0] = reinterpret_cast<unsigned char*>(av_buffer_ref(Frame->buf[0]));
    // Frame->hw_frames_ctx contains a reference to the AVHWFramesContext. Take an additional
    // reference to ensure AVHWFramesContext is not released until we are finished with it.
    // This also contains the underlying MythOpenGLInterop class reference.
    videoframe->priv[1] = reinterpret_cast<unsigned char*>(av_buffer_ref(Frame->hw_frames_ctx));

    // Set release method
    Frame->buf[1] = av_buffer_create(reinterpret_cast<uint8_t*>(videoframe), 0,
                                     MythCodecContext::ReleaseBuffer, avfd, 0);
    return ret;
}
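
/* Example: GetBuffer matches the signature of AVCodecContext::get_buffer2, so a
   hardware context that uses AVHWFramesContext would typically install it as the
   frame allocation callback, with the owning AvFormatDecoder stored in
   AVCodecContext::opaque (as assumed by the cast above). A minimal sketch; the
   decoder pointer name is hypothetical:

   \code
   Context->get_buffer2 = MythCodecContext::GetBuffer;
   Context->opaque      = static_cast<void*>(avFormatDecoder);
   \endcode
*/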

//! \brief A generic hardware buffer initialisation method when AVHWFramesContext is NOT used.
bool MythCodecContext::GetBuffer2(struct AVCodecContext *Context, VideoFrame* Frame,
                                  AVFrame *AvFrame, int /*Flags*/)
{
    if (!AvFrame || !Context || !Frame)
        return false;

    auto *avfd = static_cast<AvFormatDecoder*>(Context->opaque);

    Frame->pix_fmt = Context->pix_fmt;
    Frame->directrendering = 1;
    Frame->colorshifted = 1;

    AvFrame->reordered_opaque = Context->reordered_opaque;
    AvFrame->opaque = Frame;

    // retrieve the software format
    if (AvFrame->hw_frames_ctx)
    {
        auto *context = reinterpret_cast<AVHWFramesContext*>(AvFrame->hw_frames_ctx->data);
        if (context)
            Frame->sw_pix_fmt = context->sw_format;
    }

    // the hardware surface is stored in AvFrame->data[3]
    Frame->buf = AvFrame->data[3];

    // AvFrame->buf[0] contains the release method. Take another reference to
    // ensure the frame is not released before it is displayed.
    Frame->priv[0] = reinterpret_cast<unsigned char*>(av_buffer_ref(AvFrame->buf[0]));

    // Retrieve and set the interop class
    auto *devicectx = reinterpret_cast<AVHWDeviceContext*>(Context->hw_device_ctx->data);
    Frame->priv[1] = reinterpret_cast<unsigned char*>(devicectx->user_opaque);

    // Set release method
    AvFrame->buf[1] = av_buffer_create(reinterpret_cast<uint8_t*>(Frame), 0,
                                       MythCodecContext::ReleaseBuffer, avfd, 0);
    return true;
}

void MythCodecContext::ReleaseBuffer(void *Opaque, uint8_t *Data)
{
    auto *decoder = static_cast<AvFormatDecoder*>(Opaque);
    auto *frame = reinterpret_cast<VideoFrame*>(Data);
    if (decoder && decoder->GetPlayer())
        decoder->GetPlayer()->DeLimboFrame(frame);
}

/*! \brief Track the number of concurrent hardware frames contexts.
*/
void MythCodecContext::NewHardwareFramesContext(void)
{
    int count = ++s_hwFramesContextCount;
    if (count != 1)
        LOG(VB_GENERAL, LOG_WARNING, LOC + QString("Error: %1 concurrent hardware frames contexts").arg(count));
}

void MythCodecContext::FramesContextFinished(AVHWFramesContext *Context)
{
    s_hwFramesContextCount.fetchAndAddRelease(-1);
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("%1 frames context finished")
        .arg(av_hwdevice_get_type_name(Context->device_ctx->type)));
    auto *interop = reinterpret_cast<MythOpenGLInterop*>(Context->user_opaque);
    if (interop)
        DestroyInterop(interop);
}

void MythCodecContext::DeviceContextFinished(AVHWDeviceContext *Context)
{
    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("%1 device context finished")
        .arg(av_hwdevice_get_type_name(Context->type)));
    auto *interop = reinterpret_cast<MythOpenGLInterop*>(Context->user_opaque);
    if (interop)
    {
        DestroyInterop(interop);
        FreeAVHWDeviceContext free = interop->GetDefaultFree();
        if (free)
        {
            LOG(VB_PLAYBACK, LOG_INFO, LOC + "Calling default device context free");
            Context->user_opaque = interop->GetDefaultUserOpaque();
            free(Context);
        }
    }
}

void MythCodecContext::DestroyInterop(MythOpenGLInterop *Interop)
{
    if (gCoreContext->IsUIThread())
    {
        Interop->DecrRef();
        return;
    }

    auto destroy = [](void *Wait, void *Interop2, void* /*unused*/)
    {
        LOG(VB_PLAYBACK, LOG_INFO, LOC + "Destroy interop callback");
        auto *wait = reinterpret_cast<QWaitCondition*>(Wait);
        auto *interop = reinterpret_cast<MythOpenGLInterop*>(Interop2);
        if (interop)
            interop->DecrRef();
        if (wait)
            wait->wakeAll();
    };

    if (!Interop->GetPlayer())
    {
        LOG(VB_GENERAL, LOG_ERR, LOC + "Cannot destroy interop - no player");
        return;
    }
    MythPlayer::HandleDecoderCallback(Interop->GetPlayer(), "Destroy OpenGL interop",
                                      destroy, Interop, nullptr);
}

void MythCodecContext::CreateDecoderCallback(void *Wait, void *Context, void *Callback)
{
    LOG(VB_PLAYBACK, LOG_INFO, LOC + "Create decoder callback");
    auto *wait = reinterpret_cast<QWaitCondition*>(Wait);
    auto *context = reinterpret_cast<AVCodecContext*>(Context);
    auto callback = reinterpret_cast<CreateHWDecoder>(Callback);
    if (context && callback)
        (void)callback(context);
    if (wait)
        wait->wakeAll();
}

//! \brief Initialise a hardware decoder that is expected to use AVHWFramesContext.
int MythCodecContext::InitialiseDecoder(AVCodecContext *Context, CreateHWDecoder Callback,
                                        const QString &Debug)
{
    if (!Context || !Callback)
        return -1;
    if (gCoreContext->IsUIThread())
        return Callback(Context);

    // Callback to MythPlayer (which will fail without a MythPlayer instance)
    MythPlayer *player = nullptr;
    auto *decoder = reinterpret_cast<AvFormatDecoder*>(Context->opaque);
    if (decoder)
        player = decoder->GetPlayer();
    MythPlayer::HandleDecoderCallback(player, Debug, CreateDecoderCallback,
                                      Context, reinterpret_cast<void*>(Callback));
    return Context->hw_frames_ctx ? 0 : -1;
}
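
/* Example: a sketch of how a hardware context subclass might use InitialiseDecoder.
   'InitialiseContext' is a hypothetical callback; it only has to match the
   CreateHWDecoder type (int(*)(AVCodecContext*)) and create the frames context on
   the UI thread, returning 0 on success.

   \code
   static int InitialiseContext(AVCodecContext *Context); // hypothetical, returns 0 on success

   if (MythCodecContext::InitialiseDecoder(Context, InitialiseContext, "Create hardware decoder") < 0)
       return; // fall back to software decoding
   \endcode
*/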

//! \brief Initialise a hardware decoder that is NOT expected to use AVHWFramesContext.
int MythCodecContext::InitialiseDecoder2(AVCodecContext *Context, CreateHWDecoder Callback,
                                         const QString &Debug)
{
    if (!Context || !Callback)
        return -1;
    if (gCoreContext->IsUIThread())
        return Callback(Context);

    // Callback to MythPlayer (which will fail without a MythPlayer instance)
    MythPlayer *player = nullptr;
    auto *decoder = reinterpret_cast<AvFormatDecoder*>(Context->opaque);
    if (decoder)
        player = decoder->GetPlayer();
    MythPlayer::HandleDecoderCallback(player, Debug, CreateDecoderCallback,
                                      Context, reinterpret_cast<void*>(Callback));
    return Context->hw_device_ctx ? 0 : -1;
}

AVBufferRef* MythCodecContext::CreateDevice(AVHWDeviceType Type, MythOpenGLInterop *Interop, const QString &Device)
{
    AVBufferRef* result = nullptr;
    int res = av_hwdevice_ctx_create(&result, Type, Device.isEmpty() ? nullptr :
                                     Device.toLocal8Bit().constData(), nullptr, 0);
    if (res == 0)
    {
        LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created hardware device '%1'%2")
            .arg(av_hwdevice_get_type_name(Type))
            .arg(Device == nullptr ? "" : QString(" (%1)").arg(Device)));
        auto *context = reinterpret_cast<AVHWDeviceContext*>(result->data);

        if ((context->free || context->user_opaque) && !Interop)
        {
            LOG(VB_PLAYBACK, LOG_INFO, "Creating dummy interop");
            Interop = MythOpenGLInterop::CreateDummy();
        }

        if (Interop)
        {
            Interop->SetDefaultFree(context->free);
            Interop->SetDefaultUserOpaque(context->user_opaque);
            Interop->IncrRef();
        }

        context->free = MythCodecContext::DeviceContextFinished;
        context->user_opaque = Interop;
        return result;
    }

    char error[AV_ERROR_MAX_STRING_SIZE];
    LOG(VB_PLAYBACK, LOG_ERR, LOC + QString("Failed to create hardware device '%1'%2 Error '%3'")
        .arg(av_hwdevice_get_type_name(Type))
        .arg(Device == nullptr ? "" : QString(" (%1)").arg(Device))
        .arg(av_make_error_string(error, sizeof(error), res)));
    return nullptr;
}
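
/* Example: an illustrative call that creates a VAAPI device context and attaches it
   to the codec context. The interop pointer and the device path are assumptions for
   this sketch; an empty device string lets FFmpeg pick a default device.

   \code
   AVBufferRef *device = MythCodecContext::CreateDevice(AV_HWDEVICE_TYPE_VAAPI, interop,
                                                        QString("/dev/dri/renderD128"));
   if (device)
       Context->hw_device_ctx = device;
   \endcode
*/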

//! \brief Most hardware decoders do not support these codecs/profiles.
bool MythCodecContext::IsUnsupportedProfile(AVCodecContext *Context)
{
    switch (Context->codec_id)
    {
        case AV_CODEC_ID_H264:
            switch (Context->profile)
            {
                case FF_PROFILE_H264_HIGH_10:
                case FF_PROFILE_H264_HIGH_10_INTRA:
                case FF_PROFILE_H264_HIGH_422:
                case FF_PROFILE_H264_HIGH_422_INTRA:
                case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
                case FF_PROFILE_H264_HIGH_444_INTRA:
                case FF_PROFILE_H264_CAVLC_444: return true;
                default: break;
            }
            break;
        default: break;
    }
    return false;
}
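
/* Example: a typical guard during codec probing, e.g. inside a subclass's
   GetSupportedCodec implementation where the context is passed as AVCodecContext**.
   Sketch only; 'failure' stands for whatever software codec ID the caller returns.

   \code
   if (MythCodecContext::IsUnsupportedProfile(*Context))
       return failure; // leave this stream to the software decoder
   \endcode
*/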

/*! \brief Retrieve and process/filter AVFrame.
 *  The base implementation applies no filtering and simply calls avcodec_receive_frame.
*/
int MythCodecContext::FilteredReceiveFrame(AVCodecContext *Context, AVFrame *Frame)
{
    return avcodec_receive_frame(Context, Frame);
}

bool MythCodecContext::RetrieveHWFrame(VideoFrame *Frame, AVFrame *AvFrame)
{
    if (!Frame || !AvFrame)
        return false;

    AVFrame *temp = av_frame_alloc();
    if (!temp)
        return false;

    AVPixelFormat *pixelformats = nullptr;
    int ret = av_hwframe_transfer_get_formats(AvFrame->hw_frames_ctx,
                                              AV_HWFRAME_TRANSFER_DIRECTION_FROM,
                                              &pixelformats, 0);
    if (ret == 0)
    {
        AVPixelFormat best = m_parent->GetBestVideoFormat(pixelformats);
        if (best != AV_PIX_FMT_NONE)
        {
            VideoFrameType type = PixelFormatToFrameType(best);
            bool valid = Frame->codec == type;
            if (!valid || (Frame->width != AvFrame->width) || (Frame->height != AvFrame->height))
                valid = VideoBuffers::ReinitBuffer(Frame, type, m_parent->GetVideoCodecID(),
                                                   AvFrame->width, AvFrame->height);

            if (valid)
            {
                // Retrieve the picture directly into the VideoFrame Buffer
                temp->format = best;
                uint max = planes(Frame->codec);
                for (uint i = 0; i < 3; i++)
                {
                    temp->data[i] = (i < max) ? (Frame->buf + Frame->offsets[i]) : nullptr;
                    temp->linesize[i] = Frame->pitches[i];
                }

                // Dummy release method - we do not want to free the buffer
                temp->buf[0] = av_buffer_create(reinterpret_cast<uint8_t*>(Frame), 0,
                                                [](void* /*unused*/, uint8_t* /*unused*/){}, this, 0);
                temp->width = AvFrame->width;
                temp->height = AvFrame->height;
            }
        }
    }
    av_freep(&pixelformats);

    // retrieve data from GPU to CPU
    if (ret >= 0)
        if ((ret = av_hwframe_transfer_data(temp, AvFrame, 0)) < 0)
            LOG(VB_GENERAL, LOG_ERR, LOC + QString("Error %1 transferring the data to system memory").arg(ret));

    Frame->colorshifted = 1;
    av_frame_free(&temp);
    return ret >= 0;
}
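
/* Example: a caller-side sketch for copying a hardware surface back to system
   memory once decoding has produced an AVFrame backed by an AVHWFramesContext.
   'videoframe', 'avframe' and 'm_mythCodecCtx' are hypothetical names.

   \code
   if (avframe->hw_frames_ctx && !m_mythCodecCtx->RetrieveHWFrame(videoframe, avframe))
       LOG(VB_GENERAL, LOG_ERR, LOC + "Failed to copy hardware frame to system memory");
   \endcode
*/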

MythCodecContext::CodecProfile MythCodecContext::FFmpegToMythProfile(AVCodecID CodecID, int Profile)
{
    switch (CodecID)
    {
        case AV_CODEC_ID_MPEG2VIDEO:
            switch (Profile)
            {
                case FF_PROFILE_MPEG2_422: return MPEG2422;
                case FF_PROFILE_MPEG2_HIGH: return MPEG2High;
                case FF_PROFILE_MPEG2_SS: return MPEG2Spatial;
                case FF_PROFILE_MPEG2_SNR_SCALABLE: return MPEG2SNR;
                case FF_PROFILE_MPEG2_SIMPLE: return MPEG2Simple;
                case FF_PROFILE_MPEG2_MAIN: return MPEG2Main;
                default: break;
            }
            break;
        case AV_CODEC_ID_MPEG4:
            switch (Profile)
            {
                case FF_PROFILE_MPEG4_SIMPLE: return MPEG4Simple;
                case FF_PROFILE_MPEG4_SIMPLE_SCALABLE: return MPEG4SimpleScaleable;
                case FF_PROFILE_MPEG4_CORE: return MPEG4Core;
                case FF_PROFILE_MPEG4_MAIN: return MPEG4Main;
                case FF_PROFILE_MPEG4_N_BIT: return MPEG4NBit;
                case FF_PROFILE_MPEG4_SCALABLE_TEXTURE: return MPEG4ScaleableTexture;
                case FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION: return MPEG4SimpleFace;
                case FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE: return MPEG4BasicAnimated;
                case FF_PROFILE_MPEG4_HYBRID: return MPEG4Hybrid;
                case FF_PROFILE_MPEG4_ADVANCED_REAL_TIME: return MPEG4AdvancedRT;
                case FF_PROFILE_MPEG4_CORE_SCALABLE: return MPEG4CoreScaleable;
                case FF_PROFILE_MPEG4_ADVANCED_CODING: return MPEG4AdvancedCoding;
                case FF_PROFILE_MPEG4_ADVANCED_CORE: return MPEG4AdvancedCore;
                case FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE: return MPEG4AdvancedScaleableTexture;
                case FF_PROFILE_MPEG4_SIMPLE_STUDIO: return MPEG4SimpleStudio;
                case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: return MPEG4AdvancedSimple;
            }
            break;
        case AV_CODEC_ID_H263: return H263;
        case AV_CODEC_ID_H264:
            switch (Profile)
            {
                // Mapping of H264MainExtended, H264ConstrainedHigh?
                case FF_PROFILE_H264_BASELINE: return H264Baseline;
                case FF_PROFILE_H264_CONSTRAINED_BASELINE: return H264ConstrainedBaseline;
                case FF_PROFILE_H264_MAIN: return H264Main;
                case FF_PROFILE_H264_EXTENDED: return H264Extended;
                case FF_PROFILE_H264_HIGH: return H264High;
                case FF_PROFILE_H264_HIGH_10: return H264High10;
                //case FF_PROFILE_H264_HIGH_10_INTRA:
                //case FF_PROFILE_H264_MULTIVIEW_HIGH:
                case FF_PROFILE_H264_HIGH_422: return H264High422;
                //case FF_PROFILE_H264_HIGH_422_INTRA:
                //case FF_PROFILE_H264_STEREO_HIGH:
                case FF_PROFILE_H264_HIGH_444: return H264High444;
                //case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
                //case FF_PROFILE_H264_HIGH_444_INTRA:
                //case FF_PROFILE_H264_CAVLC_444:
            }
            break;
        case AV_CODEC_ID_HEVC:
            switch (Profile)
            {
                case FF_PROFILE_HEVC_MAIN: return HEVCMain;
                case FF_PROFILE_HEVC_MAIN_10: return HEVCMain10;
                case FF_PROFILE_HEVC_MAIN_STILL_PICTURE: return HEVCMainStill;
                case FF_PROFILE_HEVC_REXT: return HEVCRext;
            }
            break;
        case AV_CODEC_ID_VC1:
            switch (Profile)
            {
                case FF_PROFILE_VC1_SIMPLE: return VC1Simple;
                case FF_PROFILE_VC1_MAIN: return VC1Main;
                case FF_PROFILE_VC1_COMPLEX: return VC1Complex;
                case FF_PROFILE_VC1_ADVANCED: return VC1Advanced;
            }
            break;
        case AV_CODEC_ID_VP8: return VP8;
        case AV_CODEC_ID_VP9:
            switch (Profile)
            {
                case FF_PROFILE_VP9_0: return VP9_0;
                case FF_PROFILE_VP9_1: return VP9_1;
                case FF_PROFILE_VP9_2: return VP9_2;
                case FF_PROFILE_VP9_3: return VP9_3;
            }
            break;
        case AV_CODEC_ID_AV1:
            switch (Profile)
            {
                case FF_PROFILE_AV1_MAIN: return AV1Main;
                case FF_PROFILE_AV1_HIGH: return AV1High;
                case FF_PROFILE_AV1_PROFESSIONAL: return AV1Professional;
            }
            break;
        case AV_CODEC_ID_MJPEG: return MJPEG;
        default: break;
    }

    return NoProfile;
}

QString MythCodecContext::GetProfileDescription(CodecProfile Profile, QSize Size)
{
    QString profile;
    switch (Profile)
    {
        case NoProfile: profile = QObject::tr("Unknown/Unsupported"); break;
        case MPEG2: profile = "MPEG2"; break;
        case MPEG2Simple: profile = "MPEG2 Simple"; break;
        case MPEG2Main: profile = "MPEG2 Main"; break;
        case MPEG2422: profile = "MPEG2 422"; break;
        case MPEG2High: profile = "MPEG2 High"; break;
        case MPEG2Spatial: profile = "MPEG2 Spatial"; break;
        case MPEG2SNR: profile = "MPEG2 SNR"; break;
        case MPEG4: profile = "MPEG4"; break;
        case MPEG4Simple: profile = "MPEG4 Simple"; break;
        case MPEG4SimpleScaleable: profile = "MPEG4 Simple Scaleable"; break;
        case MPEG4Core: profile = "MPEG4 Core"; break;
        case MPEG4Main: profile = "MPEG4 Main"; break;
        case MPEG4NBit: profile = "MPEG4 NBit"; break;
        case MPEG4ScaleableTexture: profile = "MPEG4 Scaleable Texture"; break;
        case MPEG4SimpleFace: profile = "MPEG4 Simple Face"; break;
        case MPEG4BasicAnimated: profile = "MPEG4 Basic Animated"; break;
        case MPEG4Hybrid: profile = "MPEG4 Hybrid"; break;
        case MPEG4AdvancedRT: profile = "MPEG4 Advanced RT"; break;
        case MPEG4CoreScaleable: profile = "MPEG4 Core Scaleable"; break;
        case MPEG4AdvancedCoding: profile = "MPEG4 Advanced Coding"; break;
        case MPEG4AdvancedCore: profile = "MPEG4 Advanced Core"; break;
        case MPEG4AdvancedScaleableTexture: profile = "MPEG4 Advanced Scaleable Texture"; break;
        case MPEG4SimpleStudio: profile = "MPEG4 Simple Studio"; break;
        case MPEG4AdvancedSimple: profile = "MPEG4 Advanced Simple"; break;
        case H263: profile = "H263"; break;
        case H264: profile = "H264"; break;
        case H264Baseline: profile = "H264 Baseline"; break;
        case H264ConstrainedBaseline: profile = "H264 Constrained"; break;
        case H264Main: profile = "H264 Main"; break;
        case H264MainExtended: profile = "H264 Main Extended"; break;
        case H264High: profile = "H264 High"; break;
        case H264High10: profile = "H264 High10"; break;
        case H264Extended: profile = "H264 Extended"; break;
        case H264High422: profile = "H264 High 422"; break;
        case H264High444: profile = "H264 High 444"; break;
        case H264ConstrainedHigh: profile = "H264 Constrained High"; break;
        case HEVC: profile = "HEVC"; break;
        case HEVCMain: profile = "HEVC Main"; break;
        case HEVCMain10: profile = "HEVC Main10"; break;
        case HEVCMainStill: profile = "HEVC Main Still"; break;
        case HEVCRext: profile = "HEVC Rext"; break;
        case HEVCMain10HDR: profile = "HEVC Main10HDR"; break;
        case HEVCMain10HDRPlus: profile = "HEVC Main10HDRPlus"; break;
        case VC1: profile = "VC1"; break;
        case VC1Simple: profile = "VC1 Simple"; break;
        case VC1Main: profile = "VC1 Main"; break;
        case VC1Complex: profile = "VC1 Complex"; break;
        case VC1Advanced: profile = "VC1 Advanced"; break;
        case VP8: profile = "VP8"; break;
        case VP9: profile = "VP9"; break;
        case VP9_0: profile = "VP9 Level 0"; break;
        case VP9_1: profile = "VP9 Level 1"; break;
        case VP9_2: profile = "VP9 Level 2"; break;
        case VP9_2HDR: profile = "VP9 Level 2 HDR"; break;
        case VP9_2HDRPlus: profile = "VP9 Level 2 HDRPlus"; break;
        case VP9_3: profile = "VP9 Level 3"; break;
        case VP9_3HDR: profile = "VP9 Level 3 HDR"; break;
        case VP9_3HDRPlus: profile = "VP9 Level 3 HDRPlus"; break;
        case AV1: profile = "AV1"; break;
        case AV1Main: profile = "AV1 Main"; break;
        case AV1High: profile = "AV1 High"; break;
        case AV1Professional: profile = "AV1 Professional"; break;
        case MJPEG: profile = "MJPEG";
    }

    if (Size.isEmpty())
        return profile;

    return QObject::tr("%1 (Max size: %2x%3)").arg(profile).arg(Size.width()).arg(Size.height());
}
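
/* Example: the two profile helpers are intended to be used together, e.g. to log a
   human readable description of the stream being decoded. Sketch only; the log text
   is illustrative.

   \code
   MythCodecContext::CodecProfile profile =
       MythCodecContext::FFmpegToMythProfile(Context->codec_id, Context->profile);
   LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Decoding %1")
       .arg(MythCodecContext::GetProfileDescription(profile, QSize(Context->width, Context->height))));
   \endcode
*/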