Ticket #13230: openglvideo.cpp

File openglvideo.cpp, 62.4 KB (added by mspieth, 5 years ago)

my opengl video with mods, for reference

1// MythTV headers
2#include "openglvideo.h"
3#include "mythcontext.h"
4#include "tv.h"
5#include "mythrender_opengl.h"
6#include "mythavutil.h"
7
8#define LOC QString("GLVid: ")
9#define COLOUR_UNIFORM "m_colourMatrix"
10#define MYTHTV_YV12 0x8a20
11
12enum DisplayBuffer
13{
14    kDefaultBuffer,
15    kFrameBufferObject
16};
17
18class OpenGLFilter
19{
20    public:
21        vector<GLuint> fragmentPrograms;
22        uint           numInputs;
23        vector<GLuint> frameBuffers;
24        vector<GLuint> frameBufferTextures;
25        DisplayBuffer  outputBuffer;
26};
27
28/**
29 * \class OpenGLVideo
30 *  A class used to display video frames and associated imagery
31 *  using the OpenGL API.
32 *
33 *  The basic operational concept is to use a series of filter stages to
34 *  generate the desired video output, using limited software assistance
35 *  alongside OpenGL fragment programs (deinterlacing and YUV->RGB conversion),
36 *  FrameBuffer Objects (flexible GPU storage) and PixelBuffer Objects
37 *  (faster CPU->GPU memory transfers).
38 *
39 *  In the most basic case, for example, a YV12 frame pre-converted in software
40 *  to BGRA format is simply blitted to the frame buffer.
41 *  Currently, the most complicated example is the rendering of a standard
42 *  definition, interlaced frame to a high(er) definition display using
43 *  OpenGL (i.e. hardware based) deinterlacing, colourspace conversion and
44 *  bicubic upsampling.
45 *
46 *  Higher level tasks such as coordination between OpenGLVideo instances,
47 *  video buffer management, audio/video synchronisation etc. are handled by
48 *  the higher level classes VideoOutput and NuppelVideoPlayer. The bulk of
49 *  the lower level interface with the window system and OpenGL is handled by
50 *  MythRenderOpenGL.
51 *
52 *  N.B. Direct use of OpenGL calls is minimised to maintain platform
53 *  independence. The only member function where this is impractical is
54 *  PrepareFrame().
55 *
56 *  \warning Any direct OpenGL calls must be wrapped by calls to
57 *  gl_context->MakeCurrent(). Alternatively use the convenience class
58 *  OpenGLLocker.
59 */
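// Illustrative use (a sketch only - the real wiring is done by the VideoOutput
// classes, and 'render', 'colourspace', 'scan' etc. are placeholders):
//
//     OpenGLVideo video;
//     video.Init(render, colourspace, videoDim, videoDispDim,
//                visibleRect, displayRect, videoRect,
//                true, QString(""), false);
//     video.SetDeinterlacing(true);                  // optional
//     video.UpdateInputFrame(frame, false);          // upload a YV12 frame
//     video.PrepareFrame(frame->top_field_first, scan, false,
//                        frame->frameNumber, kStereoscopicModeNone, false);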
60
61/**
62 *  Create a new OpenGLVideo instance that must be initialised
63 *  with a call to OpenGLVideo::Init()
64 */
65
66OpenGLVideo::OpenGLVideo() :
67    gl_context(nullptr),      video_disp_dim(0,0),
68    video_dim(0,0),           viewportSize(0,0),
69    masterViewportSize(0,0),  display_visible_rect(0,0,0,0),
70    display_video_rect(0,0,0,0), video_rect(0,0,0,0),
71    frameBufferRect(0,0,0,0), hardwareDeinterlacing(false),
72    colourSpace(nullptr),     viewportControl(false),
73    inputTextureSize(0,0),    currentFrameNum(0),
74    inputUpdated(false),      refsNeeded(0),
75    textureRects(false),      textureType(GL_TEXTURE_2D),
76    helperTexture(0),         defaultUpsize(kGLFilterResize),
77    gl_features(0),           videoTextureType(GL_BGRA),
78    preferYCBCR(false)
79{
80}
81
82OpenGLVideo::~OpenGLVideo()
83{
84    OpenGLLocker ctx_lock(gl_context);
85    Teardown();
86}
87
88void OpenGLVideo::Teardown(void)
89{
90    if (helperTexture)
91        gl_context->DeleteTexture(helperTexture);
92    helperTexture = 0;
93
94    DeleteTextures(&inputTextures);
95    DeleteTextures(&referenceTextures);
96
97    while (!filters.empty())
98    {
99        RemoveFilter(filters.begin()->first);
100        filters.erase(filters.begin());
101    }
102}
103
104/**
105 *  \param glcontext          the MythRenderOpenGL object responsible for lower
106 *   level window and OpenGL context integration
107 *  \param colourspace        the colourspace management object
108 *  \param videoDim           the size of the video source
109 *  \param videoDispDim       the size of the display
110 *  \param displayVisibleRect the bounding rectangle of the OpenGL window
111 *  \param displayVideoRect   the bounding rectangle for the area to display
112 *   the video frame
113 *  \param videoRect          the portion of the video frame to display in
114 *   displayVideoRect
115 *  \param viewport_control   if true, this instance may permanently change
116 *   the OpenGL viewport
117 *  \param options            a string defining OpenGL features to disable
118 *  \param hw_accel           if true, a GPU decoder will copy frames directly
119 *   to an RGBA texture
120 */
121
122bool OpenGLVideo::Init(MythRenderOpenGL *glcontext, VideoColourSpace *colourspace,
123                       QSize videoDim, QSize videoDispDim,
124                       QRect displayVisibleRect,
125                       QRect displayVideoRect, QRect videoRect,
126                       bool viewport_control, QString options,
127                       bool hw_accel)
128{
129    if (!glcontext)
130        return false;
131
132    gl_context            = glcontext;
133    OpenGLLocker ctx_lock(gl_context);
134
135    video_dim             = videoDim;
136    video_disp_dim        = videoDispDim;
137    display_visible_rect  = displayVisibleRect;
138    display_video_rect    = displayVideoRect;
139    video_rect            = videoRect;
140    masterViewportSize    = display_visible_rect.size();
141    frameBufferRect       = QRect(QPoint(0,0), video_disp_dim);
142    softwareDeinterlacer  = "";
143    hardwareDeinterlacing = false;
144    colourSpace           = colourspace;
145    viewportControl       = viewport_control;
146    inputTextureSize      = QSize(0,0);
147    currentFrameNum       = -1;
148    inputUpdated          = false;
149
150    // OpenGL-Lite - use implementation specific extensions for updating frames
151    if (options.contains("preferycbcr"))
152        preferYCBCR = true;
153
154    // Set OpenGL feature support
155    gl_features = gl_context->GetFeatures();
156
157    if (viewportControl)
158        gl_context->SetFence();
159
160    SetViewPort(display_visible_rect.size());
161
162    bool glsl    = gl_features & kGLSL;
163    bool shaders = glsl || (gl_features & kGLExtFragProg);
164    bool fbos    = gl_features & kGLExtFBufObj;
165    bool pbos    = gl_features & kGLExtPBufObj;
166
167    #ifdef ANDROID
168    #define YV12DEFAULT false
169    #else
170    #define YV12DEFAULT true
171    #endif
172
173    bool yv12 = gCoreContext->GetBoolSetting("OpenGLYV12", YV12DEFAULT)
174        && !getenv("OPENGL_NOYV12");
175    bool uyvy = gCoreContext->GetBoolSetting("OpenGLUYVY", true)
176        && !getenv("OPENGL_NOUYVY");
177    bool ycbcr   = (gl_features & kGLMesaYCbCr) || (gl_features & kGLAppleYCbCr);
178
179    // warn about the lite profile when it offers no benefit
180    if (!ycbcr && preferYCBCR)
181    {
182        LOG(VB_GENERAL, LOG_WARNING, LOC +
183            "You have selected the opengl-lite profile but no required OpenGL "
184            "extensions are available.");
185    }
186
187    // decide on best video input texture format
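    // Priority as implemented below: raw RGBA for hardware-accelerated
    // decoders; Mesa/Apple YCbCr when shaders are unavailable or the
    // opengl-lite profile is preferred; GLSL YV12 (unless PBO+UYVY is
    // available); custom UYVY; plain BGRA as the last resort.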
188    videoTextureType = GL_BGRA;
189    if (hw_accel)
190        videoTextureType = GL_RGBA;
191    else if ((!shaders || preferYCBCR) && (gl_features & kGLMesaYCbCr))
192        videoTextureType = GL_YCBCR_MESA;
193    else if ((!shaders || preferYCBCR) && (gl_features & kGLAppleYCbCr))
194        videoTextureType = GL_YCBCR_422_APPLE;
195    else if (glsl && fbos && !(pbos && uyvy) && yv12)
196        videoTextureType = MYTHTV_YV12;
197    else if (shaders && fbos && uyvy)
198        videoTextureType = MYTHTV_UYVY;
199
200    // colourspace adjustments require shaders to operate on YUV textures
201    if ((GL_BGRA != videoTextureType) &&
202        (MYTHTV_UYVY != videoTextureType) &&
203        (MYTHTV_YV12 != videoTextureType))
204    {
205        colourSpace->SetSupportedAttributes(kPictureAttributeSupported_None);
206    }
207
208    // turn on bicubic filtering
209    if (options.contains("openglbicubic"))
210    {
211        if (shaders && fbos)
212            defaultUpsize = kGLFilterBicubic;
213        else
214            LOG(VB_PLAYBACK, LOG_ERR, LOC +
215                "No OpenGL feature support for Bicubic filter.");
216    }
217
218    // decide on best input texture type
219    if ((GL_RGBA != videoTextureType) &&
220        (MYTHTV_YV12 != videoTextureType) &&
221        (defaultUpsize != kGLFilterBicubic) &&
222        (gl_features & kGLExtRect))
223    {
224        textureType = gl_context->GetTextureType(textureRects);
225    }
226
227    // Create initial input texture and associated filter stage
228    GLuint tex = CreateVideoTexture(video_dim, inputTextureSize);
229    bool    ok = false;
230
231    if ((GL_BGRA == videoTextureType) || (MYTHTV_UYVY == videoTextureType))
232        ok = tex && AddFilter(kGLFilterYUV2RGB);
233    else if (MYTHTV_YV12 == videoTextureType)
234        ok = tex && AddFilter(kGLFilterYV12RGB);
235    else
236        ok = tex && AddFilter(kGLFilterResize);
237
238    if (ok)
239    {
240        if (GL_RGBA == videoTextureType)
241            LOG(VB_GENERAL, LOG_INFO, LOC + "Using raw RGBA input textures.");
242        else if ((GL_YCBCR_MESA == videoTextureType) ||
243                 (GL_YCBCR_422_APPLE == videoTextureType))
244            LOG(VB_GENERAL, LOG_INFO, LOC +
245                "Using YCbCr->BGRA input textures.");
246        else if (MYTHTV_YV12 == videoTextureType)
247            LOG(VB_GENERAL, LOG_INFO, LOC +
248                "Using YV12 input textures.");
249        else if (MYTHTV_UYVY == videoTextureType)
250            LOG(VB_GENERAL, LOG_INFO, LOC +
251                "Using custom UYVY input textures.");
252        else
253            LOG(VB_GENERAL, LOG_INFO, LOC +
254                "Using plain BGRA input textures.");
255        inputTextures.push_back(tex);
256    }
257    else
258        Teardown();
259
260    if (filters.empty())
261    {
262        LOG(VB_PLAYBACK, LOG_INFO, LOC +
263                "Failed to setup colourspace conversion.\n\t\t\t"
264                "Falling back to software conversion.\n\t\t\t"
265                "Any opengl filters will also be disabled.");
266
267        videoTextureType = GL_BGRA;
268        GLuint bgra32tex = CreateVideoTexture(video_dim, inputTextureSize);
269
270        if (bgra32tex && AddFilter(kGLFilterResize))
271        {
272            inputTextures.push_back(bgra32tex);
273            colourSpace->SetSupportedAttributes(kPictureAttributeSupported_None);
274        }
275        else
276        {
277            LOG(VB_GENERAL, LOG_ERR, LOC + "Fatal error");
278            Teardown();
279            return false;
280        }
281    }
282
283    bool mmx = false;
284#ifdef MMX
285    // cppcheck-suppress redundantAssignment
286    mmx = true;
287#endif
288
289    CheckResize(false);
290
291    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("MMX: %1 PBO: %2")
292            .arg(mmx).arg((gl_features & kGLExtPBufObj) > 0));
293
294    return true;
295}
296
297/**
298 *   Determine if the output is to be scaled at all and create or destroy
299 *   the appropriate filter as necessary.
300 */
301
302void OpenGLVideo::CheckResize(bool deinterlacing, bool allow)
303{
304    // to improve performance on slower cards
305    bool resize_up = ((video_disp_dim.height() < display_video_rect.height()) ||
306                     (video_disp_dim.width() < display_video_rect.width())) && allow;
307
308    // to ensure deinterlacing works correctly
309    bool resize_down = (video_disp_dim.height() > display_video_rect.height()) &&
310                        deinterlacing && allow;
311
312    // UYVY packed pixels must be sampled exactly and any overscan settings will
313    // break sampling - so always force an extra stage
314    resize_down |= videoTextureType == MYTHTV_UYVY;
315    // Extra stage needed on Fire Stick 4K (and possibly others) to avoid a blank screen during playback.
316    resize_down |= gCoreContext->GetBoolSetting("OpenGLExtraStage", false);
317
318    if (resize_up && (defaultUpsize == kGLFilterBicubic))
319    {
320        RemoveFilter(kGLFilterResize);
321        filters.erase(kGLFilterResize);
322        AddFilter(kGLFilterBicubic);
323        OptimiseFilters();
324        return;
325    }
326
327    if ((resize_up && (defaultUpsize == kGLFilterResize)) || resize_down)
328    {
329        RemoveFilter(kGLFilterBicubic);
330        filters.erase(kGLFilterBicubic);
331        AddFilter(kGLFilterResize);
332        OptimiseFilters();
333        return;
334    }
335
336    RemoveFilter(kGLFilterBicubic);
337    filters.erase(kGLFilterBicubic);
338    OptimiseFilters();
339}
340
341/**
342 *  Ensure the current chain of OpenGLFilters is logically correct
343 *  and has the resources required to complete rendering.
344 */
345
346bool OpenGLVideo::OptimiseFilters(void)
347{
348    glfilt_map_t::reverse_iterator it;
349
350    // add/remove required frame buffer objects
351    // and link filters
352    uint buffers_needed = 1;
353    bool last_filter    = true;
354    for (it = filters.rbegin(); it != filters.rend(); ++it)
355    {
356        if (!last_filter)
357        {
358            it->second->outputBuffer = kFrameBufferObject;
359            uint buffers_have = it->second->frameBuffers.size();
360            int buffers_diff = buffers_needed - buffers_have;
361            if (buffers_diff > 0)
362            {
363                uint tmp_buf, tmp_tex;
364                for (int i = 0; i < buffers_diff; i++)
365                {
366                    if (!AddFrameBuffer(tmp_buf, tmp_tex, video_disp_dim))
367                        return false;
368                    else
369                    {
370                        it->second->frameBuffers.push_back(tmp_buf);
371                        it->second->frameBufferTextures.push_back(tmp_tex);
372                    }
373                }
374            }
375            else if (buffers_diff < 0)
376            {
377                for (int i = 0; i > buffers_diff; i--)
378                {
379                    OpenGLFilter *filt = it->second;
380
381                    gl_context->DeleteFrameBuffer(
382                        filt->frameBuffers.back());
383                    gl_context->DeleteTexture(
384                        filt->frameBufferTextures.back());
385
386                    filt->frameBuffers.pop_back();
387                    filt->frameBufferTextures.pop_back();
388                }
389            }
390        }
391        else
392        {
393            it->second->outputBuffer = kDefaultBuffer;
394            last_filter = false;
395        }
396        buffers_needed = it->second->numInputs;
397    }
398
399    SetFiltering();
400
401    return true;
402}
403
404/**
405 *  Set the OpenGL texture mapping functions to optimise speed and quality.
406 */
407
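// Note: only the textures sampled by the final on-screen stage are filtered
// with GL_LINEAR; textures feeding intermediate stages use GL_NEAREST so that
// shader stages sample their source pixels exactly.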
408void OpenGLVideo::SetFiltering(void)
409{
410    if (filters.size() < 2)
411    {
412        SetTextureFilters(&inputTextures, GL_LINEAR, GL_CLAMP_TO_EDGE);
413        SetTextureFilters(&referenceTextures, GL_LINEAR, GL_CLAMP_TO_EDGE);
414        return;
415    }
416
417    SetTextureFilters(&inputTextures, GL_NEAREST, GL_CLAMP_TO_EDGE);
418    SetTextureFilters(&referenceTextures, GL_NEAREST, GL_CLAMP_TO_EDGE);
419
420    glfilt_map_t::reverse_iterator rit;
421    int last_filter = 0;
422
423    for (rit = filters.rbegin(); rit != filters.rend(); ++rit)
424    {
425        if (last_filter == 1)
426        {
427            SetTextureFilters(&(rit->second->frameBufferTextures),
428                              GL_LINEAR, GL_CLAMP_TO_EDGE);
429        }
430        else if (last_filter > 1)
431        {
432            SetTextureFilters(&(rit->second->frameBufferTextures),
433                              GL_NEAREST, GL_CLAMP_TO_EDGE);
434        }
435        ++last_filter;
436    }
437}
438
439/**
440 *  Add a new filter stage and create any additional resources needed.
441 */
442
443bool OpenGLVideo::AddFilter(OpenGLFilterType filter)
444{
445    if (filters.count(filter))
446        return true;
447
448    switch (filter)
449    {
450      case kGLFilterNone:
451          // Nothing to do. Prevents compiler warning.
452          break;
453
454      case kGLFilterResize:
455        if (!(gl_features & kGLExtFBufObj) && !filters.empty())
456        {
457            LOG(VB_PLAYBACK, LOG_ERR, LOC +
458                "GL_EXT_framebuffer_object not available "
459                "for scaling/resizing filter.");
460            return false;
461        }
462        break;
463
464      case kGLFilterBicubic:
465        if (!(gl_features & kGLExtFragProg) || !(gl_features & kGLExtFBufObj))
466        {
467            LOG(VB_PLAYBACK, LOG_ERR, LOC +
468                "Features not available for bicubic filter.");
469            return false;
470        }
471        break;
472
473      case kGLFilterYUV2RGB:
474        if (!(gl_features & kGLExtFragProg) && !(gl_features & kGLSL))
475        {
476            LOG(VB_PLAYBACK, LOG_ERR, LOC +
477                "No shader support for OpenGL deinterlacing.");
478            return false;
479        }
480        break;
481
482      case kGLFilterYV12RGB:
483        if (!(gl_features & kGLSL))
484        {
485            LOG(VB_PLAYBACK, LOG_ERR, LOC +
486                "No shader support for OpenGL deinterlacing.");
487            return false;
488        }
489        break;
490    }
491
492    bool success = true;
493
494    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Creating %1 filter.")
495            .arg(FilterToString(filter)));
496
497    OpenGLFilter *temp = new OpenGLFilter();
498
499    temp->numInputs = 1;
500    GLuint program = 0;
501
502    if (filter == kGLFilterBicubic)
503    {
504        if (helperTexture)
505            gl_context->DeleteTexture(helperTexture);
506
507        helperTexture = gl_context->CreateHelperTexture();
508        if (!helperTexture)
509            success = false;
510    }
511
512    if (success &&
513        (((filter != kGLFilterNone) && (filter != kGLFilterResize)) ||
514         ((gl_features & kGLSL) && (filter == kGLFilterResize))))
515    {
516        program = AddFragmentProgram(filter);
517        if (!program)
518            success = false;
519        else
520            temp->fragmentPrograms.push_back(program);
521    }
522
523    if (success)
524    {
525        temp->outputBuffer = kDefaultBuffer;
526        temp->frameBuffers.clear();
527        temp->frameBufferTextures.clear();
528        filters[filter] = temp;
529        temp = nullptr;
530        success &= OptimiseFilters();
531    }
532
533    if (!success)
534    {
535        RemoveFilter(filter);
536        filters.erase(filter);
537        delete temp; // If temp wasn't added to the filter list, we need to delete it here
538        return false;
539    }
540
541    return true;
542}
543
544bool OpenGLVideo::RemoveFilter(OpenGLFilterType filter)
545{
546    if (!filters.count(filter))
547        return true;
548
549    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Removing %1 filter")
550            .arg(FilterToString(filter)));
551
552    vector<GLuint> temp;
553    vector<GLuint>::iterator it;
554
555    temp = filters[filter]->fragmentPrograms;
556    for (it = temp.begin(); it != temp.end(); ++it)
557        gl_context->DeleteShaderObject(*it);
558    filters[filter]->fragmentPrograms.clear();
559
560    temp = filters[filter]->frameBuffers;
561    for (it = temp.begin(); it != temp.end(); ++it)
562        gl_context->DeleteFrameBuffer(*it);
563    filters[filter]->frameBuffers.clear();
564
565    DeleteTextures(&(filters[filter]->frameBufferTextures));
566
567    delete filters[filter];
568    filters[filter] = nullptr;
569
570    return true;
571}
572
573void OpenGLVideo::TearDownDeinterlacer(void)
574{
575    glfilt_map_t::iterator it;
576    if (filters.end() == (it = filters.find(kGLFilterYUV2RGB)) &&
577        filters.end() == (it = filters.find(kGLFilterYV12RGB)) )
578    {
579        return;
580    }
581
582    OpenGLFilter *tmp = it->second;
583
584    if (tmp->fragmentPrograms.size() == 3)
585    {
586        gl_context->DeleteShaderObject(tmp->fragmentPrograms[2]);
587        tmp->fragmentPrograms.pop_back();
588    }
589
590    if (tmp->fragmentPrograms.size() == 2)
591    {
592        gl_context->DeleteShaderObject(tmp->fragmentPrograms[1]);
593        tmp->fragmentPrograms.pop_back();
594    }
595
596    DeleteTextures(&referenceTextures);
597    refsNeeded = 0;
598}
599
600/**
601 *  Extends the functionality of the basic YUV->RGB filter stage to include
602 *  deinterlacing (combining the stages is significantly more efficient than
603 *  2 separate stages). Create 2 deinterlacing fragment programs, 1 for each
604 *  required field.
605 */
606
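// Deinterlacers recognised here and in GetProgramString/GetProgramStrings:
// openglbobdeint, openglonefield, opengllinearblend, openglkerneldeint and the
// opengldoublerate{fieldorder,linearblend,kerneldeint} variants. Only the
// kernel deinterlacers require the extra reference textures.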
607bool OpenGLVideo::AddDeinterlacer(const QString &deinterlacer)
608{
609    if (!(gl_features & kGLExtFragProg) && !(gl_features & kGLSL))
610    {
611        LOG(VB_PLAYBACK, LOG_ERR, LOC +
612            "No shader support for OpenGL deinterlacing.");
613        return false;
614    }
615
616    OpenGLLocker ctx_lock(gl_context);
617
618    if (filters.end() == filters.find(kGLFilterYUV2RGB) &&
619        filters.end() == filters.find(kGLFilterYV12RGB) )
620    {
621        LOG(VB_PLAYBACK, LOG_ERR, LOC +
622            "No YUV2RGB filter stage for OpenGL deinterlacing.");
623        return false;
624    }
625
626    if (hardwareDeinterlacer == deinterlacer)
627        return true;
628
629    TearDownDeinterlacer();
630
631    bool success = true;
632
633    uint ref_size = 2;
634
635    if (deinterlacer == "openglbobdeint" ||
636        deinterlacer == "openglonefield" ||
637        deinterlacer == "opengllinearblend" ||
638        deinterlacer == "opengldoubleratelinearblend" ||
639        deinterlacer == "opengldoubleratefieldorder")
640    {
641        ref_size = 0;
642    }
643
644    refsNeeded = ref_size;
645    if (ref_size > 0)
646    {
647        for (; ref_size > 0; ref_size--)
648        {
649            GLuint tex = CreateVideoTexture(video_dim, inputTextureSize);
650            if (tex)
651            {
652                referenceTextures.push_back(tex);
653            }
654            else
655            {
656                success = false;
657            }
658        }
659    }
660
661    OpenGLFilterType type = (MYTHTV_YV12 == videoTextureType) ?
662                            kGLFilterYV12RGB : kGLFilterYUV2RGB;
663
664    uint prog1 = AddFragmentProgram(type, deinterlacer, kScan_Interlaced);
665    uint prog2 = AddFragmentProgram(type, deinterlacer, kScan_Intr2ndField);
666
667    if (prog1 && prog2)
668    {
669        filters[type]->fragmentPrograms.push_back(prog1);
670        filters[type]->fragmentPrograms.push_back(prog2);
671    }
672    else
673    {
674        success = false;
675    }
676
677    if (success)
678    {
679        CheckResize(hardwareDeinterlacing);
680        hardwareDeinterlacer = deinterlacer;
681        return true;
682    }
683
684    hardwareDeinterlacer = "";
685    TearDownDeinterlacer();
686
687    return false;
688}
689
690/**
691 *  Create the correct fragment program for the given filter type
692 */
693
694uint OpenGLVideo::AddFragmentProgram(OpenGLFilterType name,
695                                     QString deint, FrameScanType field)
696{
697    if (!gl_context)
698        return 0;
699
700    QString vertex, fragment;
701    if (gl_features & kGLSL)
702    {
703        GetProgramStrings(vertex, fragment, name, deint, field);
704    }
705    else if (gl_features & kGLExtFragProg)
706    {
707        fragment = GetProgramString(name, deint, field);
708    }
709    else
710    {
711        LOG(VB_PLAYBACK, LOG_ERR, LOC + "No OpenGL shader/program support");
712        return 0;
713    }
714
715    return gl_context->CreateShaderObject(vertex, fragment);
716}
717
718/**
719 *  Add a FrameBuffer object of the correct size to the given texture.
720 */
721
722bool OpenGLVideo::AddFrameBuffer(uint &framebuffer,
723                                 uint &texture, QSize vid_size)
724{
725    if (!(gl_features & kGLExtFBufObj))
726    {
727        LOG(VB_PLAYBACK, LOG_ERR, LOC + "Framebuffer binding not supported.");
728        return false;
729    }
730
731    texture = gl_context->CreateTexture(vid_size, false, textureType);
732
733    bool ok = gl_context->CreateFrameBuffer(framebuffer, texture);
734
735    if (!ok)
736        gl_context->DeleteTexture(texture);
737
738    return ok;
739}
740
741void OpenGLVideo::SetViewPort(const QSize &viewPortSize)
742{
743    uint w = max(viewPortSize.width(),  video_disp_dim.width());
744    uint h = max(viewPortSize.height(), video_disp_dim.height());
745
746    viewportSize = QSize(w, h);
747
748    if (!viewportControl)
749        return;
750
751    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Viewport: %1x%2") .arg(w).arg(h));
752    gl_context->SetViewPort(QRect(QPoint(),viewportSize));
753}
754
755/**
756 *  Create and initialise an OpenGL texture suitable for a YV12 video frame
757 *  of the given size.
758 */
759
760uint OpenGLVideo::CreateVideoTexture(QSize size, QSize &tex_size)
761{
762    uint tmp_tex = 0;
763    bool use_pbo = gl_features & kGLExtPBufObj;
764    if (GL_YCBCR_MESA == videoTextureType)
765    {
766        tmp_tex = gl_context->CreateTexture(size, use_pbo, textureType,
767                                            GL_UNSIGNED_SHORT_8_8_MESA,
768                                            GL_YCBCR_MESA, GL_RGBA);
769    }
770    else if (GL_YCBCR_422_APPLE == videoTextureType)
771    {
772        tmp_tex = gl_context->CreateTexture(size, use_pbo, textureType,
773                                            GL_UNSIGNED_SHORT_8_8_MESA,
774                                            GL_YCBCR_422_APPLE, GL_RGBA);
775    }
776    else if (MYTHTV_UYVY == videoTextureType)
777    {
778        QSize fix(size.width() / 2, size.height());
779        tmp_tex = gl_context->CreateTexture(fix, use_pbo, textureType,
780                                            GL_UNSIGNED_BYTE, GL_RGBA, GL_RGBA);
781    }
782    else if (MYTHTV_YV12 == videoTextureType)
783    {
784        // 4:2:0 YVU planar (12bpp)
785        size.setHeight((3 * size.height() + 1) / 2);
786        tmp_tex = gl_context->CreateTexture(size, use_pbo, textureType,
787                                            GL_UNSIGNED_BYTE,   // data_type
788                                            GL_LUMINANCE,       // data_fmt
789                                            GL_LUMINANCE        // internal_fmt
790                                            );
791    }
792    else
793        tmp_tex = gl_context->CreateTexture(size, use_pbo, textureType);
794
795    tex_size = gl_context->GetTextureSize(textureType, size);
796    if (!tmp_tex)
797        return 0;
798
799    return tmp_tex;
800}
801
802QSize OpenGLVideo::GetTextureSize(const QSize &size)
803{
804    if (textureRects)
805        return size;
806
807    int w = 64;
808    int h = 64;
809
810    while (w < size.width())
811    {
812        w *= 2;
813    }
814
815    while (h < size.height())
816    {
817        h *= 2;
818    }
819
820    return QSize(w, h);
821}
822
823uint OpenGLVideo::GetInputTexture(void) const
824{
825    return inputTextures[0];
826}
827
828uint OpenGLVideo::GetTextureType(void) const
829{
830    return textureType;
831}
832
833void OpenGLVideo::SetInputUpdated(void)
834{
835    inputUpdated = true;
836}
837
838/**
839 *  Update the current input texture using the data from the given YV12 video
840 *  frame. If the required hardware support is not available, fall back to
841 *  software YUV->RGB conversion.
842 */
843
844void OpenGLVideo::UpdateInputFrame(const VideoFrame *frame, bool soft_bob)
845{
846    OpenGLLocker ctx_lock(gl_context);
847
848    if (frame->width  != video_dim.width()  ||
849        frame->height != video_dim.height() ||
850        frame->width  < 1 || frame->height < 1 ||
851        frame->codec != FMT_YV12)
852    {
853        return;
854    }
855    if (hardwareDeinterlacing)
856        RotateTextures();
857
858    // We need to convert frames here to avoid dependencies in MythRenderOpenGL
859    void* buf = gl_context->GetTextureBuffer(inputTextures[0]);
860    if (!buf)
861        return;
862
863    if (MYTHTV_YV12 == videoTextureType)
864    {
865
866        if (gl_features & kGLExtPBufObj)
867        {
868            // Copy the frame to the pixel buffer which updates the texture
869            copybuffer((uint8_t*)buf, frame, video_dim.width());
870        }
871        else if (video_dim.width() != frame->pitches[0])
872        {
873            // Re-packing is needed
874            copybuffer((uint8_t*)buf, frame, video_dim.width());
875        }
876        else
877        {
878            // UpdateTexture will copy the frame to the texture
879            buf = frame->buf;
880        }
881    }
882    else if (!filters.count(kGLFilterYUV2RGB) ||
883        MYTHTV_UYVY == videoTextureType)
884    {
885        // software conversion
886        AVFrame img_out;
887        AVPixelFormat out_fmt = AV_PIX_FMT_BGRA;
888        if ((GL_YCBCR_MESA == videoTextureType) ||
889            (GL_YCBCR_422_APPLE == videoTextureType) ||
890            (MYTHTV_UYVY == videoTextureType))
891        {
892            out_fmt = AV_PIX_FMT_UYVY422;
893        }
894        m_copyCtx.Copy(&img_out, frame, (unsigned char*)buf, out_fmt);
895    }
896    else if (frame->interlaced_frame && !soft_bob)
897    {
898        pack_yv12interlaced(frame->buf, (unsigned char*)buf, frame->offsets,
899                            frame->pitches, video_dim);
900    }
901    else
902    {
903        pack_yv12progressive(frame->buf, (unsigned char*)buf, frame->offsets,
904                             frame->pitches, video_dim);
905    }
906
907    gl_context->UpdateTexture(inputTextures[0], buf);
908    inputUpdated = true;
909}
910
911void OpenGLVideo::SetDeinterlacing(bool deinterlacing)
912{
913    hardwareDeinterlacing = deinterlacing;
914    OpenGLLocker ctx_lock(gl_context);
915    CheckResize(hardwareDeinterlacing);
916}
917
918void OpenGLVideo::SetSoftwareDeinterlacer(const QString &filter)
919{
920    if (softwareDeinterlacer != filter)
921        CheckResize(false, filter != "bobdeint");
922    softwareDeinterlacer = filter;
923    softwareDeinterlacer.detach();
924}
925
926/**
927 *  Render the contents of the current input texture to the framebuffer
928 *  using the currently enabled filters.
929 *  \param topfieldfirst         the frame is interlaced and top_field_first
930 *                               is set
931 *  \param scan                  interlaced or progressive?
932 *  \param softwareDeinterlacing the frame has been deinterlaced in software
933 *  \param frame                 the frame number
934 *  \param stereo                Whether/how to drop stereo video information
935 *  \param draw_border           if true, draw a red border around the frame
936 *  \warning This function is a finely tuned, sensitive beast. Tinker at
937 *   your own risk.
938 */
939
940void OpenGLVideo::PrepareFrame(bool topfieldfirst, FrameScanType scan,
941                               bool softwareDeinterlacing,
942                               long long frame, StereoscopicMode stereo,
943                               bool draw_border)
944{
945    if (inputTextures.empty() || filters.empty())
946        return;
947
948    OpenGLLocker ctx_lock(gl_context);
949
950    // we need to special case software bobdeint for 1080i
951    bool softwarebob = softwareDeinterlacer == "bobdeint" &&
952                       softwareDeinterlacing;
953
954    vector<GLuint> inputs = inputTextures;
955    QSize inputsize = inputTextureSize;
956    QSize realsize  = GetTextureSize(video_disp_dim);
957
958//#define RECT_TEST 1
959#define LOG_RECT(r, s) \
960        if (currentFrameNum <= 0) {   \
961            LOG(VB_GENERAL, LOG_INFO, LOC + \
962                QString("PrepareFrame %1 %2 %3,%4,%5,%6")  \
963                .arg(s) \
964                .arg(currentFrameNum)   \
965                .arg(r.left())  \
966                .arg(r.top())   \
967                .arg(r.right()) \
968                .arg(r.bottom())); }
969#define LOG_SIZE(r, s) \
970        if (currentFrameNum <= 0) {   \
971            LOG(VB_GENERAL, LOG_INFO, LOC + \
972                QString("PrepareFrame %1 %2 %3,%4")  \
973                .arg(s) \
974                .arg(currentFrameNum)   \
975                .arg(r.width())  \
976                .arg(r.height())); }
977#define LOG_TINFO(s) \
978        if (currentFrameNum <= 0) {   \
979            LOG(VB_GENERAL, LOG_INFO, LOC + s ); }
980
981    if (currentFrameNum <= 0)
982    {
983        LOG(VB_GENERAL, LOG_INFO, LOC +
984            QString("PrepareFrame frame num %1").arg(frame));
985    }
986    LOG_SIZE(video_dim, "A video_dim");
987    LOG_SIZE(video_disp_dim, "A video_disp_dim");
988
989    glfilt_map_t::iterator it;
990    for (it = filters.begin(); it != filters.end(); ++it)
991    {
992        OpenGLFilterType type = it->first;
993        OpenGLFilter *filter = it->second;
994
995        bool actual = softwarebob && (filter->outputBuffer == kDefaultBuffer);
996
997        if (currentFrameNum <= 0)
998        {
999            LOG(VB_GENERAL, LOG_INFO, LOC +
1000                QString("PrepareFrame filter %1").arg(FilterToString(type)));
1001        }
1002        // texture coordinates
1003        float trueheight = (float)(actual ? video_dim.height() :
1004                                            video_disp_dim.height());
1005        float width = video_disp_dim.width();
1006        if ((type == kGLFilterYUV2RGB) && (videoTextureType == MYTHTV_UYVY))
1007            width /= 2.0f;
1008
1009        QRectF trect(QPoint(0, 0), QSize(width, trueheight));
1010
1011        LOG_RECT(trect, "A trect");
1012        // only apply overscan on last filter
1013        if (filter->outputBuffer == kDefaultBuffer)
1014        {
1015            trect.setRect(video_rect.left(),  video_rect.top(),
1016                          video_rect.width(), video_rect.height());
1017            LOG_RECT(trect, "B trect default");
1018        }
1019
1020        if (!textureRects && (inputsize.height() > 0))
1021            trueheight /= inputsize.height();
1022
1023        // software bobdeint
1024        if (actual)
1025        {
1026            bool top = (scan == kScan_Intr2ndField && topfieldfirst) ||
1027                       (scan == kScan_Interlaced && !topfieldfirst);
1028            bool bot = (scan == kScan_Interlaced && topfieldfirst) ||
1029                       (scan == kScan_Intr2ndField && !topfieldfirst);
1030            bool first = filters.size() < 2;
1031            float bob = (trueheight / (float)video_disp_dim.height()) / 4.0f;
1032            if ((top && !first) || (bot && first))
1033            {
1034                trect.setBottom(trect.bottom() / 2);
1035                trect.setTop(trect.top() / 2);
1036                LOG_RECT(trect, "C trect int a");
1037                trect.adjust(0, bob, 0, bob);
1038            }
1039            if ((bot && !first) || (top && first))
1040            {
1041                trect.setTop(static_cast<qreal>(trueheight / 2) + (trect.top() / 2));
1042                trect.setBottom(static_cast<qreal>(trueheight / 2) + (trect.bottom() / 2));
1043                LOG_RECT(trect, "D trect int b");
1044                trect.adjust(0, -bob, 0, -bob);
1045            }
1046        }
1047
1048        // discard stereoscopic fields
1049        if (filter->outputBuffer == kDefaultBuffer)
1050        {
1051            if (kStereoscopicModeSideBySideDiscard == stereo)
1052                trect = QRectF(trect.left() / 2.0,  trect.top(),
1053                               trect.width() / 2.0, trect.height());
1054            if (kStereoscopicModeTopAndBottomDiscard == stereo)
1055                trect = QRectF(trect.left(),  trect.top() / 2.0,
1056                               trect.width(), trect.height() / 2.0);
1057        }
1058
1059        // vertex coordinates
1060        QRect display = (filter->outputBuffer == kDefaultBuffer) ?
1061                         display_video_rect : frameBufferRect;
1062        QRect visible = (filter->outputBuffer == kDefaultBuffer) ?
1063                         display_visible_rect : frameBufferRect;
1064#ifdef RECT_TEST
1065        QRectF vrect(trect);
1066        draw_border = true;
1067#else
1068        QRectF vrect(display);
1069#endif
1070
1071#ifndef RECT_TEST
1072        // invert if first filter
1073        if (it == filters.begin())
1074        {
1075            LOG_RECT(display, "E display");
1076            LOG_RECT(visible, "F visible");
1077            if (filters.size() > 1)
1078            {
1079                vrect.setTop(visible.height() - display.top());
1080                vrect.setBottom(vrect.top() - display.height());
1081                LOG_RECT(vrect, "G vrect sf");
1082            }
1083            else
1084            {
1085                vrect.setBottom(display.top());
1086                vrect.setTop(display.top() + display.height());
1087                LOG_RECT(vrect, "H vrect bf");
1088            }
1089        }
1090#endif
1091
1092        // hardware bobdeint
1093        if (filter->outputBuffer == kDefaultBuffer &&
1094            hardwareDeinterlacing &&
1095            hardwareDeinterlacer == "openglbobdeint")
1096        {
1097            float bob = ((float)display.height() / (float)video_rect.height())
1098                        / 2.0f;
1099            float field = kScan_Interlaced ? -1.0f : 1.0f;
1100            bob = bob * (topfieldfirst ? field : -field);
1101            vrect.adjust(0, bob, 0, bob);
1102        }
1103
1104        uint target = 0;
1105        // bind correct frame buffer (default onscreen) and set viewport
1106        switch (filter->outputBuffer)
1107        {
1108            case kDefaultBuffer:
1109                gl_context->BindFramebuffer(0);
1110                if (viewportControl)
1111                    gl_context->SetViewPort(QRect(QPoint(), display_visible_rect.size()));
1112                else
1113                    gl_context->SetViewPort(QRect(QPoint(), masterViewportSize));
1114                break;
1115            case kFrameBufferObject:
1116                if (!filter->frameBuffers.empty())
1117                {
1118                    gl_context->BindFramebuffer(filter->frameBuffers[0]);
1119                    gl_context->SetViewPort(QRect(QPoint(), frameBufferRect.size()));
1120                    target = filter->frameBuffers[0];
1121                }
1122                break;
1123
1124            default:
1125                continue;
1126        }
1127
1128        if (draw_border && filter->outputBuffer == kDefaultBuffer)
1129        {
1130            LOG_RECT(vrect, "I vrect");
1131#ifdef RECT_TEST
1132            vrect = vrect.adjusted(+10, +10, +10, +10);
1133            QRectF piprectf = vrect.adjusted(-1, -1, +1, +1);
1134#else
1135            QRectF piprectf = vrect.adjusted(-10, -10, +10, +10);
1136#endif
1137            QRect  piprect(piprectf.left(), piprectf.top(),
1138                           piprectf.width(), piprectf.height());
1139            static const QPen nopen(Qt::NoPen);
1140            static const QBrush redbrush(QBrush(QColor(127, 0, 0, 255)));
1141            gl_context->DrawRect(piprect, redbrush, nopen, 255);
1142        }
1143
1144        // bind correct textures
1145        uint textures[4]; // NB: at most 1 input, 2 reference and 1 helper texture
1146        uint texture_count = 0;
1147        for (uint i = 0; i < inputs.size(); i++)
1148            textures[texture_count++] = inputs[i];
1149
1150        if (!referenceTextures.empty() &&
1151            hardwareDeinterlacing &&
1152            (type == kGLFilterYUV2RGB || type == kGLFilterYV12RGB))
1153        {
1154            for (uint i = 0; i < referenceTextures.size(); i++)
1155                textures[texture_count++] = referenceTextures[i];
1156        }
1157
1158        if (helperTexture && type == kGLFilterBicubic)
1159            textures[texture_count++] = helperTexture;
1160
1161        // enable fragment program and set any environment variables
1162        GLuint program = 0;
1163        if (((type != kGLFilterNone) && (type != kGLFilterResize)) ||
1164            ((gl_features & kGLSL) && (type == kGLFilterResize)))
1165        {
1166            GLuint prog_ref = 0;
1167
1168            if (type == kGLFilterYUV2RGB || type == kGLFilterYV12RGB)
1169            {
1170                if (hardwareDeinterlacing &&
1171                    filter->fragmentPrograms.size() == 3 &&
1172                    !refsNeeded)
1173                {
1174                    if (scan == kScan_Interlaced)
1175                        prog_ref = topfieldfirst ? 1 : 2;
1176                    else if (scan == kScan_Intr2ndField)
1177                        prog_ref = topfieldfirst ? 2 : 1;
1178                }
1179            }
1180            program = filter->fragmentPrograms[prog_ref];
1181        }
1182
1183        if (type == kGLFilterYUV2RGB || type == kGLFilterYV12RGB)
1184        {
1185            gl_context->SetShaderParams(program,
1186                GLMatrix4x4(reinterpret_cast<float*>(colourSpace->GetMatrix())),
1187                COLOUR_UNIFORM);
1188        }
1189
1190        LOG_RECT(trect, "Final trect");
1191        LOG_RECT(vrect, "Final vrect");
1192        gl_context->DrawBitmap(textures, texture_count, target, &trect, &vrect,
1193                               program);
1194
1195        inputs = filter->frameBufferTextures;
1196        inputsize = realsize;
1197    }
1198
1199    currentFrameNum = frame;
1200    inputUpdated = false;
1201}
1202
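/**
 *  Cycle the reference textures used by the kernel deinterlacers: the current
 *  input texture becomes the most recent reference and the oldest reference
 *  is recycled as the next input texture.
 */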
1203void OpenGLVideo::RotateTextures(void)
1204{
1205    if (referenceTextures.size() < 2)
1206        return;
1207
1208    if (refsNeeded > 0)
1209        refsNeeded--;
1210
1211    GLuint tmp = referenceTextures[referenceTextures.size() - 1];
1212
1213    for (uint i = referenceTextures.size() - 1; i > 0;  i--)
1214        referenceTextures[i] = referenceTextures[i - 1];
1215
1216    referenceTextures[0] = inputTextures[0];
1217    inputTextures[0] = tmp;
1218}
1219
1220void OpenGLVideo::DeleteTextures(vector<GLuint> *textures)
1221{
1222    if ((*textures).empty())
1223        return;
1224
1225    for (uint i = 0; i < (*textures).size(); i++)
1226        gl_context->DeleteTexture((*textures)[i]);
1227    (*textures).clear();
1228}
1229
1230void OpenGLVideo::SetTextureFilters(vector<GLuint> *textures,
1231                                    int filt, int wrap)
1232{
1233    if (textures->empty())
1234        return;
1235
1236    for (uint i = 0; i < textures->size(); i++)
1237        gl_context->SetTextureFilters((*textures)[i], filt, wrap);
1238}
1239
1240OpenGLVideo::OpenGLFilterType OpenGLVideo::StringToFilter(const QString &filter)
1241{
1242    OpenGLFilterType ret = kGLFilterNone;
1243
1244    if (filter.contains("master"))
1245        ret = kGLFilterYUV2RGB;
1246    else if (filter.contains("resize"))
1247        ret = kGLFilterResize;
1248    else if (filter.contains("bicubic"))
1249        ret = kGLFilterBicubic;
1250    else if (filter.contains("yv12rgb"))
1251        ret = kGLFilterYV12RGB;
1252
1253    return ret;
1254}
1255
1256QString OpenGLVideo::FilterToString(OpenGLFilterType filt)
1257{
1258    switch (filt)
1259    {
1260        case kGLFilterNone:
1261            break;
1262        case kGLFilterYUV2RGB:
1263            return "master";
1264        case kGLFilterResize:
1265            return "resize";
1266        case kGLFilterBicubic:
1267            return "bicubic";
1268        case kGLFilterYV12RGB:
1269            return "yv12rgb";
1270    }
1271
1272    return "";
1273}
1274
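// The %N placeholders in the following ARB fragment programs (and in the GLSL
// shaders further below) are substituted by CustomiseProgramString():
//   %1 texture target (2D/RECT), %2 field select scale, %3 one line height,
//   %4 two line heights, %5 column width, %6/%7 framebuffer width/height,
//   %8 column select scale (UYVY), %9 maximum sampling height (for clamping).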
1275static const QString attrib_fast =
1276"ATTRIB tex   = fragment.texcoord[0];\n"
1277"PARAM yuv[3] = { program.local[0..2] };\n";
1278
1279static const QString tex_fast =
1280"TEX res, tex, texture[0], %1;\n";
1281
1282static const QString var_fast =
1283"TEMP tmp, res;\n";
1284
1285static const QString var_col =
1286"TEMP col;\n";
1287
1288static const QString select_col =
1289"MUL col, tex.xxxx, %8;\n"
1290"FRC col, col;\n"
1291"SUB col, col, 0.5;\n"
1292"CMP res, col, res.rabg, res.rgba;\n";
1293
1294static const QString end_fast =
1295"DPH tmp.r, res.arbg, yuv[0];\n"
1296"DPH tmp.g, res.arbg, yuv[1];\n"
1297"DPH tmp.b, res.arbg, yuv[2];\n"
1298"MOV tmp.a, 1.0;\n"
1299"MOV result.color, tmp;\n";
1300
1301static const QString var_deint =
1302"TEMP other, current, mov, prev;\n";
1303
1304static const QString field_calc =
1305"MUL prev, tex.yyyy, %2;\n"
1306"FRC prev, prev;\n"
1307"SUB prev, prev, 0.5;\n";
1308
1309static const QString bobdeint[2] = {
1310field_calc +
1311"ADD other, tex, {0.0, %3, 0.0, 0.0};\n"
1312"MIN other, other, {10000.0, %9, 10000.0, 10000.0};\n"
1313"TEX other, other, texture[0], %1;\n"
1314"CMP res, prev, res, other;\n",
1315field_calc +
1316"SUB other, tex, {0.0, %3, 0.0, 0.0};\n"
1317"TEX other, other, texture[0], %1;\n"
1318"CMP res, prev, other, res;\n"
1319};
1320
1321static const QString deint_end_top =
1322"CMP res,  prev, current, other;\n";
1323
1324static const QString deint_end_bot =
1325"CMP res,  prev, other, current;\n";
1326
1327static const QString linearblend[2] = {
1328"TEX current, tex, texture[0], %1;\n"
1329"ADD other, tex, {0.0, %3, 0.0, 0.0};\n"
1330"MIN other, other, {10000.0, %9, 10000.0, 10000.0};\n"
1331"TEX other, other, texture[0], %1;\n"
1332"SUB mov, tex, {0.0, %3, 0.0, 0.0};\n"
1333"TEX mov, mov, texture[0], %1;\n"
1334"LRP other, 0.5, other, mov;\n"
1335+ field_calc + deint_end_top,
1336
1337"TEX current, tex, texture[0], %1;\n"
1338"SUB other, tex, {0.0, %3, 0.0, 0.0};\n"
1339"TEX other, other, texture[0], %1;\n"
1340"ADD mov, tex, {0.0, %3, 0.0, 0.0};\n"
1341"TEX mov, mov, texture[0], %1;\n"
1342"LRP other, 0.5, other, mov;\n"
1343+ field_calc + deint_end_bot
1344};
1345
1346static const QString kerneldeint[2] = {
1347"TEX current, tex, texture[1], %1;\n"
1348"TEX prev, tex, texture[2], %1;\n"
1349"MUL other, 0.125, prev;\n"
1350"MAD other, 0.125, current, other;\n"
1351"ADD prev, tex, {0.0, %3, 0.0, 0.0};\n"
1352"MIN prev, prev, {10000.0, %9, 10000.0, 10000.0};\n"
1353"TEX prev, prev, texture[1], %1;\n"
1354"MAD other, 0.5, prev, other;\n"
1355"SUB prev, tex, {0.0, %3, 0.0, 0.0};\n"
1356"TEX prev, prev, texture[1], %1;\n"
1357"MAD other, 0.5, prev, other;\n"
1358"ADD prev, tex, {0.0, %4, 0.0, 0.0};\n"
1359"TEX tmp, prev, texture[1], %1;\n"
1360"MAD other, -0.0625, tmp, other;\n"
1361"TEX tmp, prev, texture[2], %1;\n"
1362"MAD other, -0.0625, tmp, other;\n"
1363"SUB prev, tex, {0.0, %4, 0.0, 0.0};\n"
1364"TEX tmp, prev, texture[1], %1;\n"
1365"MAD other, -0.0625, tmp, other;\n"
1366"TEX tmp, prev, texture[2], %1;\n"
1367"MAD other, -0.0625, tmp, other;\n"
1368+ field_calc + deint_end_top,
1369
1370"TEX current, tex, texture[1], %1;\n"
1371"MUL other, 0.125, res;\n"
1372"MAD other, 0.125, current, other;\n"
1373"ADD prev, tex, {0.0, %3, 0.0, 0.0};\n"
1374"TEX prev, prev, texture[1], %1;\n"
1375"MAD other, 0.5, prev, other;\n"
1376"SUB prev, tex, {0.0, %3, 0.0, 0.0};\n"
1377"TEX prev, prev, texture[1], %1;\n"
1378"MAD other, 0.5, prev, other;\n"
1379"ADD prev, tex, {0.0, %4, 0.0, 0.0};\n"
1380"TEX tmp, prev, texture[1], %1;\n"
1381"MAD other, -0.0625, tmp, other;\n"
1382"TEX tmp, prev, texture[0], %1;\n"
1383"MAD other, -0.0625, tmp, other;\n"
1384"SUB prev, tex, {0.0, %4, 0.0, 0.0};\n"
1385"TEX tmp, prev, texture[1], %1;\n"
1386"MAD other, -0.0625, tmp, other;\n"
1387"TEX tmp, prev, texture[0], %1;\n"
1388"MAD other, -0.0625, tmp, other;\n"
1389+ field_calc + deint_end_bot
1390};
1391
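// Bicubic upsampling: texture[1] is the 1D helper texture created by
// MythRenderOpenGL::CreateHelperTexture() holding the filter offsets and blend
// weights; the result is built from four bilinear taps of the source texture.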
1392static const QString bicubic =
1393"TEMP coord, coord2, cdelta, parmx, parmy, a, b, c, d;\n"
1394"MAD coord.xy, fragment.texcoord[0], {%6, %7}, {0.5, 0.5};\n"
1395"TEX parmx, coord.x, texture[1], 1D;\n"
1396"TEX parmy, coord.y, texture[1], 1D;\n"
1397"MUL cdelta.xz, parmx.rrgg, {-%5, 0, %5, 0};\n"
1398"MUL cdelta.yw, parmy.rrgg, {0, -%3, 0, %3};\n"
1399"ADD coord, fragment.texcoord[0].xyxy, cdelta.xyxw;\n"
1400"ADD coord2, fragment.texcoord[0].xyxy, cdelta.zyzw;\n"
1401"TEX a, coord.xyxy, texture[0], 2D;\n"
1402"TEX b, coord.zwzw, texture[0], 2D;\n"
1403"TEX c, coord2.xyxy, texture[0], 2D;\n"
1404"TEX d, coord2.zwzw, texture[0], 2D;\n"
1405"LRP a, parmy.b, a, b;\n"
1406"LRP c, parmy.b, c, d;\n"
1407"LRP result.color, parmx.b, a, c;\n";
1408
1409QString OpenGLVideo::GetProgramString(OpenGLFilterType name,
1410                                      QString deint, FrameScanType field)
1411{
1412    QString ret =
1413        "!!ARBfp1.0\n"
1414        "OPTION ARB_precision_hint_fastest;\n";
1415
1416    switch (name)
1417    {
1418        case kGLFilterYUV2RGB:
1419        {
1420            bool need_tex = true;
1421            bool packed = MYTHTV_UYVY == videoTextureType;
1422            QString deint_bit = "";
1423            if (deint != "")
1424            {
1425                uint tmp_field = 0;
1426                if (field == kScan_Intr2ndField)
1427                    tmp_field = 1;
1428                if (deint == "openglbobdeint" ||
1429                    deint == "openglonefield" ||
1430                    deint == "opengldoubleratefieldorder")
1431                {
1432                    deint_bit = bobdeint[tmp_field];
1433                }
1434                else if (deint == "opengllinearblend" ||
1435                         deint == "opengldoubleratelinearblend")
1436                {
1437                    deint_bit = linearblend[tmp_field];
1438                    if (!tmp_field) { need_tex = false; }
1439                }
1440                else if (deint == "openglkerneldeint" ||
1441                         deint == "opengldoubleratekerneldeint")
1442                {
1443                    deint_bit = kerneldeint[tmp_field];
1444                    if (!tmp_field) { need_tex = false; }
1445                }
1446                else
1447                {
1448                    LOG(VB_PLAYBACK, LOG_ERR, LOC +
1449                        "Unrecognised OpenGL deinterlacer");
1450                }
1451            }
1452
1453            ret += attrib_fast;
1454            ret += (deint != "") ? var_deint : "";
1455            ret += packed ? var_col : "";
1456            ret += var_fast + (need_tex ? tex_fast : "");
1457            ret += deint_bit;
1458            ret += packed ? select_col : "";
1459            ret += end_fast;
1460        }
1461            break;
1462
1463        case kGLFilterNone:
1464        case kGLFilterResize:
1465            break;
1466
1467        case kGLFilterBicubic:
1468
1469            ret += bicubic;
1470            break;
1471
1472        case kGLFilterYV12RGB: // TODO: extend this for opengl1
1473        default:
1474            LOG(VB_PLAYBACK, LOG_ERR, LOC + "Unknown fragment program.");
1475            break;
1476    }
1477
1478    CustomiseProgramString(ret);
1479    ret += "END";
1480
1481    LOG(VB_PLAYBACK, LOG_INFO, LOC + QString("Created %1 fragment program %2")
1482                .arg(FilterToString(name)).arg(deint));
1483
1484    return ret;
1485}
1486
1487void OpenGLVideo::CustomiseProgramString(QString &string)
1488{
1489    string.replace("%1", textureRects ? "RECT" : "2D");
1490
1491    if (!textureRects)
1492    {
1493        string.replace("GLSL_SAMPLER", "sampler2D");
1494        string.replace("GLSL_TEXTURE", "texture2D");
1495    }
1496
1497    float lineHeight = 1.0f;
1498    float colWidth   = 1.0f;
1499    float yselect    = 1.0f;
1500    QSize fb_size = GetTextureSize(video_disp_dim);
1501
1502    LOG(VB_GENERAL, LOG_INFO, LOC +
1503        QString("CustomiseProgramString fbsize %1,%2").arg(fb_size.width()).arg(fb_size.height()));
1504    LOG(VB_GENERAL, LOG_INFO, LOC +
1505        QString("CustomiseProgramString inputTextureSize %1,%2 tr %3")
1506            .arg(inputTextureSize.width()).arg(inputTextureSize.height()).arg(textureRects));
1507
1508    if (!textureRects &&
1509       (inputTextureSize.height() > 0))
1510    {
1511        lineHeight /= inputTextureSize.height();
1512        colWidth   /= inputTextureSize.width();
1513        yselect    /= ((float)inputTextureSize.width() / 2.0f);
1514    LOG(VB_GENERAL, LOG_INFO, LOC +
1515        QString("CustomiseProgramString lineHeight %1 colWidth %2 yselect %3")
1516            .arg(QString::number(lineHeight, 'f', 16))
1517                .arg(QString::number(colWidth, 'f', 16))
1518                .arg(QString::number(yselect, 'f', 16)));
1519    }
1520
1521    float maxheight  = (float)(min(inputTextureSize.height(), 2160) - 1) *
1522                       lineHeight;
1523    float fieldSize = 1.0f / (lineHeight * 2.0f);
1524    LOG(VB_GENERAL, LOG_INFO, LOC +
1525        QString("CustomiseProgramString maxHeight %1 fieldSize %2 %3")
1526            .arg(maxheight).arg(fieldSize, 0, 'f', 8).arg(fieldSize, 0, 'f', 16));
1527
1528    string.replace("%2", QString::number(fieldSize, 'f', 8));
1529    string.replace("%3", QString::number(lineHeight, 'f', 16));
1530    string.replace("%4", QString::number(lineHeight * 2.0f, 'f', 16));
1531    string.replace("%5", QString::number(colWidth, 'f', 16));
1532    string.replace("%6", QString::number((float)fb_size.width(), 'f', 1));
1533    string.replace("%7", QString::number((float)fb_size.height(), 'f', 1));
1534    string.replace("%8", QString::number(1.0f / yselect, 'f', 16));
1535    // make sure truncation errors don't affect clamping
1536    string.replace("%9", QString::number(maxheight, 'f', 16));
1537
1538    float width = float(video_dim.width()) / inputTextureSize.width();
1539    string.replace("%WIDTH%", QString::number(width, 'f', 8));
1540
1541    float height = float(video_dim.height()) / inputTextureSize.height();
1542    string.replace("%HEIGHT%", QString::number(height, 'f', 8));
1543
1544    LOG(VB_GENERAL, LOG_INFO, LOC +
1545        QString("CustomiseProgramString width %1 height %2")
1546            .arg(width, 0, 'f', 8).arg(height, 0, 'f', 8));
1547
1548    string.replace("COLOUR_UNIFORM", COLOUR_UNIFORM);
1549}
1550
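// GLSL versions of the above. GLSL_DEFINES, GLSL_SAMPLER, GLSL_TEXTURE,
// SELECT_COLUMN and COLOUR_UNIFORM are textual placeholders: GLSL_SAMPLER,
// GLSL_TEXTURE and COLOUR_UNIFORM are handled in CustomiseProgramString(),
// the remainder (presumably) when the shaders are assembled in
// GetProgramStrings().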
1551static const QString YUV2RGBVertexShader =
1552"GLSL_DEFINES"
1553"attribute vec2 a_position;\n"
1554"attribute vec2 a_texcoord0;\n"
1555"varying   vec2 v_texcoord0;\n"
1556"uniform   mat4 u_projection;\n"
1557"void main() {\n"
1558"    gl_Position = u_projection * vec4(a_position, 0.0, 1.0);\n"
1559"    v_texcoord0 = a_texcoord0;\n"
1560"}\n";
1561
1562static const QString SelectColumn =
1563"    if (fract(v_texcoord0.x * %8) < 0.5)\n"
1564"        yuva = yuva.rabg;\n";
1565
1566static const QString YUV2RGBFragmentShader =
1567"GLSL_DEFINES"
1568"uniform GLSL_SAMPLER s_texture0;\n"
1569"uniform mat4 COLOUR_UNIFORM;\n"
1570"varying vec2 v_texcoord0;\n"
1571"void main(void)\n"
1572"{\n"
1573"    vec4 yuva    = GLSL_TEXTURE(s_texture0, v_texcoord0);\n"
1574"SELECT_COLUMN"
1575"    gl_FragColor = vec4(yuva.arb, 1.0) * COLOUR_UNIFORM;\n"
1576"}\n";
1577
1578static const QString OneFieldShader[2] = {
1579"GLSL_DEFINES"
1580"uniform GLSL_SAMPLER s_texture0;\n"
1581"uniform mat4 COLOUR_UNIFORM;\n"
1582"varying vec2 v_texcoord0;\n"
1583"void main(void)\n"
1584"{\n"
1585"    float field = v_texcoord0.y - (step(0.5, fract(v_texcoord0.y * %2)) * %3);\n"
1586"    field       = clamp(field, 0.0, %9);\n"
1587"    vec4 yuva   = GLSL_TEXTURE(s_texture0, vec2(v_texcoord0.x, field));\n"
1588"SELECT_COLUMN"
1589"    gl_FragColor = vec4(yuva.arb, 1.0) * COLOUR_UNIFORM;\n"
1590"}\n",
1591
1592"GLSL_DEFINES"
1593"uniform GLSL_SAMPLER s_texture0;\n"
1594"uniform mat4 COLOUR_UNIFORM;\n"
1595"varying vec2 v_texcoord0;\n"
1596"void main(void)\n"
1597"{\n"
1598"    vec2 field   = vec2(0.0, step(0.5, 1.0 - fract(v_texcoord0.y * %2)) * %3);\n"
1599"    vec4 yuva    = GLSL_TEXTURE(s_texture0, v_texcoord0 + field);\n"
1600"SELECT_COLUMN"
1601"    gl_FragColor = vec4(yuva.arb, 1.0) * COLOUR_UNIFORM;\n"
1602"}\n"
1603};
1604
1605static const QString LinearBlendShader[2] = {
1606"GLSL_DEFINES"
1607"uniform GLSL_SAMPLER s_texture0;\n"
1608"uniform mat4 COLOUR_UNIFORM;\n"
1609"varying vec2 v_texcoord0;\n"
1610"void main(void)\n"
1611"{\n"
1612"    vec2 line1 = vec2(v_texcoord0.x, clamp(v_texcoord0.y - %3, 0.0, %9));\n"
1613"    vec2 line2 = vec2(v_texcoord0.x, clamp(v_texcoord0.y + %3, 0.0, %9));\n"
1614"    vec4 yuva  = GLSL_TEXTURE(s_texture0, v_texcoord0);\n"
1615"    vec4 above = GLSL_TEXTURE(s_texture0, line2);\n"
1616"    vec4 below = GLSL_TEXTURE(s_texture0, line1);\n"
1617"    if (fract(v_texcoord0.y * %2) < 0.5)\n"
1618"        yuva = mix(above, below, 0.5);\n"
1619"SELECT_COLUMN"
1620"    gl_FragColor = vec4(yuva.arb, 1.0) * COLOUR_UNIFORM;\n"
1621"}\n",
1622
1623"GLSL_DEFINES"
1624"uniform GLSL_SAMPLER s_texture0;\n"
1625"uniform mat4 COLOUR_UNIFORM;\n"
1626"varying vec2 v_texcoord0;\n"
1627"void main(void)\n"
1628"{\n"
1629"    vec2 line1 = vec2(v_texcoord0.x, clamp(v_texcoord0.y - %3, 0.0, %9));\n"
1630"    vec2 line2 = vec2(v_texcoord0.x, clamp(v_texcoord0.y + %3, 0.0, %9));\n"
1631"    vec4 yuva  = GLSL_TEXTURE(s_texture0, v_texcoord0);\n"
1632"    vec4 above = GLSL_TEXTURE(s_texture0, line2);\n"
1633"    vec4 below = GLSL_TEXTURE(s_texture0, line1);\n"
1634"    if (fract(v_texcoord0.y * %2) >= 0.5)\n"
1635"        yuva = mix(above, below, 0.5);\n"
1636"SELECT_COLUMN"
1637"    gl_FragColor = vec4(yuva.arb, 1.0) * COLOUR_UNIFORM;\n"
1638"}\n"
1639};
1640
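// Kernel deinterlacers: a vertical filter combining five lines of one
// texture with three lines of the other (the reference field), using
// weights 0.5, 0.125 and -0.0625 that sum to 1.0. Only lines of the
// field being rebuilt are filtered; the other field passes through
// unchanged, and the two variants swap the roles of s_texture0 and
// s_texture1.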
1641static const QString KernelShader[2] = {
1642"GLSL_DEFINES"
1643"uniform GLSL_SAMPLER s_texture0;\n"
1644"uniform GLSL_SAMPLER s_texture1;\n"
1645"uniform mat4 COLOUR_UNIFORM;\n"
1646"varying vec2 v_texcoord0;\n"
1647"void main(void)\n"
1648"{\n"
1649"    vec4 yuva    = GLSL_TEXTURE(s_texture0, v_texcoord0);\n"
1650"    if (fract(v_texcoord0.y * %2) < 0.5)\n"
1651"    {\n"
1652"        vec2 twoup   = vec2(v_texcoord0.x, clamp(v_texcoord0.y - %4, 0.0, %9));\n"
1653"        vec2 twodown = vec2(v_texcoord0.x, clamp(v_texcoord0.y + %4, 0.0, %9));\n"
1654"        vec2 oneup   = vec2(v_texcoord0.x, clamp(v_texcoord0.y - %3, 0.0, %9));\n"
1655"        vec2 onedown = vec2(v_texcoord0.x, clamp(v_texcoord0.y + %3, 0.0, %9));\n"
1656"        vec4 line0   = GLSL_TEXTURE(s_texture0, twoup);\n"
1657"        vec4 line1   = GLSL_TEXTURE(s_texture0, oneup);\n"
1658"        vec4 line3   = GLSL_TEXTURE(s_texture0, onedown);\n"
1659"        vec4 line4   = GLSL_TEXTURE(s_texture0, twodown);\n"
1660"        vec4 line00  = GLSL_TEXTURE(s_texture1, twoup);\n"
1661"        vec4 line20  = GLSL_TEXTURE(s_texture1, v_texcoord0);\n"
1662"        vec4 line40  = GLSL_TEXTURE(s_texture1, twodown);\n"
1663"        yuva = (yuva   * 0.125);\n"
1664"        yuva = (line20 * 0.125) + yuva;\n"
1665"        yuva = (line1  * 0.5) + yuva;\n"
1666"        yuva = (line3  * 0.5) + yuva;\n"
1667"        yuva = (line0  * -0.0625) + yuva;\n"
1668"        yuva = (line4  * -0.0625) + yuva;\n"
1669"        yuva = (line00 * -0.0625) + yuva;\n"
1670"        yuva = (line40 * -0.0625) + yuva;\n"
1671"    }\n"
1672"SELECT_COLUMN"
1673"    gl_FragColor = vec4(yuva.arb, 1.0) * COLOUR_UNIFORM;\n"
1674"}\n",
1675
1676"GLSL_DEFINES"
1677"uniform GLSL_SAMPLER s_texture0;\n"
1678"uniform GLSL_SAMPLER s_texture1;\n"
1679"uniform mat4 COLOUR_UNIFORM;\n"
1680"varying vec2 v_texcoord0;\n"
1681"void main(void)\n"
1682"{\n"
1683"    vec4 yuva    = GLSL_TEXTURE(s_texture1, v_texcoord0);\n"
1684"    if (fract(v_texcoord0.y * %2) >= 0.5)\n"
1685"    {\n"
1686"        vec2 twoup   = vec2(v_texcoord0.x, clamp(v_texcoord0.y - %4, 0.0, %9));\n"
1687"        vec2 twodown = vec2(v_texcoord0.x, clamp(v_texcoord0.y + %4, 0.0, %9));\n"
1688"        vec2 oneup   = vec2(v_texcoord0.x, clamp(v_texcoord0.y - %3, 0.0, %9));\n"
1689"        vec2 onedown = vec2(v_texcoord0.x, clamp(v_texcoord0.y + %3, 0.0, %9));\n"
1690"        vec4 line0   = GLSL_TEXTURE(s_texture1, twoup);\n"
1691"        vec4 line1   = GLSL_TEXTURE(s_texture1, oneup);\n"
1692"        vec4 line3   = GLSL_TEXTURE(s_texture1, onedown);\n"
1693"        vec4 line4   = GLSL_TEXTURE(s_texture1, twodown);\n"
1694"        vec4 line00  = GLSL_TEXTURE(s_texture0, twoup);\n"
1695"        vec4 line20  = GLSL_TEXTURE(s_texture0, v_texcoord0);\n"
1696"        vec4 line40  = GLSL_TEXTURE(s_texture0, twodown);\n"
1697"        yuva = (yuva   * 0.125);\n"
1698"        yuva = (line20 * 0.125) + yuva;\n"
1699"        yuva = (line1  * 0.5) + yuva;\n"
1700"        yuva = (line3  * 0.5) + yuva;\n"
1701"        yuva = (line0  * -0.0625) + yuva;\n"
1702"        yuva = (line4  * -0.0625) + yuva;\n"
1703"        yuva = (line00 * -0.0625) + yuva;\n"
1704"        yuva = (line40 * -0.0625) + yuva;\n"
1705"    }\n"
1706"SELECT_COLUMN"
1707"    gl_FragColor = vec4(yuva.arb, 1.0) * COLOUR_UNIFORM;\n"
1708"}\n"
1709};
1710
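// Bicubic upsampler: s_texture1 is a 1D lookup texture indexed by the
// fractional source pixel position (the coordinate is scaled by the
// framebuffer size %6 x %7). Its x/y components give sampling offsets
// and z a blend weight, so the 4x4 bicubic kernel reduces to four
// bilinear fetches combined with mix(), with %5 / %3 supplying the
// horizontal / vertical texel steps (e_x, e_y).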
1711static const QString BicubicShader =
1712"GLSL_DEFINES"
1713"uniform sampler2D s_texture0;\n"
1714"uniform sampler1D s_texture1;\n"
1715"varying vec2 v_texcoord0;\n"
1716"void main(void)\n"
1717"{\n"
1718"    vec2 coord = (v_texcoord0 * vec2(%6, %7)) - vec2(0.5, 0.5);\n"
1719"    vec4 parmx = texture1D(s_texture1, coord.x);\n"
1720"    vec4 parmy = texture1D(s_texture1, coord.y);\n"
1721"    vec2 e_x = vec2(%5, 0.0);\n"
1722"    vec2 e_y = vec2(0.0, %3);\n"
1723"    vec2 coord10 = v_texcoord0 + parmx.x * e_x;\n"
1724"    vec2 coord00 = v_texcoord0 - parmx.y * e_x;\n"
1725"    vec2 coord11 = coord10     + parmy.x * e_y;\n"
1726"    vec2 coord01 = coord00     + parmy.x * e_y;\n"
1727"    coord10      = coord10     - parmy.y * e_y;\n"
1728"    coord00      = coord00     - parmy.y * e_y;\n"
1729"    vec4 tex00   = texture2D(s_texture0, coord00);\n"
1730"    vec4 tex10   = texture2D(s_texture0, coord10);\n"
1731"    vec4 tex01   = texture2D(s_texture0, coord01);\n"
1732"    vec4 tex11   = texture2D(s_texture0, coord11);\n"
1733"    tex00        = mix(tex00, tex01, parmy.z);\n"
1734"    tex10        = mix(tex10, tex11, parmy.z);\n"
1735"    gl_FragColor = mix(tex00, tex10, parmx.z);\n"
1736"}\n";
1737
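// Pass-through shader used by the resize stage (kGLFilterResize): samples
// the input texture and forces alpha to 1.0.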
1738static const QString DefaultFragmentShader =
1739"GLSL_DEFINES"
1740"uniform GLSL_SAMPLER s_texture0;\n"
1741"varying vec2 v_texcoord0;\n"
1742"void main(void)\n"
1743"{\n"
1744"    vec4 color   = GLSL_TEXTURE(s_texture0, v_texcoord0);\n"
1745"    gl_FragColor = vec4(color.xyz, 1.0);\n"
1746"}\n";
1747
1748static const QString YV12RGBVertexShader =
1749"//YV12RGBVertexShader\n"
1750"GLSL_DEFINES"
1751"attribute vec2 a_position;\n"
1752"attribute vec2 a_texcoord0;\n"
1753"varying   vec2 v_texcoord0;\n"
1754"uniform   mat4 u_projection;\n"
1755"void main() {\n"
1756"    gl_Position = u_projection * vec4(a_position, 0.0, 1.0);\n"
1757"    v_texcoord0 = a_texcoord0;\n"
1758"}\n";
1759
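// sampleYVU() reads a YV12 frame packed into a single texture: the
// full-resolution Y plane occupies the top of the texture up to %HEIGHT%,
// the quarter-sized V plane starts at %HEIGHT% and the U plane another
// %HEIGHT% / 4 below it, with two half-width chroma rows stored per
// texture row. The standard version switches to the right-hand half on
// alternate luma lines to pick the matching chroma row; the Android
// variant in this modified file omits that per-line column adjustment,
// presumably to match how the planes are uploaded on that path.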
1760#ifdef ANDROID
1761#define SAMPLEYVU "\
1762vec3 sampleYVU(in GLSL_SAMPLER texture, vec2 texcoordY)\n\
1763{\n\
1764    vec2 texcoordV, texcoordU;\n\
1765    texcoordV = vec2(texcoordY.s / 2.0, %HEIGHT% + texcoordY.t / 4.0);\n\
1766    texcoordU = vec2(texcoordV.s, texcoordV.t + %HEIGHT% / 4.0);\n\
1767    vec3 yvu;\n\
1768    yvu.r = GLSL_TEXTURE(texture, texcoordY).r;\n\
1769    yvu.g = GLSL_TEXTURE(texture, texcoordV).r;\n\
1770    yvu.b = GLSL_TEXTURE(texture, texcoordU).r;\n\
1771    return yvu;\n\
1772}\n"
1773#else
1774
1775#define SAMPLEYVU "\
1776vec3 sampleYVU(in GLSL_SAMPLER texture, vec2 texcoordY)\n\
1777{\n\
1778    vec2 texcoordV, texcoordU;\n\
1779    texcoordV = vec2(texcoordY.s / 2.0, %HEIGHT% + texcoordY.t / 4.0);\n\
1780    texcoordU = vec2(texcoordV.s, texcoordV.t + %HEIGHT% / 4.0);\n\
1781    if (fract(texcoordY.t * %2) >= 0.5)\n\
1782    {\n\
1783        texcoordV.s += %WIDTH% / 2.0;\n\
1784        texcoordU.s += %WIDTH% / 2.0;\n\
1785    }\n\
1786    vec3 yvu;\n\
1787    yvu.r = GLSL_TEXTURE(texture, texcoordY).r;\n\
1788    yvu.g = GLSL_TEXTURE(texture, texcoordV).r;\n\
1789    yvu.b = GLSL_TEXTURE(texture, texcoordU).r;\n\
1790    return yvu;\n\
1791}\n"
1792#endif
1793
1794static const QString YV12RGBFragmentShader =
1795"//YV12RGBFragmentShader\n"
1796"GLSL_DEFINES"
1797"uniform GLSL_SAMPLER s_texture0; // 4:1:1 YVU planar\n"
1798"uniform mat4 COLOUR_UNIFORM;\n"
1799"varying vec2 v_texcoord0;\n"
1800SAMPLEYVU
1801"void main(void)\n"
1802"{\n"
1803"    vec3 yvu = sampleYVU(s_texture0, v_texcoord0);\n"
1804"    gl_FragColor = vec4(yvu, 1.0) * COLOUR_UNIFORM;\n"
1805"}\n";
1806
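// For the YV12 path the one-field/bob deinterlace happens in the vertex
// shader: the texture coordinate is snapped onto the kept field before
// sampleYVU() runs, so the plain fragment shader above is reused.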
1807static const QString YV12RGBOneFieldVertexShader[2] = {
1808"//YV12RGBOneFieldVertexShader 1\n"
1809"GLSL_DEFINES"
1810"attribute vec2 a_position;\n"
1811"attribute vec2 a_texcoord0;\n"
1812"varying   vec2 v_texcoord0;\n"
1813"uniform   mat4 u_projection;\n"
1814"void main() {\n"
1815"    gl_Position = u_projection * vec4(a_position, 0.0, 1.0);\n"
1816"    v_texcoord0 = a_texcoord0;\n"
1817"    if (fract(v_texcoord0.t * %2) >= 0.5)\n"
1818"        v_texcoord0.t -= %3;\n"
1819"}\n",
1820
1821"//YV12RGBOneFieldVertexShader 2\n"
1822"GLSL_DEFINES"
1823"attribute vec2 a_position;\n"
1824"attribute vec2 a_texcoord0;\n"
1825"varying   vec2 v_texcoord0;\n"
1826"uniform   mat4 u_projection;\n"
1827"void main() {\n"
1828"    gl_Position = u_projection * vec4(a_position, 0.0, 1.0);\n"
1829"    v_texcoord0 = a_texcoord0;\n"
1830"    if (fract(v_texcoord0.t * %2) < 0.5)\n"
1831"    {\n"
1832"        v_texcoord0.t += %3;\n"
1833"        v_texcoord0.t = min(v_texcoord0.t, %HEIGHT% - %3);\n"
1834"    }\n"
1835"}\n"
1836};
1837
1838static const QString YV12RGBLinearBlendFragmentShader =
1839"//YV12RGBLinearBlendFragmentShader\n"
1840"GLSL_DEFINES"
1841"uniform GLSL_SAMPLER s_texture0; // 4:1:1 YVU planar\n"
1842"uniform mat4 COLOUR_UNIFORM;\n"
1843"varying vec2 v_texcoord0;\n"
1844SAMPLEYVU
1845"void main(void)\n"
1846"{\n"
1847"    vec2 texcoord;\n"
1848"    texcoord = v_texcoord0 - vec2(0.0, %3);\n"
1849"    vec3 yvu1 = sampleYVU(s_texture0, texcoord);\n"
1850"    vec3 yvu2 = sampleYVU(s_texture0, v_texcoord0);\n"
1851"    texcoord = v_texcoord0 + vec2(0.0, %3);\n"
1852"    texcoord.t = min(texcoord.t, %HEIGHT% - %3);\n"
1853"    vec3 yvu3 = sampleYVU(s_texture0, texcoord);\n"
1854"    vec3 yvu = (yvu1 + 2.0 * yvu2 + yvu3) / 4.0;\n"
1855"    gl_FragColor = vec4(yvu, 1.0) * COLOUR_UNIFORM;\n"
1856"}\n";
1857
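// kernelYVU() applies the same vertical kernel as KernelShader above
// (weights 0.5, 0.125 and -0.0625) to packed-YV12 samples, with
// s_texture1 providing the reference field and the downward offsets
// clamped to %HEIGHT% - %3 so sampling never leaves the Y plane.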
1858#define KERNELYVU "\
1859vec3 kernelYVU(in vec3 yvu)\n\
1860{\n\
1861    vec2 twoup   = v_texcoord0 - vec2(0.0, %4);\n\
1862    vec2 twodown = v_texcoord0 + vec2(0.0, %4);\n\
1863    twodown.t = min(twodown.t, %HEIGHT% - %3);\n\
1864    vec2 onedown = v_texcoord0 + vec2(0.0, %3);\n\
1865    onedown.t = min(onedown.t, %HEIGHT% - %3);\n\
1866    vec3 line0   = sampleYVU(s_texture0, twoup);\n\
1867    vec3 line1   = sampleYVU(s_texture0, v_texcoord0 - vec2(0.0, %3));\n\
1868    vec3 line3   = sampleYVU(s_texture0, onedown);\n\
1869    vec3 line4   = sampleYVU(s_texture0, twodown);\n\
1870    vec3 line00  = sampleYVU(s_texture1, twoup);\n\
1871    vec3 line20  = sampleYVU(s_texture1, v_texcoord0);\n\
1872    vec3 line40  = sampleYVU(s_texture1, twodown);\n\
1873    yvu *=           0.125;\n\
1874    yvu += line20 *  0.125;\n\
1875    yvu += line1  *  0.5;\n\
1876    yvu += line3  *  0.5;\n\
1877    yvu += line0  * -0.0625;\n\
1878    yvu += line4  * -0.0625;\n\
1879    yvu += line00 * -0.0625;\n\
1880    yvu += line40 * -0.0625;\n\
1881    return yvu;\n\
1882}\n"
1883
1884static const QString YV12RGBKernelShader[2] = {
1885"//YV12RGBKernelShader 1\n"
1886"GLSL_DEFINES"
1887"uniform GLSL_SAMPLER s_texture0, s_texture1; // 4:1:1 YVU planar\n"
1888"uniform mat4 COLOUR_UNIFORM;\n"
1889"varying vec2 v_texcoord0;\n"
1890SAMPLEYVU
1891KERNELYVU
1892"void main(void)\n"
1893"{\n"
1894"    vec3 yvu = sampleYVU(s_texture0, v_texcoord0);\n"
1895"    if (fract(v_texcoord0.t * %2) >= 0.5)\n"
1896"        yvu = kernelYVU(yvu);\n"
1897"    gl_FragColor = vec4(yvu, 1.0) * COLOUR_UNIFORM;\n"
1898"}\n",
1899
1900"//YV12RGBKernelShader 2\n"
1901"GLSL_DEFINES"
1902"uniform GLSL_SAMPLER s_texture0, s_texture1; // 4:1:1 YVU planar\n"
1903"uniform mat4 COLOUR_UNIFORM;\n"
1904"varying vec2 v_texcoord0;\n"
1905SAMPLEYVU
1906KERNELYVU
1907"void main(void)\n"
1908"{\n"
1909"    vec3 yvu = sampleYVU(s_texture0, v_texcoord0);\n"
1910"    if (fract(v_texcoord0.t * %2) < 0.5)\n"
1911"        yvu = kernelYVU(yvu);\n"
1912"    gl_FragColor = vec4(yvu, 1.0) * COLOUR_UNIFORM;\n"
1913"}\n"
1914};
1915
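// Selects the vertex and fragment sources for the requested filter and
// deinterlacer. 'bottom' picks the second-field variant of the paired
// shaders, SELECT_COLUMN is only expanded for packed UYVY textures, and
// both strings are finally passed through CustomiseProgramString() to
// resolve the placeholders.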
1916void OpenGLVideo::GetProgramStrings(QString &vertex, QString &fragment,
1917                                    OpenGLFilterType filter,
1918                                    QString deint, FrameScanType field)
1919{
1920    uint bottom = field == kScan_Intr2ndField;
1921    vertex = YUV2RGBVertexShader;
1922    switch (filter)
1923    {
1924        case kGLFilterYUV2RGB:
1925        {
1926            if (deint == "openglonefield" || deint == "openglbobdeint")
1927                fragment = OneFieldShader[bottom];
1928            else if (deint == "opengllinearblend" ||
1929                     deint == "opengldoubleratelinearblend")
1930                fragment = LinearBlendShader[bottom];
1931            else if (deint == "openglkerneldeint" ||
1932                     deint == "opengldoubleratekerneldeint")
1933                fragment = KernelShader[bottom];
1934            else
1935                fragment = YUV2RGBFragmentShader;
1936
1937            fragment.replace("SELECT_COLUMN", MYTHTV_UYVY == videoTextureType ?
1938                                              SelectColumn : "");
1939            break;
1940        }
1941        case kGLFilterYV12RGB:
1942            if (deint == "openglonefield" || deint == "openglbobdeint")
1943            {
1944                vertex = YV12RGBOneFieldVertexShader[bottom];
1945                fragment = YV12RGBFragmentShader;
1946            }
1947            else if (deint == "opengllinearblend" ||
1948                     deint == "opengldoubleratelinearblend")
1949            {
1950                vertex = YV12RGBVertexShader;
1951                fragment = YV12RGBLinearBlendFragmentShader;
1952            }
1953            else if (deint == "openglkerneldeint" ||
1954                     deint == "opengldoubleratekerneldeint")
1955            {
1956                vertex = YV12RGBVertexShader;
1957                fragment = YV12RGBKernelShader[bottom];
1958            }
1959            else
1960            {
1961                vertex = YV12RGBVertexShader;
1962                fragment = YV12RGBFragmentShader;
1963            }
1964            break;
1965        case kGLFilterNone:
1966            break;
1967        case kGLFilterResize:
1968            fragment = DefaultFragmentShader;
1969            break;
1970        case kGLFilterBicubic:
1971            fragment = BicubicShader;
1972            break;
1973        default:
1974            LOG(VB_PLAYBACK, LOG_ERR, LOC + "Unknown filter");
1975            break;
1976    }
1977    CustomiseProgramString(vertex);
1978    CustomiseProgramString(fragment);
1979}