__title__ = "Search all tree views"
__mashup_title__ = "mnvsearch"
__author__ = "R.D. Vaughan"
__version__ = "0.1.0"
__usage_examples__ = '''
Usage: ./mnvsearch.py -hduvlS [parameters] <search text>
Version: v0.1.0 Author: R.D.Vaughan

For details on the MythTV Netvision plugin see the wiki page at:
http://www.mythtv.org/wiki/MythNetvision

Options:
  -h, --help            show this help message and exit
  -d, --debug           Show debugging info (URLs, raw XML ... etc, info
                        varies per grabber)
  -u, --usage           Display examples for executing the script
  -v, --version         Display grabber name and supported options
  -l LANGUAGE, --language=LANGUAGE
                        Select data that matches the specified language, fall
                        back to English if nothing found (e.g. 'es' Español,
                        'de' Deutsch ... etc). Not all sites or grabbers
                        support this option.
  -p PAGE NUMBER, --pagenumber=PAGE NUMBER
                        Display specific page of the search results. Default
                        is page 1. Page number is ignored with the Tree View
                        option (-T).
  -S, --search          Search for videos

<name>Search all tree views</name>
<author>R.D.Vaughan</author>
<thumbnail>mnvsearch.png</thumbnail>
<description>MythNetvision treeview data base search</description>
<version>v0.11</version>

> ./mnvsearch.py -S "Doctor Who"
<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:cnettv="http://cnettv.com/mrss/" xmlns:creativeCommons="http://backend.userland.com/creativeCommonsRssModule" xmlns:media="http://search.yahoo.com/mrss/" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:amp="http://www.adobe.com/amp/1.0" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<title>Search all tree views</title>
<link>http://www.mythtv.org/wiki/MythNetvision</link>
<description>MythNetvision treeview data base search</description>
<numresults>21</numresults>
<returned>20</returned>
<startindex>20</startindex>
<title>Doctor Who - Doctor Who and The Brain of Morbius - Episode 8</title>
<pubDate>Sat, 01 May 2010 15:04:02 GMT</pubDate>
<description>The Doctor and Sarah Jane confront the Morbius monster and seek help from the Sisterhood.</description>
<link>file:///usr/local/share/mythtv/mythnetvision/scripts/nv_python_libs/configs/HTML/bbciplayer.html?videocode=b00s5ztx</link>
<media:thumbnail url="http://node1.bbcimg.co.uk/iplayer/images/episode/b00s5ztx_120_68.jpg"/>
<media:content url="file:///usr/local/share/mythtv/mythnetvision/scripts/nv_python_libs/configs/HTML/bbciplayer.html?videocode=b00s5ztx" length="" duration="" width="" height="" lang=""/>
<title>Every Doctor Who Story 1963-2008 - by Babelcolour</title>
<author>BabelColour</author>
<pubDate>Mon, 07 Jul 2008 14:45:12 GMT</pubDate>
<description>To celebrate the 45th Anniversary of the series, here is every Who story from 1963 to 2008, with the spin-off shows and bbci internet productions & the Children In Need specials, but doesn't include any of the spoofs, comedy sketches or other charity skits not made by the official Who Production Team. Edit: It was made & uploaded before the BBC Proms Special 'Music Of The Spheres'. That's why it isn't included! The fabulous music mix (called 'Whorythmics') was created by jex</description>
<link>http://www.youtube.com/v/lCZhlEdGIm0?f=videos&app=youtube_gdata&autoplay=1</link>
<media:thumbnail url="http://i.ytimg.com/vi/lCZhlEdGIm0/hqdefault.jpg"/>
<media:content url="http://www.youtube.com/v/lCZhlEdGIm0?f=videos&app=youtube_gdata&autoplay=1" length="" duration="" width="" height="" lang=""/>
<rating>4.957553</rating>
</item></channel></rss>
'''
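# The RSS/MRSS sample above documents the format this grabber writes to stdout.
# As an illustrative sketch only (not used by the grabber itself, and assuming
# well-formed XML input), the items in such output could be read back with the
# standard library's ElementTree. The function name is hypothetical; the element
# names and the media namespace URI are taken from the sample output above.
def _example_parse_items(rss_text):
    '''Illustrative only: return (title, link, thumbnail URL) tuples from grabber RSS output.'''
    import xml.etree.ElementTree as ET
    MEDIA = '{http://search.yahoo.com/mrss/}'   # xmlns:media from the <rss> element
    root = ET.fromstring(rss_text)
    results = []
    for item in root.findall('./channel/item'):
        thumb = item.find(MEDIA + 'thumbnail')
        results.append((item.findtext('title'),
                        item.findtext('link'),
                        thumb.get('url') if thumb is not None else None))
    return results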
__search_max_page_items__ = 20
__tree_max_page_items__ = 20

import sys, os
import io
114 """Wraps a stream with an encoder"""
123 """Wraps the output stream, encoding Unicode strings with the specified encoding"""
124 if isinstance(obj, str):
126 self.
out.buffer.write(obj)
129 """Delegate everything but write to the stream"""
130 return getattr(self.
out, attr)
132 if isinstance(sys.stdout, io.TextIOWrapper):
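# Why the wrapping above: the grabber's stdout is consumed as UTF-8 XML, so both
# stdout and stderr are forced to emit UTF-8 bytes regardless of the console
# locale. Illustrative effect (not executed here, to keep the grabber's stdout
# clean): after wrapping, sys.stdout.write('Español\n') is encoded with 'utf8'
# and written to the underlying binary buffer.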
try:
    '''Import the common python class
    '''
    import nv_python_libs.common.common_api as common_api
except Exception as e:
    sys.stderr.write('''
The subdirectory "nv_python_libs/common" containing the modules common_api.py and
common_exceptions.py (v0.1.3 or greater) could not be imported.
They should have been included with the distribution of MythNetvision.
Error(%s)
''' % e)
    sys.exit(1)

if common_api.__version__ < '0.1.3':
    sys.stderr.write("\n! Error: Your current installed common_api.py version is (%s)\nYou must at least have version (0.1.3) or higher.\n" % common_api.__version__)
    sys.exit(1)
try:
    '''Import the python mnvsearch support classes
    '''
    import nv_python_libs.mnvsearch.mnvsearch_api as target
except Exception as e:
    sys.stderr.write('''
The subdirectory "nv_python_libs/mnvsearch" containing the modules mnvsearch_api.py and
mnvsearch_exceptions.py (v0.1.0 or greater) could not be imported.
They should have been included with the distribution of mnvsearch.py.
Error(%s)
''' % e)
    sys.exit(1)

if target.__version__ < '0.1.0':
    sys.stderr.write("\n! Error: Your current installed mnvsearch_api.py version is (%s)\nYou must at least have version (0.1.0) or higher.\n" % target.__version__)
    sys.exit(1)
try:
    import nv_python_libs.mainProcess as process
except Exception as e:
    sys.stderr.write('''
The python script "nv_python_libs/mainProcess.py" must be present.
Error(%s)
''' % e)
    sys.exit(1)

if process.__version__ < '0.2.0':
    sys.stderr.write("\n! Error: Your current installed mainProcess.py version is (%s)\nYou must at least have version (0.2.0) or higher.\n" % process.__version__)
    sys.exit(1)
if __name__ == '__main__':
    # No api key is required to search the local MythNetvision database
    apikey = ""
    # Set the base processing directory that the grabber is installed in
    target.baseProcessingDir = os.path.dirname(os.path.realpath(__file__))
    # Make sure the target functions have an instance of the common routines
    target.common = common_api.Common()
    main = process.mainProcess(target, apikey)
    main.grabberInfo = {}
    main.grabberInfo['enabled'] = True
    main.grabberInfo['title'] = __title__
    main.grabberInfo['command'] = 'mnvsearch.py'
    main.grabberInfo['mashup_title'] = __mashup_title__
    main.grabberInfo['author'] = __author__
    main.grabberInfo['thumbnail'] = 'mnvsearch.png'
    main.grabberInfo['type'] = ['video', ]
    main.grabberInfo['desc'] = "MythTV Online Content database search."
    main.grabberInfo['version'] = __version__
    main.grabberInfo['search'] = True
    main.grabberInfo['tree'] = False
    main.grabberInfo['html'] = False
    main.grabberInfo['usage'] = __usage_examples__
    main.grabberInfo['SmaxPage'] = __search_max_page_items__
    main.grabberInfo['TmaxPage'] = __tree_max_page_items__
    main.main()