Last active
February 26, 2026 10:32
-
-
Save CarstenG2/33be0a4a8be3d04d7b8b7eb70b3a48fb to your computer and use it in GitHub Desktop.
xShip patches: Medien-Info (Issue #48) + Trailer ansehen (Issue #58) — all modified files
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #2021-07-20 | |
| #edit 2024-12-04 | |
| import os, sys | |
| import xbmc, xbmcplugin, xbmcaddon, xbmcgui, xbmcvfs | |
| from six import iteritems | |
| is_python2 = sys.version_info.major == 2 | |
| if is_python2: | |
| #from xbmc import translatePath | |
| from HTMLParser import HTMLParser | |
| unescape = HTMLParser().unescape | |
| from urlparse import urlparse, parse_qsl, urljoin, parse_qs, urlsplit | |
| from urllib import quote_plus, unquote_plus, quote, unquote, urlencode, urlretrieve | |
| from urllib2 import Request, urlopen | |
| else: | |
| #from xbmcvfs import translatePath | |
| from html import unescape | |
| from html.parser import HTMLParser | |
| from urllib.parse import urlparse, quote_plus, parse_qsl, unquote_plus, urljoin, quote, unquote, urlencode, parse_qs, urlsplit | |
| from urllib.request import Request, urlopen, urlretrieve | |
def translatePath(*args):
    """Resolve a Kodi special:// path to a real filesystem path.

    Uses xbmcvfs.translatePath on Python 3 (Kodi 19+) and the legacy
    xbmc.translatePath (decoded to unicode) on Python 2.
    """
    if not is_python2:
        return xbmcvfs.translatePath(*args)
    return xbmc.translatePath(*args).decode("utf-8")
def exists(*args):
    """Return True when the (special://) path exists after translation."""
    real_path = translatePath(*args)
    return os.path.exists(real_path)
def py2_decode(value):
    """Decode *value* to unicode (utf-8) on Python 2; pass through otherwise."""
    if not is_python2:
        return value
    try:
        return value.decode('utf-8')
    except:
        return value
def py2_encode(value):
    """Encode *value* to utf-8 bytes on Python 2; pass through otherwise."""
    if not is_python2:
        return value
    try:
        return value.encode('utf-8')
    except:
        return value
| ## from six | |
| ## iteritems = lambda d: ((hasattr(d, 'iteritems') and d.iteritems) or d.items)() | |
# xbmcaddon -------------------------------------------------------------------
# Shorthand handles to the running add-on and its metadata.
Addon = xbmcaddon.Addon()
addonInfo = xbmcaddon.Addon().getAddonInfo
addonId = addonInfo('id')  # 'plugin.video.xship'
addonName = addonInfo('name')  # 'Xship'
addonVersion = addonInfo('version')
addonPath = translatePath(addonInfo('path'))  # install dir, e.g. ...\addons\plugin.video.xship\
addonProfilePath = translatePath(addonInfo('profile'))  # userdata dir, e.g. ...\addon_data\plugin.video.xship\
# dataPath = py2_decode(translatePath(addonInfo('profile')))
#cachePath = os.path.join(addonProfilePath, "cache")
#if not exists(cachePath): os.makedirs(cachePath)
setSetting = xbmcaddon.Addon().setSetting
_getSetting = xbmcaddon.Addon().getSetting
def getSetting(Name, default=''):
    """Read an add-on setting; return *default* when the stored value is empty."""
    result = _getSetting(Name)
    if result:
        return result
    else:
        return default
# xbmc -------------------------------------------------------------------------
# Short aliases for frequently used Kodi API callables.
skin = xbmc.getSkinDir()
infoLabel = xbmc.getInfoLabel
condVisibility = xbmc.getCondVisibility
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
keyboard = xbmc.Keyboard
# Usage example:
# kb = xbmc.Keyboard('default', 'heading', True)
# kb.setDefault('password') # optional
# kb.setHeading('Enter password') # optional
# kb.setHiddenInput(True) # optional
# kb.doModal()
# if (kb.isConfirmed()):
#     text = kb.getText()
execute = xbmc.executebuiltin
executebuiltin = xbmc.executebuiltin
player = xbmc.Player()
# NOTE(review): evaluated once at import time — this is a snapshot, not a live flag.
abortRequested = xbmc.Monitor().abortRequested()
jsonrpc = xbmc.executeJSONRPC
getInfoLabel = xbmc.getInfoLabel
# xbmcvfs ----------------------------------------------------------------------
listDir = xbmcvfs.listdir
openFile = xbmcvfs.File
makeFile = xbmcvfs.mkdir
mkDir = xbmcvfs.mkdir
delete = xbmcvfs.delete
# exists = xbmcvfs.exists
# xbmcplugin -------------------------------------------------------------------
resolveUrl = xbmcplugin.setResolvedUrl
addItem = xbmcplugin.addDirectoryItem
endofdirectory = xbmcplugin.endOfDirectory
content = xbmcplugin.setContent
plugincategory = xbmcplugin.setPluginCategory
def sortLabel(syshandle):
    """Enable by-label sorting for the directory listing on *syshandle*."""
    xbmcplugin.addSortMethod(syshandle, xbmcplugin.SORT_METHOD_LABEL)
def trailerLabel():
    """Return the localised context-menu label for the trailer action.

    Falls back to English when the GUI language cannot be determined.
    """
    try:
        language = xbmc.getLanguage(xbmc.ISO_639_1).lower()[:2]
    except Exception:
        language = 'en'
    if language == 'de':
        return 'Trailer ansehen'
    return 'Watch Trailer'
# Window-property key that caches the YouTube availability check.
_YT_CACHE_KEY = 'xship.hasYouTube'
def hasYouTube():
    """Return True if YouTube addon is installed AND has a user API key configured.

    Result is cached in Kodi window(10000) property for the lifetime of the
    Kodi session, so api_keys.json is read at most once.
    NOTE(review): uses the module-level `window` defined further below; safe
    because the function only runs after the module is fully imported.
    """
    cached = window.getProperty(_YT_CACHE_KEY)
    if cached:
        return cached == '1'
    result = False
    if xbmc.getCondVisibility('System.HasAddon(plugin.video.youtube)'):
        try:
            import xbmcvfs as _vfs, json as _json
            # Read the YouTube add-on's key store directly from its profile dir.
            _f = _vfs.File('special://profile/addon_data/plugin.video.youtube/api_keys.json')
            _data = _json.loads(_f.read())
            _f.close()
            result = bool(_data.get('keys', {}).get('user', {}).get('api_key'))
        except Exception:
            pass  # missing/unreadable key file counts as "not configured"
    window.setProperty(_YT_CACHE_KEY, '1' if result else '0')
    return result
# xbmcgui ----------------------------------------------------------------------
window = xbmcgui.Window(10000)  # home window, used as a session-wide property store
currentWindowId = xbmcgui.Window(xbmcgui.getCurrentWindowId())
item = xbmcgui.ListItem
dialog = xbmcgui.Dialog()
progressDialog = xbmcgui.DialogProgress()
progressDialogBG = xbmcgui.DialogProgressBG()
# Frequently used files below the add-on profile / install directory.
dataPath = py2_decode(translatePath(addonInfo('profile')))
bookmarksFile = os.path.join(addonProfilePath, 'bookmarks.db')
settingsFile = os.path.join(addonPath, 'resources', 'settings.xml')
def addonIcon():
    """Path of the add-on icon as declared in addon.xml."""
    return addonInfo('icon')
def addonFanart():
    """Path of the add-on fanart as declared in addon.xml."""
    return addonInfo('fanart')
def artPath():
    """Directory holding the bundled artwork (resources/media)."""
    return os.path.join(translatePath(addonInfo('path')), 'resources', 'media')
def addonThumb():
    """Default thumbnail image (same file as the poster)."""
    return os.path.join(artPath(), 'poster.png')
def addonPoster():
    """Default poster image."""
    return os.path.join(artPath(), 'poster.png')
def addonBanner():
    """Default banner image."""
    return os.path.join(artPath(), 'banner.png')
# Legacy implementation kept for reference: read the fanart path from addon.xml.
#def addonFanart():
#    addonXml = os.path.join(py2_decode(translatePath(addonInfo('path'))), 'addon.xml')
#    import xml.dom.minidom as minidom
#    doc = minidom.parse(addonXml)
#    # with open(addonXml, 'r') as f: content = f.read()
#    # fanart = re.search('fanart>([^<]+)', content).group(1)
#    fanart = doc.getElementsByTagName('fanart')[0].firstChild.nodeValue
#    fanart = os.path.join(addonInfo('path'), os.path.normpath(fanart))
#    if os.path.exists(fanart):
#        return fanart
#    return
def addonNext():
    """Image used for the 'next page' entry."""
    return os.path.join(artPath(), 'next.png')
def addonNoPicture():
    """Placeholder image when no artwork is available."""
    return os.path.join(artPath(), 'no-picture.png')
def infoDialog(message, heading=addonInfo('name'), icon='', time=3000, sound=False):
    """Show a Kodi toast notification.

    icon is '' (add-on icon) or one of 'INFO' / 'WARNING' / 'ERROR';
    time is the display duration in milliseconds.
    NOTE(review): the default heading is evaluated once at import time.
    """
    if icon == '': icon = addonIcon()
    elif icon == 'INFO': icon = xbmcgui.NOTIFICATION_INFO
    elif icon == 'WARNING': icon = xbmcgui.NOTIFICATION_WARNING
    elif icon == 'ERROR': icon = xbmcgui.NOTIFICATION_ERROR
    dialog.notification(heading, message, icon, time, sound=sound)
def yesnoDialog(line1, line2, line3, heading=addonInfo('name'), nolabel='', yeslabel=''):
    """Show a yes/no dialog.

    Kodi 19+ (Python 3) takes a single message string, so the three lines
    are joined with newlines there; Python 2 Kodi accepts them separately.
    """
    if not is_python2:
        return dialog.yesno(heading, line1 + '\n' + line2 + '\n' + line3, nolabel, yeslabel)
    return dialog.yesno(heading, line1, line2, line3, nolabel, yeslabel)
def selectDialog(list, heading=addonInfo('name')):
    """Show a selection dialog; returns the chosen index (-1 on cancel).

    NOTE(review): the parameter shadows the builtin `list`, but renaming it
    would break keyword callers, so it is kept as-is.
    """
    return dialog.select(heading, list)
def showparentdiritems():
    """Return True when Kodi's 'show parent folder items' setting is enabled.

    Queries the setting via JSON-RPC; the raw JSON response contains the
    token 'false' only when the option is disabled.
    """
    response = xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.GetSettingValue", "params":{"setting":"filelists.showparentdiritems"}}')
    # Idiomatic form of the previous `if not 'false' in ...: return True else False`.
    return 'false' not in response
# Modified `sleep` command that honors a user exit request
def sleep(time):
    """Wait roughly *time* SECONDS, returning early when Kodi shuts down.

    Each loop iteration waits at most one second via Monitor.waitForAbort and
    decrements the counter by 1.
    NOTE(review): call sites such as `sleep(500)` look like they intend
    milliseconds (~0.5s) but this waits ~500 seconds — confirm and unify units.
    """
    monitor = xbmc.Monitor()
    while time > 0 and not monitor.abortRequested():
        monitor.waitForAbort(min(1, time))
        time = time - 1
def getKodiVersion():
    """Return the Kodi major version (e.g. '21') as a string."""
    build = xbmc.getInfoLabel("System.BuildVersion")
    return build.split(".", 1)[0]
def busy():
    """Open the busy spinner (non-cancelable variant on Kodi 18+)."""
    if int(getKodiVersion()) >= 18:
        return execute('ActivateWindow(busydialognocancel)')
    else:
        return execute('ActivateWindow(busydialog)')
def idle():
    """Close whichever busy spinner busy() opened."""
    if int(getKodiVersion()) >= 18:
        return execute('Dialog.Close(busydialognocancel)')
    else:
        return execute('Dialog.Close(busydialog)')
def visible():
    """Return True while a busy spinner window is currently active."""
    if int(getKodiVersion()) >= 18 and xbmc.getCondVisibility('Window.IsActive(busydialognocancel)') == 1:
        return True
    return xbmc.getCondVisibility('Window.IsActive(busydialog)') == 1
def reload_profile():
    """Reload the current Kodi profile (forces all settings to be re-read)."""
    profil = xbmc.getInfoLabel('System.ProfileName')
    sleep(500)
    #if profil:
    # NOTE(review): when the profile name comes back empty this issues
    # 'LoadProfile(,prompt)' — consider restoring the guard above.
    xbmc.executebuiltin('LoadProfile(' + profil + ',prompt)')
def openSettings(query=None, id=addonInfo('id')):
    """Open the settings dialog of add-on *id* and optionally focus a control.

    query is 'category' or 'category.setting'; the numeric control IDs are
    derived from it with version-specific offsets (the settings dialog uses
    different base IDs per Kodi generation). Any failure is silently ignored.
    NOTE(review): the offsets below are empirical per Kodi version — verify
    when supporting a new major release.
    """
    try:
        idle()
        execute('Addon.OpenSettings(%s)' % id)
        if query is None:
            raise Exception()  # nothing to focus — dialog is open, we are done
        if len(str(query).split('.')) == 1:
            c = query
            f = 0
        else: c, f = str(query).split('.')
        if int(getKodiVersion()) >= 21:
            execute('SetFocus(%i)' % (int(c)-200))
            if int(f):execute('SetFocus(%i)' % (int(f)-180))
        elif int(getKodiVersion()) >= 18:
            execute('SetFocus(%i)' % (int(c)-100)) # k19: -100
            if int(f):execute('SetFocus(%i)' % (int(f)-80)) # k19: -80
        else:
            execute('SetFocus(%i)' % (int(c) + 100))
            if int(f):execute('SetFocus(%i)' % (int(f) + 200))
    except:
        return
def resetSettings():
    """Reset all add-on settings except account/API credentials.

    Asks the user for confirmation, deletes the profile's settings.xml,
    restores the preserved credential settings and returns True on success.
    Returns None when the user declines or an error occurs; the caller is
    expected to reload the profile afterwards.
    """
    yes = yesnoDialog("Zurücksetzen der Settings (außer Konten)", 'und einem abschließenden Reload vom Profil', 'Sind Sie sicher?')
    if not yes: return
    try:
        # Preserve account- and API-related settings across the reset.
        login = getSetting('serienstream.user')
        password = getSetting('serienstream.pass')
        os_user = getSetting('subtitles.os_user')
        os_pass = getSetting('subtitles.os_pass')
        tmdb = getSetting('api.tmdb')
        trakt = getSetting('api.trakt')
        fanart = getSetting('api.fanart.tv')
        debug = getSetting('status.debug')
        # FIX: xbmc.translatePath was removed in Kodi 19+ (Python 3) and made
        # this whole block fail silently via the bare except; use the already
        # translated profile path from this module instead.
        SettingFile = os.path.join(addonProfilePath, "settings.xml")
        if xbmcvfs.exists(SettingFile): xbmcvfs.delete(SettingFile)
        # PROFIL_RELOAD marker (legacy, kept for reference):
        # PROFIL_RELOAD = os.path.join(addonProfilePath, "profil_reload")
        # open(PROFIL_RELOAD, "w+").write('Profil reload')
        setSetting(id='serienstream.user', value=login)
        setSetting(id='serienstream.pass', value=password)
        setSetting(id='subtitles.os_user', value=os_user)
        setSetting(id='subtitles.os_pass', value=os_pass)
        setSetting(id='api.tmdb', value=tmdb)
        setSetting(id='api.trakt', value=trakt)
        setSetting(id='api.fanart.tv', value=fanart)
        setSetting(id='status.debug', value=debug)
        return True
    except Exception:
        return
def getSettingDefault(id):
    """Return the default="" value of setting *id* from resources/settings.xml.

    Returns None when the file cannot be read or the id is not found.
    """
    import re
    try:
        # Context manager replaces the leak-prone open()/close() pair; the old
        # no-op `value.strip('\n')` (its result was discarded) is removed.
        with open(settingsFile, 'r') as settings:
            value = ' '.join(settings.readlines())
        return re.findall(r'id=\"%s\".*?default=\"(.*?)\"' % (id), value)[0]
    except Exception:
        return None
def inAdvancedsettings(word=''):
    """Return True when advancedsettings.xml exists and contains *word*.

    NOTE(review): with the default word='' the substring test always matches,
    so this degenerates to a pure existence check.
    """
    advancedsettings = py2_decode(os.path.join(translatePath('special://userdata/'), "advancedsettings.xml"))
    if exists(advancedsettings):
        with open(advancedsettings, 'r') as file:
            content = file.read()
        if word in content: return True
    return False
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # 2023-05-10 | |
| # edit 2025-06-12 | |
| import sys, json | |
| from urllib.parse import parse_qs, urlsplit | |
| from resources.lib import control | |
# Parse the plugin:// route: sys.argv[2] holds the '?action=...' query string.
params = dict(control.parse_qsl(control.urlsplit(sys.argv[2]).query))
action = params.get('action')
name = params.get('name')
table = params.get('table')
title = params.get('title')
source = params.get('source')
# ------ navigator --------------
# Route dispatcher: `action` selects which indexer/handler runs. Imports are
# done lazily inside each branch to keep plugin start-up fast.
if action == None or action == 'root':
    from resources.lib.indexers import navigator
    navigator.navigator().root()
elif action == 'pluginInfo':
    from resources.lib import supportinfo
    supportinfo.pluginInfo()
elif action == 'movieNavigator':
    from resources.lib.indexers import navigator
    navigator.navigator().movies()
elif action == 'tvNavigator':
    from resources.lib.indexers import navigator
    navigator.navigator().tvshows()
elif action == 'toolNavigator':
    from resources.lib.indexers import navigator
    navigator.navigator().tools()
elif action == 'downloadNavigator':
    from resources.lib.indexers import navigator
    navigator.navigator().downloads()
# -------------------------------------------
elif action == 'download':
    image = params.get('image')
    from resources.lib import downloader
    from resources.lib import sources
    # Best-effort: any resolve/download failure is swallowed deliberately.
    try: downloader.download(name, image, sources.sources().sourcesResolve(json.loads(source)[0], True))
    except: pass
elif action in ('sendToJD', 'sendToJD2', 'sendToMyJD', 'sendToPyLoad'):
    raw_url = json.loads(source)[0]['url']
    if raw_url:
        # Extract best URL for download managers:
        # If URL has Kodi-style |headers with a Referer that has a path,
        # use the Referer (it's the hoster page URL that JD can resolve).
        # Otherwise use the bare URL without |headers.
        url = raw_url
        if '|' in raw_url:
            base_url, header_str = raw_url.split('|', 1)
            headers = dict(parse_qs(header_str, keep_blank_values=True))
            referer = headers.get('Referer', [''])[0]
            if referer and urlsplit(referer).path not in ('', '/'):
                url = referer
            else:
                url = base_url
        if action == 'sendToJD':
            from resources.lib.handler.jdownloaderHandler import cJDownloaderHandler
            cJDownloaderHandler().sendToJDownloader(url)
        elif action == 'sendToJD2':
            from resources.lib.handler.jdownloader2Handler import cJDownloader2Handler
            cJDownloader2Handler().sendToJDownloader2(url)
        elif action == 'sendToMyJD':
            from resources.lib.handler.myjdownloaderHandler import cMyJDownloaderHandler
            cMyJDownloaderHandler().sendToMyJDownloader(url, name)
        elif action == 'sendToPyLoad':
            from resources.lib.handler.pyLoadHandler import cPyLoadHandler
            cPyLoadHandler().sendToPyLoad(name, url)
elif action == 'mediaInfo':
    # Issue #48: show technical media info for a stream, with progress dialog.
    import xbmcgui
    dialog = xbmcgui.DialogProgress()
    dialog.create('Medien-Info', 'Löse Stream-URL auf...')
    dialog.update(0)
    from resources.lib import sources
    sources.sources().mediaInfo(source, dialog)
elif action == 'playExtern':
    import json  # (already imported at module level; kept for parity)
    if not control.visible(): control.busy()
    try:
        # Build sysmeta from the route params, coercing numeric fields and
        # skipping zero values; season==0 marks a movie, otherwise a tvshow.
        sysmeta = {}
        for key, value in params.items():
            if key == 'action': continue
            elif key == 'year' or key == 'season' or key == 'episode': value = int(value)
            if value == 0: continue
            sysmeta.update({key : value})
        if int(params.get('season')) == 0:
            mediatype = 'movie'
        else:
            mediatype = 'tvshow'
        sysmeta.update({'mediatype': mediatype})
        # if control.getSetting('hosts.mode') == '2':
        #     sysmeta.update({'select': '2'})
        # else:
        #     sysmeta.update({'select': '1'})
        sysmeta.update({'select': control.getSetting('hosts.mode')})
        sysmeta = json.dumps(sysmeta)
        params.update({'sysmeta': sysmeta})
        from resources.lib import sources
        sources.sources().play(params)
    except:
        pass
elif action == 'playURL':
    # Resolve a manually entered URL via resolveurl and play it directly.
    try:
        import resolveurl
        import xbmcgui, xbmc
        #url = 'https://streamvid.net/embed-uhgo683xes41'
        #url = 'https://moflix-stream.click/v/gcd0aueegeia'
        url = xbmcgui.Dialog().input("URL Input")
        hmf = resolveurl.HostedMediaFile(url=url, include_disabled=True, include_universal=False)
        try:
            if hmf.valid_url(): url = hmf.resolve()
        except:
            pass
        item = xbmcgui.ListItem('URL-direkt')
        kodiver = int(xbmc.getInfoLabel("System.BuildVersion").split(".")[0])
        if ".m3u8" in url or '.mpd' in url:
            # Adaptive stream: hand over to inputstream.adaptive; Kodi 21+
            # auto-detects the manifest type, older versions need the property.
            item.setProperty("inputstream", "inputstream.adaptive")
            if '.mpd' in url:
                if kodiver < 21: item.setProperty('inputstream.adaptive.manifest_type', 'mpd')
                item.setMimeType('application/dash+xml')
            else:
                if kodiver < 21: item.setProperty('inputstream.adaptive.manifest_type', 'hls')
                item.setMimeType("application/vnd.apple.mpegurl")
            item.setContentLookup(False)
        if '|' in url:
            # Split Kodi-style '|key=val' request headers off the URL.
            stream_url, strhdr = url.split('|')
            item.setProperty('inputstream.adaptive.stream_headers', strhdr)
            if kodiver > 19: item.setProperty('inputstream.adaptive.manifest_headers', strhdr)
            # item.setPath(stream_url)
            url = stream_url
        item.setPath(url)
        xbmc.Player().play(url, item)
    except:
        #print('no video link found')
        control.infoDialog("Keinen Video Link gefunden", sound=True, icon='WARNING', time=1000)
elif action == 'UpdatePlayCount':
    from resources.lib import playcountDB
    playcountDB.UpdatePlaycount(params)
    control.execute('Container.Refresh')
# listings -------------------------------
elif action == 'listings':
    from resources.lib.indexers import listings
    listings.listings().get(params)
elif action == 'movieYears':
    from resources.lib.indexers import listings
    listings.listings().movieYears()
elif action == 'movieGenres':
    from resources.lib.indexers import listings
    listings.listings().movieGenres()
elif action == 'tvGenres':
    from resources.lib.indexers import listings
    listings.listings().tvGenres()
# search ----------------------
elif action == 'searchNew':
    from resources.lib import searchDB
    searchDB.search_new(table)
elif action == 'searchClear':
    from resources.lib import searchDB
    searchDB.remove_all_query(table)
    # if len(searchDB.getSearchTerms()) == 0:
    #     control.execute('Action(ParentDir)')
elif action == 'searchDelTerm':
    from resources.lib import searchDB
    searchDB.remove_query(name, table)
    # if len(searchDB.getSearchTerms()) == 0:
    #     control.execute('Action(ParentDir)')
# person ----------------------
elif action == 'person':
    from resources.lib.indexers import person
    person.person().get(params)
elif action == 'personSearch':
    from resources.lib.indexers import person
    person.person().search()
elif action == 'personCredits':
    from resources.lib.indexers import person
    person.person().getCredits(params)
elif action == 'playfromPerson':
    if not control.visible(): control.busy()
    # Rebuild full metadata from TMDb before playing from a person listing.
    sysmeta = json.loads(params['sysmeta'])
    if sysmeta['mediatype'] == 'movie':
        from resources.lib.indexers import movies
        sysmeta = movies.movies().super_meta(sysmeta['tmdb_id'])
        sysmeta = json.dumps(sysmeta)
    else:
        from resources.lib.indexers import tvshows
        sysmeta = tvshows.tvshows().super_meta(sysmeta['tmdb_id'])
        sysmeta = control.quote_plus(json.dumps(sysmeta))
    params.update({'sysmeta': sysmeta})
    from resources.lib import sources
    sources.sources().play(params)
# movies ----------------------
elif action == 'movies':
    from resources.lib.indexers import movies
    movies.movies().get(params)
elif action == 'moviesSearch':
    from resources.lib.indexers import movies
    movies.movies().search()
# tvshows ---------------------------------
elif action == 'tvshows': # 'tvshowPage'
    from resources.lib.indexers import tvshows
    tvshows.tvshows().get(params)
elif action == 'tvshowsSearch':
    from resources.lib.indexers import tvshows
    tvshows.tvshows().search()
# seasons ---------------------------------
elif action == 'seasons':
    from resources.lib.indexers import seasons
    seasons.seasons().get(params) # params
# episodes ---------------------------------
elif action == 'episodes':
    from resources.lib.indexers import episodes
    episodes.episodes().get(params)
# sources ---------------------------------
elif action == 'play':
    if not control.visible(): control.busy()
    from resources.lib import sources
    sources.sources().play(params)
elif action == 'addItem':
    from resources.lib import sources
    sources.sources().addItem(title)
elif action == 'playItem':
    if not control.visible(): control.busy()
    from resources.lib import sources
    sources.sources().playItem(title, source)
# Settings ------------------------------
elif action == "settings": # enable / disable all sources at once
    from resources import settings
    settings.run(params)
elif action == 'addonSettings':
    # query = None
    query = params.get('query')
    control.openSettings(query)
elif action == 'resetSettings':
    status = control.resetSettings()
    if status:
        control.reload_profile()
        control.sleep(500)
        control.execute('RunAddon("%s")' % control.addonId)
elif action == 'resolverSettings':
    import resolveurl as resolver
    resolver.display_settings()
# Debugger shutdown (kept for reference):
# try:
#     import pydevd
#     if pydevd.connected: pydevd.kill_all_pydev_threads()
# except:
#     pass
# finally:
#     exit()
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #2021-07-15 | |
| # edit 2025-08-02 switch from treads to concurrent.futures | |
| import sys, os, datetime | |
| import json | |
| from resources.lib.requestHandler import cRequestHandler | |
| from resources.lib import control | |
# Plugin query parameters (empty when invoked outside a plugin call).
_params = dict(control.parse_qsl(sys.argv[2].replace('?', ''))) if len(sys.argv) > 1 else dict()
# TMDb TV genre list (full API response for reference):
# {"genres":[{"id":10759,"name":"Action & Adventure"},{"id":16,"name":"Animation"},{"id":35,"name":"Komödie"},{"id":80,"name":"Krimi"},{"id":99,"name":"Dokumentarfilm"},{"id":18,"name":"Drama"},{"id":10751,"name":"Familie"},{"id":10762,"name":"Kids"},{"id":9648,"name":"Mystery"},{"id":10763,"name":"News"},{"id":10764,"name":"Reality"},{"id":10765,"name":"Sci-Fi & Fantasy"},{"id":10766,"name":"Soap"},{"id":10767,"name":"Talk"},{"id":10768,"name":"War & Politics"},{"id":37,"name":"Western"}]}
# Curated subsets (German names) actually offered in the genre menus.
_genresTv = [{"id":10759,"name":"Action & Abenteuer"},{"id":16,"name":"Animation"},{"id":35,"name":"Komödie"},{"id":80,"name":"Krimi"},{"id":18,"name":"Drama"},{"id":10751,"name":"Familie"},
             {"id":10762,"name":"Kids"},{"id":9648,"name":"Mystery"},{"id":10764,"name":"Reality"},{"id":10765,"name":"Sci-Fi & Fantasy"},{"id":10768,"name":"War & Politics"},
             {"id":37,"name":"Western"}]
_genresMovie = [{"id":28,"name":"Action"},{"id":12,"name":"Abenteuer"},{"id":16,"name":"Animation"},{"id":35,"name":"Komödie"},{"id":80,"name":"Krimi"},{"id":18,"name":"Drama"},
                {"id":10751,"name":"Familie"},{"id":14,"name":"Fantasy"},{"id":36,"name":"Historie"},{"id":27,"name":"Horror"},{"id":10402,"name":"Musik"},{"id":9648,"name":"Mystery"},
                {"id":10749,"name":"Liebesfilm"},{"id":878,"name":"Science Fiction"},{"id":10770,"name":"TV-Film"},{"id":53,"name":"Thriller"},{"id":10752,"name":"Kriegsfilm"},{"id":37,"name":"Western"}]
class listings:
    """TMDb /discover based listings (years and genres) for movies and tv shows."""

    def __init__(self):
        self.URL = 'https://api.themoviedb.org/3/discover'
        self.api_key = '86dd18b04874d9c94afadde7993d94e3'
        self.lang = 'de'
        self.list = []
        self.total_pages = 0
        self.query = ''
        self.media_type = ''
        #self.year_params = 'release_date.gte=%s-01-01&release_date.lte=%s-12-31&with_release_type=2|3&without_genres=%s&sort_by=popularity.desc'
        #self.year_params = 'primary_release_year=%s&with_release_type=2|3&without_genres=%s&sort_by=vote_count.desc' #&sort_by= vote_average.desc / popularity.desc / vote_count
        self.genres_params = ''
        self.popular_link = ''
        self.datetime = datetime.datetime.utcnow()

    def get(self, params):
        """Resolve a discover query and hand the resulting TMDb id list on.

        params carries 'url' (discover query string or the magic value
        'kino'), 'media_type' and optional 'next_pages' for paging; the id
        list is forwarded to the movies/tvshows directory builder.
        """
        try:
            if params.get('next_pages'):
                next_pages = int(params.get('next_pages'))
            else:
                next_pages = 1
            append_to_response = 'page=%s' % next_pages
            self.media_type = params.get('media_type')
            url = params.get('url')
            if url == 'kino':
                # "In cinemas": releases between 90 and 21 days ago.
                from datetime import datetime, timedelta
                today = datetime.today() - timedelta(days=21)
                fromday = datetime.today() - timedelta(days=90)
                _today = today.strftime('%Y-%m-%d')
                _fromday = fromday.strftime('%Y-%m-%d')
                url = 'primary_release_date.gte=%s&primary_release_date.lte=%s' % (_fromday, _today)
            # local renamed from `list` to avoid shadowing the builtin
            id_list, total_pages = self._call(url, append_to_response=append_to_response)
            params.update({'list': id_list})
            params.update({'next_pages': next_pages})
            params.update({'total_pages': total_pages})
            params.update({'media_type': self.media_type})
            if self.media_type == 'movie':
                from resources.lib.indexers import movies
                movies.movies().getDirectory(params)
            else:
                from resources.lib.indexers import tvshows
                tvshows.tvshows().getDirectory(params)
            return self.list
        except:
            pass

    def movieYears(self):
        """Build the by-release-year movie menu (current year down to 1961)."""
        year = (self.datetime.strftime('%Y'))
        without = '99,10762,10767,10766,16'  # documentary, kids, talk, soap, animation
        for i in range(int(year), 1960, -1):
            self.list.append({'name': str(i), 'url': 'primary_release_year=%s&with_release_type=2|3&without_genres=%s&sort_by=vote_count.desc' % (str(i), without), 'image': 'years.png', 'action': 'listings'})
        self.media_type = 'movie'
        self.addDirectory(self.list)
        return self.list

    def movieGenres(self):
        """Build the movie genre menu from the curated _genresMovie list."""
        without = '99,16'
        self.media_type = 'movie'
        for i in _genresMovie:
            # For the Animation genre itself, obviously don't exclude animation.
            if i['id'] == 16: without = '99'
            self.list.append({'name': i['name'], 'url': 'with_genres=%s&without_genres=%s&sort_by=vote_count.desc' % (str(i['id']), without), 'image': 'genres.png', 'action': 'listings'})
        self.addDirectory(self.list)
        return self.list

    def tvGenres(self):
        """Build the tv genre menu from the curated _genresTv list."""
        without = '99,10762,10767,10766,16'  # documentary, kids, talk, soap, animation
        self.media_type = 'tv'
        for i in _genresTv:
            # Don't exclude animation/kids when browsing those genres directly.
            if i['id'] == 16 or i['id'] == 10762: without = '99,10767,10766'
            self.list.append({'name': i['name'], 'url': 'with_genres=%s&without_genres=%s&sort_by=vote_count.desc' % (str(i['id']), without), 'image': 'genres.png', 'action': 'listings'})
        self.addDirectory(self.list)
        return self.list

    def _call(self, url, append_to_response=''):
        """Call the TMDb discover endpoint; return ([tmdb ids], total_pages)."""
        # FIX: '&region=DE' had been mangled to '®ion=DE' (HTML entity &reg;
        # from a copy/paste) which silently dropped the region filter.
        url = '%s/%s?api_key=%s&language=de-DE&region=DE&vote_count.gte=20&include_adult=false&include_video=false&%s' % (self.URL, self.media_type, self.api_key, url)
        if not 'without_genres' in url: url += '&without_genres=99'  # exclude documentaries by default
        if append_to_response:
            url += '&%s' % append_to_response
        oRequestHandler = cRequestHandler(url, ignoreErrors=True)
        response = oRequestHandler.request()
        data = json.loads(response)
        if 'status_code' in data and data['status_code'] == 34:  # 34 = resource not found
            return [], 0
        ids = [entry['id'] for entry in data['results']]
        return ids, data['total_pages']

    def addDirectory(self, items):
        """Render *items* (dicts with name/url/image/action) as a Kodi directory."""
        if items is None or len(items) == 0:
            control.idle()
            sys.exit()
        sysaddon = sys.argv[0]
        syshandle = int(sys.argv[1])
        # FIX: addonPoster previously received control.addonFanart() (copy-paste
        # slip); it now uses the actual poster fallback.
        addonPoster, addonFanart, addonThumb, artPath = control.addonPoster(), control.addonFanart(), control.addonThumb(), control.artPath()
        for entry in items:
            try:
                name = entry['name']
                if self.media_type == 'movie':
                    if 'primary_release_year' in entry['url']:
                        plot = 'Filme aus dem Jahr: %s' % name
                    else:
                        plot = 'Filme aus der Kategorie: %s' % name
                else:
                    plot = 'Serien aus der Kategorie: %s' % name
                try:
                    poster = os.path.join(artPath, entry['image'])
                    thumb = os.path.join(artPath, entry['image'])
                except:
                    thumb = addonThumb
                    poster = addonPoster
                url = '%s?action=%s' % (sysaddon, entry['action'])
                url += '&media_type=%s' % self.media_type
                try:
                    url += '&url=%s' % control.quote_plus(entry['url'])
                except:
                    pass
                item = control.item(label=name, offscreen=True)
                # item.setArt({'icon': thumb, 'thumb': thumb})
                item.setArt({'thumb': thumb, 'poster': poster})
                if not addonFanart is None: item.setProperty('Fanart_Image', addonFanart)
                # Hide watched/unwatched context entries and "no information
                # available" popups (skin-dependent).
                item.setInfo('video', {'overlay': 4, 'plot': plot})
                item.setIsFolder(True)
                control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
            except:
                pass
        control.content(syshandle, 'videos')
        control.plugincategory(syshandle, control.addonVersion)
        control.endofdirectory(syshandle, cacheToDisc=True)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Media info probing for xShip streams
# Detects stream type (HLS/DASH/MP4) and extracts resolution, codec, FPS, audio, bitrate, duration
import re
import time
import threading
from resources.lib import log_utils, control

TOTAL_TIMEOUT = 20  # seconds budget for the entire probe (all fetches combined)
| def _fetchWithDeadline(dialog, pct, msg, func, deadline): | |
| """Run func() in a background thread, showing countdown to deadline on dialog.""" | |
| result = [None] | |
| error = [None] | |
| def run(): | |
| try: result[0] = func() | |
| except Exception as e: error[0] = e | |
| t = threading.Thread(target=run) | |
| t.start() | |
| while t.is_alive(): | |
| remaining = int(deadline - time.time()) | |
| if remaining <= 0: | |
| break | |
| dialog.update(pct, msg) | |
| if dialog.iscanceled(): | |
| return None | |
| t.join(timeout=1) | |
| t.join(timeout=0.5) | |
| if error[0]: | |
| raise error[0] | |
| return result[0] | |
| def _remaining(deadline): | |
| """Seconds left until deadline, minimum 2.""" | |
| return max(2, deadline - time.time()) | |
def getMediaInfo(url, dialog, deadline=None):
    """Detect stream type and probe media info. Returns formatted info string or None.

    url may carry Kodi-style '|header=value' suffixes (stripped for the
    request, forwarded to the probers). dialog is a progress dialog used for
    updates and cancel detection; a return of None means the user cancelled.
    """
    import requests
    if not deadline:
        deadline = time.time() + TOTAL_TIMEOUT
    stream_url = url.split('|')[0]
    url_lower = stream_url.lower()
    # 1. Check URL extension first (fast path)
    if '.m3u8' in url_lower:
        return _probeHLS(url, dialog, deadline)
    if '.mpd' in url_lower:
        return _probeDASH(url, dialog, deadline)
    # 2. Fetch a small chunk to detect by Content-Type + content sniffing
    headers = _parseHeaders(url)
    try:
        # NOTE(review): verify=False disables TLS verification — presumably
        # deliberate for hoster CDNs with broken certs; confirm.
        r = _fetchWithDeadline(dialog, 55, 'Erkenne Stream-Typ...',
                               lambda: requests.get(stream_url, headers=headers, timeout=_remaining(deadline), verify=False, stream=True),
                               deadline)
        if r is None:
            return None  # cancelled by user
        if r.status_code >= 400:
            return 'Stream nicht erreichbar (HTTP %d)' % r.status_code
        ct = r.headers.get('Content-Type', '').lower()
        content_length = r.headers.get('Content-Length', '')
        # Read first 8KB for sniffing
        peek = next(r.iter_content(chunk_size=8192), b'')
        r.close()
        # Detect HLS by Content-Type or content
        if 'mpegurl' in ct or 'apple' in ct or peek.lstrip().startswith(b'#EXTM3U'):
            return _probeHLS(url, dialog, deadline)
        # Detect DASH by Content-Type or content
        if 'dash' in ct or (peek.lstrip().startswith(b'<?xml') and b'<MPD' in peek):
            return _probeDASH(url, dialog, deadline)
        # Otherwise: direct file (MP4, MKV, etc.)
        return _probeDirect(url, dialog, deadline, content_length)
    except Exception as e:
        log_utils.log('getMediaInfo Error: %s' % str(e), log_utils.LOGERROR)
        return 'Stream-Typ konnte nicht erkannt werden'
def _probeHLS(url, dialog=None, deadline=None):
    """Probe an HLS master playlist and summarise its best variant.

    url      -- playlist URL, optionally followed by '|'-separated headers
    dialog   -- unused here; kept for a uniform probe-function signature
    deadline -- absolute time.time() budget (defaults to now + TOTAL_TIMEOUT)

    Returns a German multi-line info string (type, resolution, codecs, audio
    tracks, bitrate) or None on error.
    """
    import requests
    if not deadline:
        deadline = time.time() + TOTAL_TIMEOUT
    try:
        stream_url = url.split('|')[0]
        headers = _parseHeaders(url)
        r = requests.get(stream_url, headers=headers, timeout=_remaining(deadline), verify=False)
        content = r.text
        # No variant tags -> media playlist (single bitrate): nothing to report
        if '#EXT-X-STREAM-INF' not in content:
            return 'HLS Stream (Single-Bitrate)\n\nKeine Auflösungsinfo im Manifest verfügbar.'
        lines = content.strip().split('\n')
        variants = []
        # Collect (width, height, bandwidth, codecs) for every variant stream
        for line in lines:
            if line.startswith('#EXT-X-STREAM-INF'):
                res_match = re.search(r'RESOLUTION=(\d+)x(\d+)', line)
                bw_match = re.search(r'BANDWIDTH=(\d+)', line)
                codec_match = re.search(r'CODECS="([^"]+)"', line)
                width = int(res_match.group(1)) if res_match else 0
                height = int(res_match.group(2)) if res_match else 0
                bandwidth = int(bw_match.group(1)) if bw_match else 0
                codecs = codec_match.group(1) if codec_match else ''
                variants.append((width, height, bandwidth, codecs))
        if not variants:
            return 'HLS Stream\n\nKeine Auflösungsinfo gefunden.'
        # Highest resolution first; only the best variant is reported
        variants.sort(key=lambda x: x[1], reverse=True)
        best = variants[0]
        result = 'Typ: HLS Stream\n'
        result += 'Auflösung: %dx%d (%s)\n' % (best[0], best[1], _resLabel(best[1]))
        if best[3]:
            # Split the CODECS attribute into video vs. audio by fourcc prefix
            vc = [c.strip() for c in best[3].split(',') if c.strip()[:3].lower() in ('avc', 'hev', 'hvc', 'av0', 'vp0', 'vp8', 'vp9', 'mp4')]
            ac = [c.strip() for c in best[3].split(',') if c.strip() not in vc]
            # mp4a is audio, not video (the 'mp4' prefix above matches it too)
            vc_final = [c for c in vc if not c.strip().lower().startswith('mp4a')]
            ac_final = ac + [c for c in vc if c.strip().lower().startswith('mp4a')]
            if vc_final: result += 'Video-Codec: %s\n' % _codecName(','.join(vc_final))
            # Parse #EXT-X-MEDIA:TYPE=AUDIO lines for language info
            audio_tracks = []
            for line in lines:
                if line.startswith('#EXT-X-MEDIA') and 'TYPE=AUDIO' in line:
                    lang_m = re.search(r'LANGUAGE="([^"]*)"', line)
                    lang = lang_m.group(1) if lang_m else ''
                    audio_tracks.append(lang)
            if audio_tracks:
                # One "Audio:" line per distinct language; codec from CODECS attr
                codec_str = _codecName(','.join(ac_final)) if ac_final else ''
                seen_langs = set()
                for lang in audio_tracks:
                    lang_key = lang.lower()
                    if lang_key in seen_langs:
                        continue
                    seen_langs.add(lang_key)
                    lang_str = _langName(lang)
                    if lang_str and codec_str:
                        result += 'Audio: %s — %s\n' % (codec_str, lang_str)
                    elif codec_str:
                        result += 'Audio: %s\n' % codec_str
                    elif lang_str:
                        result += 'Audio: %s\n' % lang_str
            elif ac_final:
                result += 'Audio: %s\n' % _codecName(','.join(ac_final))
        if best[2]: result += 'Bitrate: %s\n' % _fmtBitrate(best[2])
        return result.rstrip()
    except Exception as e:
        log_utils.log('_probeHLS Error: %s' % str(e), log_utils.LOGERROR)
        return None
def _probeDASH(url, dialog=None, deadline=None):
    """Probe a DASH MPD manifest and summarise the best video representation.

    url      -- MPD URL, optionally followed by '|'-separated headers
    dialog   -- unused here; kept for a uniform probe-function signature
    deadline -- absolute time.time() budget (defaults to now + TOTAL_TIMEOUT)

    Returns a German multi-line info string (type, resolution, codec,
    bitrate, audio tracks) or None on error.
    """
    import requests
    import xml.etree.ElementTree as ET
    if not deadline:
        deadline = time.time() + TOTAL_TIMEOUT
    try:
        stream_url = url.split('|')[0]
        headers = _parseHeaders(url)
        r = requests.get(stream_url, headers=headers, timeout=_remaining(deadline), verify=False)
        root = ET.fromstring(r.content)
        # Handle XML namespace (tags come back as '{urn:...}Representation')
        ns = ''
        ns_match = re.match(r'\{(.+?)\}', root.tag)
        if ns_match:
            ns = '{%s}' % ns_match.group(1)
        variants = []
        for rep in root.iter('%sRepresentation' % ns):
            width = rep.get('width')
            height = rep.get('height')
            bandwidth = rep.get('bandwidth')
            codecs = rep.get('codecs', '')
            mime = rep.get('mimeType', '')
            # Also check parent AdaptationSet (attributes can be inherited).
            # NOTE(review): this linear parent search runs once per
            # Representation — O(n^2) overall; fine for typical manifests.
            parent = None
            for adapt in root.iter('%sAdaptationSet' % ns):
                if rep in list(adapt):
                    parent = adapt
                    break
            if not mime and parent is not None:
                mime = parent.get('mimeType', '')
            if not codecs and parent is not None:
                codecs = parent.get('codecs', '')
            # Only representations carrying a resolution count as video
            if width and height:
                variants.append((int(width), int(height), int(bandwidth) if bandwidth else 0, codecs, mime))
        if not variants:
            return 'DASH Stream\n\nKeine Auflösungsinfo im Manifest verfügbar.'
        # Deduplicate and sort by height, then bandwidth, descending
        seen = set()
        unique = []
        for v in variants:
            key = (v[0], v[1], v[2])
            if key not in seen:
                seen.add(key)
                unique.append(v)
        unique.sort(key=lambda x: (x[1], x[2]), reverse=True)
        best = unique[0]
        result = 'Typ: DASH Stream\n'
        result += 'Auflösung: %dx%d (%s)\n' % (best[0], best[1], _resLabel(best[1]))
        if best[3]: result += 'Video-Codec: %s\n' % _codecName(best[3])
        if best[2]: result += 'Bitrate: %s\n' % _fmtBitrate(best[2])
        # Audio info from audio AdaptationSets (first codec-bearing rep each)
        for adapt in root.iter('%sAdaptationSet' % ns):
            mime = adapt.get('mimeType', '')
            if 'audio' not in mime:
                continue
            lang = adapt.get('lang', '')
            lang_str = _langName(lang)
            for rep in adapt.iter('%sRepresentation' % ns):
                ac = rep.get('codecs', '') or adapt.get('codecs', '')
                if ac:
                    if lang_str:
                        result += 'Audio: %s — %s\n' % (_codecName(ac), lang_str)
                    else:
                        result += 'Audio: %s\n' % _codecName(ac)
                    break
        return result.rstrip()
    except Exception as e:
        log_utils.log('_probeDASH Error: %s' % str(e), log_utils.LOGERROR)
        return None
def _fetchRange(url, headers, start, end, dialog, deadline, pct, msg):
    """Fetch the byte range [start, end] of *url*, showing download progress.

    url/headers -- target URL and request headers (Range is added here)
    start, end  -- inclusive byte offsets
    dialog      -- progress dialog updated with the running KB count
    deadline    -- absolute time.time() budget; download stops when exceeded
    pct, msg    -- progress percentage / message prefix for the dialog

    Returns the (possibly truncated) bytes, or None when the request failed
    or was cancelled before it started.
    """
    import requests
    h = dict(headers)
    h['Range'] = 'bytes=%d-%d' % (start, end)
    try:
        r = _fetchWithDeadline(dialog, pct, msg,
                lambda: requests.get(url, headers=h, timeout=_remaining(deadline), verify=False, stream=True),
                deadline)
        if r is None or r.status_code >= 400:
            return None
    except:
        return None
    data = b''
    want = end - start + 1
    try:
        for chunk in r.iter_content(chunk_size=65536):
            data += chunk
            remaining = int(deadline - time.time())
            dialog.update(pct, '%s %.0f KB' % (msg, len(data) / 1024.0))
            if len(data) >= want:
                break
            if dialog.iscanceled() or remaining <= 0:
                break
    finally:
        # Release the connection even when the server aborts mid-transfer.
        r.close()
    return data
| def _findMoov(data): | |
| """Scan top-level MP4 boxes to find moov offset and size. | |
| Returns (offset, size) or (None, None).""" | |
| import struct | |
| p = 0 | |
| while p < len(data) - 8: | |
| try: | |
| box_size = struct.unpack('>I', data[p:p+4])[0] | |
| box_type = data[p+4:p+8] | |
| except: | |
| break | |
| if box_size < 8: | |
| break | |
| if box_type == b'moov': | |
| return p, box_size | |
| if box_type == b'mdat': | |
| return None, None # moov is after mdat (at end of file) | |
| p += box_size | |
| return None, None | |
def _probeDirect(url, dialog, deadline, file_size_str=''):
    """Probe a progressive (direct) file via HTTP range requests.

    Downloads just enough of the file to find and parse the MP4 'moov' box,
    then reports resolution, codec, FPS, audio tracks, average bitrate,
    duration, content type and file size.

    url           -- file URL, optionally followed by '|'-separated headers
    dialog        -- progress dialog for status updates
    deadline      -- absolute time.time() budget
    file_size_str -- optional Content-Length the caller already saw (fallback)

    Returns a German multi-line info string or None on error.
    """
    import requests, struct
    try:
        stream_url = url.split('|')[0]
        headers = _parseHeaders(url)
        # Step 1: fetch first 64 KB to scan box headers
        INITIAL = 65536
        data = _fetchRange(stream_url, headers, 0, INITIAL - 1, dialog, deadline, 65, 'Lade Datei-Header...')
        if data is None:
            return 'Typ: Direkter Stream\n\nServer nicht erreichbar'
        # Extract file size from response (need a quick HEAD-like check)
        content_type = ''
        total_size = 0
        try:
            # NOTE(review): h_check is built but never sent — the HEAD request
            # below passes the plain headers; confirm whether Range was intended.
            h_check = dict(headers)
            h_check['Range'] = 'bytes=0-0'
            r_check = requests.head(stream_url, headers=headers, timeout=_remaining(deadline), verify=False)
            content_type = r_check.headers.get('Content-Type', '')
            cr = r_check.headers.get('Content-Range', '')
            # Content-Range "bytes 0-0/12345": total size follows the slash
            if '/' in cr:
                try: total_size = int(cr.split('/')[-1])
                except: pass
            if not total_size:
                try: total_size = int(r_check.headers.get('Content-Length', '0'))
                except: pass
        except:
            pass
        if not total_size and file_size_str:
            try: total_size = int(file_size_str)
            except: pass
        # Step 2: scan top-level boxes to locate moov
        moov_off, moov_size = _findMoov(data)
        # We only need structural metadata (mvhd, tkhd, mdhd, stsd, stts),
        # not sample tables (stsz, stsc, stco) — cap at 256 KB
        MOOV_CAP = 262144
        if moov_off is not None:
            # moov found at start — fetch more if we don't have it all
            fetch_size = min(moov_size, MOOV_CAP)
            moov_end = moov_off + fetch_size
            if moov_end > len(data):
                dialog.update(70, 'Lade Video-Info... (%d KB)' % (fetch_size // 1024))
                extra = _fetchRange(stream_url, headers, len(data), moov_end - 1,
                        dialog, deadline, 70, 'Lade Video-Info...')
                if extra:
                    data = data + extra
        elif total_size > INITIAL:
            # moov not at start (mdat first) — try from end
            tail_size = min(total_size, MOOV_CAP)
            tail_start = total_size - tail_size
            tail = _fetchRange(stream_url, headers, tail_start, total_size - 1,
                    dialog, deadline, 75, 'Lade Video-Info...')
            if tail:
                moov_off, moov_size = _findMoov(tail)
                if moov_off is not None:
                    moov_end = moov_off + min(moov_size, MOOV_CAP)
                    if moov_end > len(tail):
                        # moov spills past the tail chunk — refetch it exactly
                        abs_moov_start = tail_start + moov_off
                        abs_moov_end = abs_moov_start + min(moov_size, MOOV_CAP) - 1
                        extra = _fetchRange(stream_url, headers, abs_moov_start, abs_moov_end,
                                dialog, deadline, 80, 'Lade Video-Info...')
                        if extra:
                            data = extra
                        else:
                            data = tail
                    else:
                        data = tail
                else:
                    data = tail
        dialog.update(85, 'Analysiere Video-Header...')
        width, height, codec, duration_sec, fps, audio_traks = _parseMp4(data)
        result = 'Typ: Direkter Stream\n'
        if width and height:
            label = _resLabel(height)
            result += 'Auflösung: %dx%d (%s)\n' % (width, height, label)
        else:
            result += 'Auflösung: nicht aus Datei-Header ermittelbar\n'
        if codec:
            result += 'Video-Codec: %s\n' % _codecName(codec)
        if fps:
            if fps == int(fps):
                result += 'FPS: %d\n' % int(fps)
            else:
                # keep fractional rates like 23.976, trimming trailing zeros
                fps_str = '%.3f' % fps
                result += 'FPS: %s\n' % fps_str.rstrip('0').rstrip('.')
        # One "Audio:" line per parsed audio track
        for at in audio_traks:
            parts = [_codecName(at.get('audio_codec', ''))]
            if at.get('audio_channels'):
                parts.append(_channelLabel(at['audio_channels']))
            if at.get('audio_samplerate'):
                parts.append('%.1f kHz' % (at['audio_samplerate'] / 1000.0))
            lang_str = _langName(at.get('lang', ''))
            if lang_str:
                result += 'Audio: %s — %s\n' % (', '.join(parts), lang_str)
            else:
                result += 'Audio: %s\n' % ', '.join(parts)
        # Average bitrate = whole file size over duration
        if total_size > 0 and duration_sec and duration_sec > 0:
            bitrate = int(total_size * 8 / duration_sec)
            result += 'Bitrate: %s (Durchschnitt)\n' % _fmtBitrate(bitrate)
        if duration_sec and duration_sec > 0:
            hours = int(duration_sec) // 3600
            mins = (int(duration_sec) % 3600) // 60
            secs = int(duration_sec) % 60
            if hours > 0:
                result += 'Dauer: %d:%02d:%02d\n' % (hours, mins, secs)
            else:
                result += 'Dauer: %d:%02d\n' % (mins, secs)
        if content_type and 'octet' not in content_type:
            result += 'Dateityp: %s\n' % content_type
        if total_size > 0:
            size_mb = total_size / (1024.0 * 1024.0)
            if size_mb >= 1024:
                result += 'Dateigröße: %.1f GB\n' % (size_mb / 1024.0)
            else:
                result += 'Dateigröße: %.0f MB\n' % size_mb
        return result.rstrip()
    except Exception as e:
        log_utils.log('_probeDirect Error: %s' % str(e), log_utils.LOGERROR)
        return None
def _parseMp4(data):
    """Parse MP4 box structure to extract video/audio info.

    data -- bytes containing (at least the start of) the moov box tree

    Returns (width, height, codec, duration_sec, fps, audio_traks) where
    audio_traks is a list of per-track dicts with keys 'audio_codec',
    'audio_channels', 'audio_samplerate' and 'lang'; any value may be
    None/missing when the corresponding box was absent or truncated.
    """
    import struct
    if len(data) < 8:
        return None, None, None, None, None, []
    width = height = None
    codec = None
    duration_sec = None
    fps = None
    current_trak = {}   # info collected for the trak currently being walked
    video_trak = None   # first trak that exposed a video codec
    audio_traks = []    # all traks that exposed an audio codec
    def read_boxes(data, start, end, depth=0):
        # Recursive walk over ISO BMFF boxes in data[start:end].
        nonlocal duration_sec, current_trak, video_trak, audio_traks
        if depth > 10:
            return  # guard against malformed, deeply nested files
        p = start
        while p < end - 8:
            try:
                box_size = struct.unpack('>I', data[p:p+4])[0]
                box_type = data[p+4:p+8]
            except:
                break
            if box_size < 8:
                break  # invalid size: stop instead of looping forever
            if p + box_size > end:
                box_size = end - p  # truncated box — clamp to buffer
            if box_type == b'moov':
                read_boxes(data, p + 8, p + box_size, depth + 1)
            elif box_type == b'trak':
                current_trak = {}
                read_boxes(data, p + 8, p + box_size, depth + 1)
                if 'codec' in current_trak and not video_trak:
                    video_trak = current_trak
                elif 'audio_codec' in current_trak:
                    audio_traks.append(current_trak)
            elif box_type in (b'mdia', b'minf', b'stbl'):
                read_boxes(data, p + 8, p + box_size, depth + 1)
            # mvhd — movie header with duration
            elif box_type == b'mvhd':
                version = data[p + 8] if p + 9 <= end else 0
                if version == 0 and p + 28 <= end:
                    ts = struct.unpack('>I', data[p+20:p+24])[0]   # timescale
                    dur = struct.unpack('>I', data[p+24:p+28])[0]  # duration in ticks
                    if ts > 0 and dur > 0:
                        duration_sec = float(dur) / ts
                elif version == 1 and p + 40 <= end:
                    ts = struct.unpack('>I', data[p+28:p+32])[0]
                    dur = struct.unpack('>Q', data[p+32:p+40])[0]
                    if ts > 0 and dur > 0:
                        duration_sec = float(dur) / ts
            # tkhd — track header with display dimensions (fallback)
            elif box_type == b'tkhd' and box_size >= 84:
                version = data[p + 8] if p + 9 <= end else 0
                if version == 0 and p + 92 <= end:
                    w_raw = struct.unpack('>I', data[p+84:p+88])[0]
                    h_raw = struct.unpack('>I', data[p+88:p+92])[0]
                    w, h = w_raw >> 16, h_raw >> 16  # 16.16 fixed point
                    # plausibility range 120x90 .. 7680x4320
                    if 120 <= w <= 7680 and 90 <= h <= 4320:
                        current_trak['tkhd_w'] = w
                        current_trak['tkhd_h'] = h
                elif version == 1 and p + 104 <= end:
                    w_raw = struct.unpack('>I', data[p+96:p+100])[0]
                    h_raw = struct.unpack('>I', data[p+100:p+104])[0]
                    w, h = w_raw >> 16, h_raw >> 16
                    if 120 <= w <= 7680 and 90 <= h <= 4320:
                        current_trak['tkhd_w'] = w
                        current_trak['tkhd_h'] = h
            # mdhd — media header with track timescale (for FPS) + language
            elif box_type == b'mdhd':
                version = data[p + 8] if p + 9 <= end else 0
                if version == 0 and p + 24 <= end:
                    ts = struct.unpack('>I', data[p+20:p+24])[0]
                    if ts > 0:
                        current_trak['mdhd_ts'] = ts
                    if p + 30 <= end:
                        # packed ISO 639-2: three 5-bit letters, each + 0x60
                        lang = struct.unpack('>H', data[p+28:p+30])[0]
                        lang_str = chr(((lang >> 10) & 0x1F) + 0x60) + chr(((lang >> 5) & 0x1F) + 0x60) + chr((lang & 0x1F) + 0x60)
                        current_trak['lang'] = lang_str
                elif version == 1 and p + 32 <= end:
                    ts = struct.unpack('>I', data[p+28:p+32])[0]
                    if ts > 0:
                        current_trak['mdhd_ts'] = ts
                    if p + 42 <= end:
                        lang = struct.unpack('>H', data[p+40:p+42])[0]
                        lang_str = chr(((lang >> 10) & 0x1F) + 0x60) + chr(((lang >> 5) & 0x1F) + 0x60) + chr((lang & 0x1F) + 0x60)
                        current_trak['lang'] = lang_str
            # stsd — sample description: codec + resolution/audio info
            elif box_type == b'stsd' and box_size > 24:
                if p + 24 <= end:
                    codec_tag = data[p+20:p+24]  # fourcc of the first sample entry
                    is_video = False
                    if codec_tag in (b'avc1', b'avc3'):
                        current_trak['codec'] = 'avc1'
                        is_video = True
                    elif codec_tag in (b'hev1', b'hvc1'):
                        current_trak['codec'] = 'hev1'
                        is_video = True
                    elif codec_tag in (b'av01',):
                        current_trak['codec'] = 'av01'
                        is_video = True
                    elif codec_tag in (b'vp09',):
                        current_trak['codec'] = 'vp09'
                        is_video = True
                    elif codec_tag == b'mp4v':
                        current_trak['codec'] = 'mp4v'
                        is_video = True
                    # Video sample entry: width(uint16) at p+48, height at p+50
                    if is_video and p + 52 <= end:
                        coded_w = struct.unpack('>H', data[p+48:p+50])[0]
                        coded_h = struct.unpack('>H', data[p+50:p+52])[0]
                        if 16 <= coded_w <= 7680 and 16 <= coded_h <= 4320:
                            current_trak['stsd_w'] = coded_w
                            current_trak['stsd_h'] = coded_h
                    # Audio sample entry: channels at p+40, samplerate at p+48
                    if not is_video:
                        audio_tags = {b'mp4a': 'mp4a', b'ac-3': 'ac-3', b'ec-3': 'ec-3',
                                      b'dtsh': 'dtsh', b'dtsl': 'dtsl', b'Opus': 'opus',
                                      b'opus': 'opus', b'fLaC': 'flac'}
                        if codec_tag in audio_tags:
                            current_trak['audio_codec'] = audio_tags[codec_tag]
                            if p + 52 <= end:
                                ch = struct.unpack('>H', data[p+40:p+42])[0]
                                sr = struct.unpack('>I', data[p+48:p+52])[0] >> 16  # 16.16 fixed
                                if 1 <= ch <= 16:
                                    current_trak['audio_channels'] = ch
                                if 8000 <= sr <= 192000:
                                    current_trak['audio_samplerate'] = sr
            # stts — sample-to-time: first delta gives frame duration
            elif box_type == b'stts' and p + 24 <= end:
                entry_count = struct.unpack('>I', data[p+12:p+16])[0]
                if entry_count >= 1:
                    delta = struct.unpack('>I', data[p+20:p+24])[0]
                    if delta > 0:
                        current_trak['stts_delta'] = delta
            p += box_size
    try:
        read_boxes(data, 0, len(data))
    except:
        pass
    if video_trak:
        codec = video_trak.get('codec')
        # Prefer coded size from stsd; fall back to tkhd display size
        if 'stsd_w' in video_trak:
            width = video_trak['stsd_w']
            height = video_trak['stsd_h']
        elif 'tkhd_w' in video_trak:
            width = video_trak['tkhd_w']
            height = video_trak['tkhd_h']
        mdhd_ts = video_trak.get('mdhd_ts')
        stts_delta = video_trak.get('stts_delta')
        if mdhd_ts and stts_delta:
            # frames per second = media timescale / ticks per frame
            fps = round(float(mdhd_ts) / stts_delta, 3)
    return width, height, codec, duration_sec, fps, audio_traks
| # --- Helper functions --- | |
| def _resLabel(h): | |
| if h >= 2160: return '4K UHD' | |
| if h >= 1440: return 'QHD' | |
| if h >= 1080: return 'Full HD' | |
| if h >= 720: return 'HD' | |
| if h >= 480: return 'SD' | |
| return '%dp' % h | |
| def _channelLabel(ch): | |
| if ch == 1: return 'Mono' | |
| if ch == 2: return 'Stereo' | |
| if ch == 6: return '5.1' | |
| if ch == 8: return '7.1' | |
| return '%d Kanäle' % ch | |
| def _codecName(raw): | |
| if not raw: return '' | |
| parts = [c.strip() for c in raw.split(',')] | |
| names = [] | |
| for p in parts: | |
| pl = p.lower() | |
| if pl.startswith('avc') or pl.startswith('h264') or pl == 'h.264': | |
| names.append('H.264') | |
| elif pl.startswith('hev') or pl.startswith('hvc') or pl.startswith('h265') or pl == 'h.265' or pl == 'hevc': | |
| names.append('H.265') | |
| elif pl.startswith('av01') or pl == 'av1': | |
| names.append('AV1') | |
| elif pl.startswith('vp9') or pl.startswith('vp09'): | |
| names.append('VP9') | |
| elif pl.startswith('mp4a.6b') or pl == 'mp3': | |
| names.append('MP3') | |
| elif pl.startswith('mp4a') or pl == 'aac': | |
| names.append('AAC') | |
| elif pl.startswith('ec-3') or pl.startswith('eac') or pl == 'e-ac-3': | |
| names.append('Dolby Digital+') | |
| elif pl.startswith('ac-3') or pl.startswith('ac3'): | |
| names.append('Dolby Digital') | |
| elif pl.startswith('dts'): | |
| names.append('DTS') | |
| elif pl.startswith('opus'): | |
| names.append('Opus') | |
| elif pl.startswith('flac'): | |
| names.append('FLAC') | |
| else: | |
| names.append(p) | |
| seen = set() | |
| unique = [] | |
| for n in names: | |
| if n not in seen: | |
| seen.add(n) | |
| unique.append(n) | |
| return ' + '.join(unique) | |
| def _langName(code): | |
| if not code: return '' | |
| names = { | |
| 'de': 'Deutsch', 'deu': 'Deutsch', 'ger': 'Deutsch', | |
| 'en': 'Englisch', 'eng': 'Englisch', | |
| 'fr': 'Französisch', 'fra': 'Französisch', 'fre': 'Französisch', | |
| 'es': 'Spanisch', 'spa': 'Spanisch', | |
| 'it': 'Italienisch', 'ita': 'Italienisch', | |
| 'ja': 'Japanisch', 'jpn': 'Japanisch', | |
| 'ko': 'Koreanisch', 'kor': 'Koreanisch', | |
| 'pt': 'Portugiesisch', 'por': 'Portugiesisch', | |
| 'ru': 'Russisch', 'rus': 'Russisch', | |
| 'tr': 'Türkisch', 'tur': 'Türkisch', | |
| 'zh': 'Chinesisch', 'zho': 'Chinesisch', 'chi': 'Chinesisch', | |
| 'ar': 'Arabisch', 'ara': 'Arabisch', | |
| 'hi': 'Hindi', 'hin': 'Hindi', | |
| 'nl': 'Niederländisch', 'nld': 'Niederländisch', 'dut': 'Niederländisch', | |
| 'pl': 'Polnisch', 'pol': 'Polnisch', | |
| 'sv': 'Schwedisch', 'swe': 'Schwedisch', | |
| 'da': 'Dänisch', 'dan': 'Dänisch', | |
| 'no': 'Norwegisch', 'nor': 'Norwegisch', | |
| 'fi': 'Finnisch', 'fin': 'Finnisch', | |
| 'cs': 'Tschechisch', 'ces': 'Tschechisch', 'cze': 'Tschechisch', | |
| 'el': 'Griechisch', 'ell': 'Griechisch', 'gre': 'Griechisch', | |
| 'he': 'Hebräisch', 'heb': 'Hebräisch', | |
| 'th': 'Thailändisch', 'tha': 'Thailändisch', | |
| 'uk': 'Ukrainisch', 'ukr': 'Ukrainisch', | |
| 'und': '', | |
| } | |
| return names.get(code.lower(), code.upper()) | |
| def _fmtBitrate(bw): | |
| if not bw or bw <= 0: return '' | |
| if bw >= 1000000: | |
| return '%.1f Mbit/s' % (bw / 1000000.0) | |
| return '%d kbit/s' % (bw / 1000) | |
| def _parseHeaders(url): | |
| headers = {} | |
| if '|' in url: | |
| try: | |
| header_str = url.split('|', 1)[1] | |
| headers = dict([item.split('=', 1) for item in header_str.split('&')]) | |
| for h in headers: | |
| headers[h] = control.unquote_plus(headers[h]) | |
| except: | |
| pass | |
| return headers |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #2021-07-15 | |
| # edit 2025-08-02 switch from treads to concurrent.futures | |
| import sys | |
| import datetime, time, json | |
| from concurrent.futures import ThreadPoolExecutor | |
| from resources.lib.tmdb import cTMDB | |
| from resources.lib.indexers import navigator | |
| from resources.lib import searchDB, playcountDB, art, control, log_utils | |
| from resources.lib.control import getKodiVersion, iteritems | |
| if int(getKodiVersion()) >= 20: from infotagger.listitem import ListItemInfoTag | |
| _params = dict(control.parse_qsl(sys.argv[2].replace('?',''))) if len(sys.argv) > 1 else dict() | |
| class movies: | |
| def __init__(self): | |
| self.list = [] | |
| self.meta = [] | |
| self.total_pages = 0 | |
| self.next_pages = 0 | |
| self.query = '' | |
| self.activeSearchDB = 'TMDB' | |
| #self.setSearchDB() # TODO different search providers | |
| self.playcount = 0 | |
| self.search_direct = False | |
| self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours=5)) | |
| self.systime = (self.datetime).strftime('%Y%m%d%H%M%S%f') | |
    def get(self, params):
        """Run a TMDB movie search and render the results directory.

        params -- dict from the plugin URL with 'query', 'page' and 'action'

        On a hit the result list is rendered via getDirectory() and the query
        is saved to the search history; with no hits an info dialog is shown.
        NOTE(review): the bare except silently swallows all errors (including
        bad 'page' values) — intentional best-effort behaviour, apparently.
        """
        try:
            self.next_pages = int(params.get('page'))
            self.query = params.get('query')
            self.list, self.total_pages = cTMDB().search_term('movie', params.get('query'), params.get('page'))
            if self.list == None or len(self.list) == 0:  # nothing found
                return control.infoDialog("Nichts gefunden1", time=2000)
            self.search_direct = True
            self.getDirectory(params)
            # persist only successful searches in the history table
            searchDB.save_query(params.get('query'), params.get('action'))
        except:
            return
| def getDirectory(self, params): | |
| try: | |
| if params.get('next_pages'): self.next_pages = params.get('next_pages') | |
| if params.get('total_pages'): self.total_pages = params.get('total_pages') | |
| if params.get('list'): self.list = params.get('list') | |
| self.worker() | |
| if self.list == None or len(self.list) == 0: #nichts gefunden | |
| return control.infoDialog("Nichts gefunden", time=2000) | |
| self.Directory(self.list) | |
| return self.list | |
| except: | |
| return | |
    def search(self):
        """Render the movie search menu: 'new search' entry plus history.

        Lists each previously saved query (deduplicated, keeping first
        occurrence) with a context-menu entry to delete it, and a 'clear
        history' entry when at least one saved query exists.
        """
        # TODO different search providers
        #navigator.navigator().addDirectoryItem("DB für Suche auswählen", 'movieChangeSearchDB', self.activeSearchDB + '.png', 'DefaultMovies.png', isFolder=False)
        navigator.navigator().addDirectoryItem("[B]Filme - neue Suche %s[/B]" % self.activeSearchDB , 'searchNew&table=movies', self.activeSearchDB + '_search.png', 'DefaultAddonsSearch.png',
                isFolder=False, context=('Einstellungen', 'addonSettings'))
        match = searchDB.getSearchTerms('movies')
        lst = []
        delete_option = False
        #for i in match:
        for index, i in enumerate(match):
            term = control.py2_encode(i['query'])
            if term not in lst:
                delete_option = True
                # index identifies the history row for the delete context entry
                navigator.navigator().addDirectoryItem(term, 'movies&page=1&query=%s' % term, '_search.png',
                        'DefaultAddonsSearch.png', isFolder=True,
                        context=("Suchanfrage löschen", 'searchDelTerm&table=movies&name=%s' % index))
                lst += [(term)]
        if delete_option:
            navigator.navigator().addDirectoryItem("[B]Suchverlauf löschen[/B]", 'searchClear&table=movies', 'tools.png', 'DefaultAddonProgram.png', isFolder=False)
        navigator.navigator()._endDirectory('', False)  # addons videos files
| #TODO https://forum.kodi.tv/showthread.php?tid=199579 | |
| # def setSearchDB(self, new=''): | |
| # if control.getSetting('active.SearchDB.movie'): | |
| # _searchDB = control.getSetting('active.SearchDB.movie') | |
| # if new != '': | |
| # control.setSetting('active.SearchDB.movie', new) | |
| # _searchDB = new | |
| # self.activeSearchDB = _searchDB | |
| # else: | |
| # control.setSetting('active.SearchDB.movie', 'tmdb') | |
| # self.activeSearchDB = 'tmdb' | |
| # | |
| # def changeSearchDB(self): | |
| # active = control.getSetting('active.SearchDB.movie') | |
| # data = [] | |
| # for i in ['tmdb', 'trakt']: | |
| # if i == active: continue | |
| # data.append('wechseln zu ' + i.upper()) | |
| # index = control.dialog.contextmenu(data) | |
| # if index == -1: | |
| # return | |
| # term = data[index].lower().split()[-1] | |
| # self.setSearchDB(term) | |
| # url = '%s?action=movieSearch' % sys.argv[0] | |
| # control.execute('Container.Update(%s)' % url) | |
| def worker(self): | |
| try: | |
| self.meta = [] | |
| with ThreadPoolExecutor() as executor: | |
| executor.map(self.super_meta, self.list) | |
| self.meta = sorted(self.meta, key=lambda k: k['title']) | |
| #self.list = [i for i in self.meta if i['votes'] > 10 and i['rating'] > 4] | |
| self.list = [] | |
| for i in self.meta: | |
| if self.search_direct: | |
| self.list.append(i) | |
| else: | |
| if 'votes' in i and i['votes'] > 10 and 'rating' in i and i['rating'] > 4: self.list.append(i) | |
| if not 'votes' in i: self.list.append(i) | |
| except: | |
| log_utils.error() | |
    def super_meta(self, id):
        """Worker: fetch full TMDB metadata for one movie id into self.meta.

        Adds the stored playcount and a fallback poster when missing, then
        appends the meta dict to the shared self.meta list (called from
        worker threads; list.append is the only shared mutation). Any failure
        is swallowed so one bad id cannot break the whole listing.
        """
        try:
            # TODO different search providers
            meta = cTMDB().get_meta('movie', '', '', id, advanced='true')
            try:
                # getPlaycount(mediatype, column_names, column_value, season=0, episode=0)
                playcount = playcountDB.getPlaycount('movie', 'imdb_id', meta['imdb_id'])
                playcount = playcount if playcount else 0
                meta.update({'playcount': playcount})
            except:
                pass
            if not 'poster' in meta or meta['poster'] == '':
                # no TMDB poster — fall back to the art scraper
                poster = art.getMovie_art(meta['tmdb_id'], meta['imdbnumber'])
                meta.update({'poster': poster})
            #meta.update({'mediatype': 'movie'})
            self.meta.append(meta)
            return meta
        except:
            pass
| def Directory(self, items): | |
| if items == None or len(items) == 0: | |
| control.idle() | |
| sys.exit() | |
| sysaddon = sys.argv[0] | |
| syshandle = int(sys.argv[1]) | |
| addonPoster, addonBanner = control.addonPoster(), control.addonBanner() | |
| addonFanart, settingFanart = control.addonFanart(), control.getSetting('fanart') | |
| watchedMenu = "In %s [I]Gesehen[/I]" % control.addonName | |
| unwatchedMenu = "In %s [I]Ungesehen[/I]" % control.addonName | |
| hasYouTube = False | |
| if control.condVisibility('System.HasAddon(plugin.video.youtube)'): | |
| try: | |
| import xbmcvfs, json as _json | |
| _f = xbmcvfs.File('special://profile/addon_data/plugin.video.youtube/api_keys.json') | |
| _data = _json.loads(_f.read()) | |
| _f.close() | |
| hasYouTube = bool(_data.get('keys', {}).get('user', {}).get('api_key')) | |
| except Exception: | |
| pass | |
| for i in items: | |
| try: | |
| title = i['title'] if 'title' in i else i['originaltitle'] | |
| if not title.isascii(): continue | |
| try: | |
| label = '%s (%s)' % (title, i['year']) # show in list | |
| except: | |
| label = title | |
| sysname = label | |
| if 'premiered' in i: | |
| if datetime.datetime(*(time.strptime(i['premiered'], "%Y-%m-%d")[0:6])) > datetime.datetime.now(): | |
| label = '[COLOR=red][I]{}[/I][/COLOR]'.format(label) # ffcc0000 | |
| else: | |
| label = '[COLOR=red][I]{}[/I][/COLOR]'.format(label) | |
| meta = dict((k, v) for k, v in iteritems(i)) | |
| if not 'duration' in i or i['duration'] == 0: meta.update({'duration': str(120 * 60)}) | |
| poster = i['poster'] if 'poster' in i and 'http' in i['poster'] else addonPoster | |
| fanart = i['fanart'] if 'fanart' in i and 'http' in i['fanart'] else addonFanart | |
| meta.update({'poster': poster}) | |
| meta.update({'fanart': fanart}) | |
| meta.update({'systitle': title}) | |
| meta.update({'sysname': sysname}) | |
| _sysmeta = control.quote_plus(json.dumps(meta)) | |
| item = control.item(label=label, offscreen=True) | |
| item.setArt({'poster': poster, 'banner': addonBanner}) | |
| if settingFanart == 'true': item.setProperty('Fanart_Image', fanart) | |
| cm = [] | |
| try: | |
| playcount = i['playcount'] if 'playcount' in i else 0 | |
| if playcount == 1: | |
| cm.append((unwatchedMenu, 'RunPlugin(%s?action=UpdatePlayCount&meta=%s&playCount=0)' % (sysaddon, _sysmeta))) | |
| meta.update({'playcount': 1, 'overlay': 7}) | |
| else: | |
| cm.append((watchedMenu, 'RunPlugin(%s?action=UpdatePlayCount&meta=%s&playCount=1)' % (sysaddon, _sysmeta))) | |
| meta.update({'playcount': 0, 'overlay': 6}) | |
| except: | |
| pass | |
| if hasYouTube: | |
| cm.append(('Trailer ansehen', 'RunPlugin(%s?action=playTrailer&tmdb_id=%s&mediatype=movie)' % (sysaddon, meta['tmdb_id']))) | |
| cm.append(('Einstellungen', 'RunPlugin(%s?action=addonSettings)' % sysaddon)) | |
| item.addContextMenuItems(cm) | |
| if 'plot' in i: | |
| plot = i['plot'] | |
| else: | |
| plot = '' | |
| votes = '' | |
| if 'rating' in i and i['rating'] != '': | |
| if 'votes' in i: votes = '(%s)' % str(i['votes']).replace(',', '') | |
| plot = '[COLOR blue]Bewertung : %.1f %s[/COLOR]%s%s' % (float(i['rating']), votes, "\n\n", plot) | |
| meta.update({'plot': plot}) | |
| aActors = [] | |
| if 'cast' in i and i['cast']: aActors = i['cast'] | |
| ## supported infolabels: https://codedocs.xyz/AlwinEsch/kodi/group__python__xbmcgui__listitem.html#ga0b71166869bda87ad744942888fb5f14 | |
| # remove unsupported infolabels | |
| meta.pop('cast', None) # ersetzt durch item.setCast(i['cast']) | |
| meta.pop('fanart', None) | |
| meta.pop('tmdb_id', None) | |
| meta.pop('originallanguage', None) | |
| meta.pop('budget', None) | |
| meta.pop('revenue', None) | |
| meta.pop('sysname', None) | |
| meta.pop('systitle', None) | |
| sysmeta = control.quote_plus(json.dumps(meta)) | |
| url = '%s?action=play&sysmeta=%s' % (sysaddon, sysmeta) | |
| meta.pop('poster', None) | |
| meta.pop('imdb_id', None) | |
| meta.pop('aliases', None) | |
| meta.pop('backdrop_url', None) | |
| meta.pop('cover_url', None) | |
| # TODO | |
| # gefakte Video/Audio Infos | |
| # video_streaminfo = {'codec': 'h264', "width": 1920, "height": 1080} | |
| # audio_streaminfo = {'codec': 'dts', 'channels': 6, 'language': 'de'} | |
| video_streaminfo = {} | |
| audio_streaminfo = {} | |
| if int(getKodiVersion()) <= 19: | |
| if aActors: item.setCast(aActors) | |
| item.setInfo(type='Video', infoLabels=meta) | |
| item.addStreamInfo('video', video_streaminfo) | |
| item.addStreamInfo('audio', audio_streaminfo) | |
| else: | |
| info_tag = ListItemInfoTag(item, 'video') | |
| info_tag.set_info(meta) | |
| """ | |
| stream_details = { | |
| 'video': [{videostream_1_values}, {videostream_2_values} ...], | |
| 'audio': [{audiostream_1_values}, {audiostream_2_values} ...], | |
| 'subtitle': [{subtitlestream_1_values}, {subtitlestream_2_values} ...]} | |
| """ | |
| stream_details = { | |
| 'video': [video_streaminfo], | |
| 'audio': [audio_streaminfo]} | |
| info_tag.set_stream_details(stream_details) | |
| info_tag.set_cast(aActors) | |
| control.addItem(handle=syshandle, url=url, listitem=item, isFolder=False) | |
| except Exception as e: | |
| print(e) | |
| pass | |
| # nächste Seite | |
| try: | |
| self.next_pages = self.next_pages + 1 | |
| if self.next_pages <= self.total_pages: | |
| if self.query: | |
| url = '%s?action=movies&url=&page=%s&query=%s' % (sys.argv[0], self.next_pages, self.query ) | |
| else: | |
| url = '%s?action=listings' % sys.argv[0] | |
| url += '&media_type=%s' % _params.get('media_type') | |
| url += '&next_pages=%s' % self.next_pages | |
| url += '&url=%s' % control.quote_plus(_params.get('url')) | |
| item = control.item(label="Nächste Seite") | |
| icon = control.addonNext() | |
| item.setArt({'icon': icon, 'thumb': icon, 'poster': icon, 'banner': icon}) | |
| if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart) | |
| # -> gesehen/ungesehen im cm und "Keine Informationen verfügbar" ausblenden (abhängig von control.content() ) | |
| video_streaminfo = {'overlay': 4, 'plot': 'Â '} # alt255 | |
| if int(getKodiVersion()) <= 19: | |
| item.setInfo('video', video_streaminfo) | |
| else: | |
| stream_details = {'video': [video_streaminfo]} | |
| info_tag = ListItemInfoTag(item, 'video') | |
| info_tag.set_stream_details(stream_details) | |
| control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True) | |
| except: | |
| pass | |
| control.content(syshandle, 'movies') | |
| control.plugincategory(syshandle, control.addonVersion) | |
| control.endofdirectory(syshandle, cacheToDisc=True) |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #2021-06-09 | |
| # edit 2025-06-12 | |
| import sys | |
| import json | |
| from resources.lib.tmdb import cTMDB | |
| from resources.lib.indexers import navigator | |
| from resources.lib import searchDB, control, utils, playcountDB | |
| from resources.lib.control import iteritems | |
# query-string parameters of the current plugin invocation (action, query, page, ...)
_params = dict(control.parse_qsl(sys.argv[2].replace('?', ''))) if len(sys.argv) > 1 else dict()
class person:
    """TMDB person search: browse actors and their movie credits as Kodi directories."""

    def __init__(self):
        # result cache and paging state
        self.list = []
        self.total_pages = 0
        self.next_pages = 0
        self.query = ''
        self.activeSearchDB = ''
        #self.setSearchDB() # TODO different search providers
        self.playcount = 0

    def get(self, params):
        """Run a TMDB person search for params['query'] / ['page'] and render the results.

        Saves the query in the search history on success; shows an info
        dialog when nothing was found.
        """
        try:
            self.next_pages = int(params.get('page')) + 1
            self.query = params.get('query')
            # example: search for 'willis'
            # https://api.themoviedb.org/3/search/person?language=de&api_key=be7e192d9ff45609c57344a5c561be1d&query=willis&page=1
            self.list, self.total_pages = cTMDB().search_term('person', params.get('query'), params.get('page'))
            if self.list == None or len(self.list) == 0:  # nothing found
                return control.infoDialog("Nichts gefunden", time=2000)
            #self.list = sorted(self.list, key=lambda k: k['popularity'])
            self.personDirectory(self.list)
            searchDB.save_query(params.get('query'), params.get('action'))
            return self.list
        except:
            pass

    def search(self):
        """Render the person-search menu: new search, saved terms, clear history."""
        navigator.navigator().addDirectoryItem("[B]Darsteller - neue Suche[/B]", 'searchNew&table=person', self.activeSearchDB + '_people-search.png', 'DefaultAddonsSearch.png',
                                               isFolder=False, context=('Einstellungen', 'addonSettings'))
        match = searchDB.getSearchTerms('person')
        lst = []
        delete_option = False
        for index, i in enumerate(match):
            term = control.py2_encode(i['query'])
            if term not in lst:  # skip duplicate history entries
                delete_option = True
                navigator.navigator().addDirectoryItem(term, 'person&page=1&query=%s' % control.quote_plus(term), self.activeSearchDB + '_people-search.png',
                                                       'DefaultAddonsSearch.png', isFolder=True,
                                                       context=("Suchanfrage löschen", 'searchDelTerm&table=person&name=%s' % index))
                lst += [(term)]
        if delete_option:
            navigator.navigator().addDirectoryItem("[B]Suchverlauf löschen[/B]", 'searchClear&table=person', 'tools.png', 'DefaultAddonProgram.png', isFolder=False)
        navigator.navigator()._endDirectory('', False)  # addons videos files

    def personDirectory(self, items):
        """Render the found persons as folder items leading to their credits."""
        if items == None or len(items) == 0:
            control.idle()
            sys.exit()
        sysaddon = sys.argv[0]
        syshandle = int(sys.argv[1])
        addonBanner = control.addonBanner()
        addonFanart, settingFanart = control.addonFanart(), control.getSetting('fanart')
        addonNoPicture = control.addonNoPicture()
        for i in items:
            try:
                label = i['name']  # show in list
                meta = dict((k, v) for k, v in iteritems(i))
                poster = i['poster'] if 'poster' in i and i['poster'] != None else addonNoPicture
                fanart = i['fanart'] if 'fanart' in i and 'http' in i['fanart'] else addonFanart
                meta.update({'poster': poster})
                meta.update({'fanart': fanart})
                sysmeta = control.quote_plus(json.dumps(meta))
                url = '%s?action=personCredits&sysmeta=%s&number=0' % (sysaddon, sysmeta)  #TODO
                item = control.item(label=label, offscreen=True)
                if 'plot' in i:
                    plot = i['plot']
                else:
                    plot = label
                meta.update({'plot': plot})
                item.setArt({'poster': poster, 'banner': addonBanner})
                if settingFanart == 'true': item.setProperty('Fanart_Image', fanart)
                ## supported infolabels: https://codedocs.xyz/AlwinEsch/kodi/group__python__xbmcgui__listitem.html#ga0b71166869bda87ad744942888fb5f14
                # remove unsupported infolabels
                meta.pop('fanart', None)
                meta.pop('poster', None)
                meta.pop('id', None)
                meta.pop('name', None)
                meta.pop('popularity', None)
                item.setInfo(type='Video', infoLabels=meta)
                control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
            except:
                pass
        # next page
        try:
            if self.next_pages <= self.total_pages:
                url = '%s?action=person&url=&page=%s&query=%s' % (sys.argv[0], self.next_pages, self.query)
                item = control.item(label="Nächste Seite")
                icon = control.addonNext()
                item.setArt({'icon': icon, 'thumb': icon, 'poster': icon, 'banner': icon})
                if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
                # hide watched/unwatched in the cm and "No information available" (depends on control.content())
                item.setInfo('video', {'overlay': 4, 'plot': ' '})  # alt255
                control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
        except:
            pass
        control.content(syshandle, 'videos')
        control.plugincategory(syshandle, control.addonVersion)
        control.endofdirectory(syshandle, cacheToDisc=True)

    def getCredits(self, params):
        """Fetch a person's movie credits (or render a cached page from params['items'])."""
        try:
            if 'items' in params:
                # paging call: the full credit list is passed back through the URL
                credit_items = json.loads(params['items'])
                self.creditsDirectory(credit_items, int(params['number']))
            else:
                meta = json.loads(params.get('sysmeta'))
                # example: movies with "Bruce Willis" -> person id 62
                # https://api.themoviedb.org/3/person/62/movie_credits?api_key=86dd18b04874d9c94afadde7993d94e3&language=de
                self.list = cTMDB().search_credits('movie_credits', meta['id'])  # "combined_credits", "tv_credits", "movie_credits"
                if self.list == None or len(self.list) == 0:  # nothing found
                    control.infoDialog("Nichts gefunden", time=8000)
                    # bail out explicitly instead of letting the sort below raise on None
                    return
                #self.list = sorted(self.list, key=lambda k: k['vote_average'], reverse=True)
                self.list = utils.multikeysort(self.list, ['-vote_average', '-popularity'])
                self.creditsDirectory(self.list)
                return self.list
        except:
            pass

    def creditsDirectory(self, items, number=0):
        """Render 20 credits starting at index `number`, plus a next-page item."""
        if items == None or len(items) == 0:
            control.idle()
            sys.exit()
        sysaddon = sys.argv[0]
        syshandle = int(sys.argv[1])
        addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
        addonFanart, settingFanart = control.addonFanart(), control.getSetting('fanart')
        hasYouTube = control.hasYouTube()
        trailerLabel = control.trailerLabel()
        for i in range(number, number + 20):
            # fixed off-by-one: the previous `len(items) - 1` bound skipped the last credit
            if i >= len(items): break
            try:
                #label = i['name'] # show in list
                meta = cTMDB()._formatSuper(items[i], '')
                if meta['genre'] == '': continue
                poster = meta['poster'] if 'poster' in meta and meta['poster'] != None else addonPoster
                fanart = meta['fanart'] if 'fanart' in meta and 'http' in meta['fanart'] else addonFanart
                meta.update({'poster': poster})
                meta.update({'fanart': fanart})
                sysmeta = control.quote_plus(json.dumps(meta))
                url = '%s?action=playfromPerson&sysmeta=%s' % (sysaddon, sysmeta)  #playPerson
                year = str(meta['year']) if 'year' in meta else '1900'
                label = meta['title'] + ' (' + year + ')'  #+ meta['mediatype']
                try:
                    playcount = playcountDB.getPlaycount('movie', 'name', label)  # mediatype, column_names, column_value, season=0, episode=0
                    meta.update({'playcount': playcount})
                except:
                    pass
                item = control.item(label=label, offscreen=True)
                if 'plot' in meta:
                    plot = meta['plot']
                else:
                    plot = label
                meta.update({'plot': plot})
                item.setArt({'poster': poster, 'banner': addonBanner})
                if settingFanart == 'true': item.setProperty('Fanart_Image', fanart)
                ## supported infolabels: https://codedocs.xyz/AlwinEsch/kodi/group__python__xbmcgui__listitem.html#ga0b71166869bda87ad744942888fb5f14
                # remove unsupported infolabels
                movie_title = meta.get('title', '')
                tmdb_id = meta.get('tmdb_id', '')
                meta.pop('fanart', None)
                meta.pop('poster', None)
                meta.pop('id', None)
                meta.pop('name', None)
                meta.pop('popularity', None)
                meta.pop('tmdb_id', None)
                meta.pop('genre_ids', None)
                meta.pop('originallanguage', None)
                meta.pop('cover_url', None)
                meta.pop('backdrop_url', None)
                item.setInfo(type='Video', infoLabels=meta)
                cm = []
                if hasYouTube and tmdb_id:
                    cm.append((trailerLabel, 'RunPlugin(%s?action=playTrailer&tmdb_id=%s&mediatype=movie&title=%s&year=%s&poster=%s)' % (
                        sysaddon, tmdb_id,
                        control.quote_plus(str(movie_title)),
                        year,
                        control.quote_plus(str(poster)),
                    )))
                if cm:
                    item.addContextMenuItems(cm)
                control.addItem(handle=syshandle, url=url, listitem=item, isFolder=False)
            except Exception as e:
                print(e)
                pass
        # next page
        try:
            if i < len(items)-1:
                number = number + 20
                url = '%s?action=personCredits&items=%s&number=%s' % (sys.argv[0], control.quote_plus(json.dumps(items)), number)
                item = control.item(label="Nächste Seite")
                icon = control.addonNext()
                item.setArt({'icon': icon, 'thumb': icon, 'poster': icon, 'banner': icon})
                if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
                # hide watched/unwatched in the cm and "No information available" (depends on control.content())
                item.setInfo('video', {'overlay': 4, 'plot': ' '})  # alt255
                control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
        except:
            pass
        control.content(syshandle, 'movies')
        control.plugincategory(syshandle, control.addonVersion)
        control.endofdirectory(syshandle, cacheToDisc=True)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # -*- coding: utf-8 -*- | |
| # Python 3 | |
| # xShip source provider for SerienStream (s.to) | |
| # Rewritten from xStream browser plugin to xShip source class (Issue #50) | |
| from resources.lib.control import getSetting, urlparse | |
| from resources.lib.requestHandler import cRequestHandler | |
| from resources.lib.utils import isBlockedHoster, getHostDict | |
| from scrapers.modules import cleantitle | |
| from scrapers.modules.tools import cParser | |
| from resources.lib import log_utils | |
# provider identity: settings-key prefix, default domain, and display name
SITE_IDENTIFIER = 'serienstream'
SITE_DOMAIN = 's.to'
SITE_NAME = 'SerienStream'
class source:
    """xShip source provider for SerienStream (s.to). TV episodes only."""

    def __init__(self):
        self.priority = 1
        self.language = ['de']
        # domain is user-overridable via settings; the bare-IP fallback has no TLS
        self.domain = getSetting('provider.' + SITE_IDENTIFIER + '.domain', SITE_DOMAIN)
        if self.domain == '186.2.175.5':
            self.base_link = 'http://' + self.domain
        else:
            self.base_link = 'https://' + self.domain
        # logged-in requests session, created lazily and reused across resolve() calls
        self._session = None

    def run(self, titles, year, season=0, episode=0, imdb='', hostDict=None):
        """Scrape hoster links for one episode.

        Returns a list of source dicts (empty on any failure). The 'url'
        field is a compound 'play_url|||referer' string consumed by resolve().
        """
        sources = []
        try:
            # s.to is TV only — skip movies
            if not season or not episode:
                return sources
            # Check credentials
            username = getSetting('serienstream.user')
            password = getSetting('serienstream.pass')
            if not username or not password:
                log_utils.log('SerienStream: No credentials configured', log_utils.LOGWARNING)
                return sources
            # Build clean title set for matching
            t = set(cleantitle.get(i) for i in titles if i)
            # Step 1: Fetch series list and find matching show
            oRequest = cRequestHandler(self.base_link + '/serien')
            oRequest.cacheTime = 60 * 60 * 24
            sHtmlContent = oRequest.request()
            if not sHtmlContent:
                return sources
            pattern = r'<li[^>]*class="series-item"[^>]*>\s*<a[^>]*href="(/serie/[^"]*)"[^>]*>([^<]+)</a>'
            isMatch, aResult = cParser.parse(sHtmlContent, pattern)
            if not isMatch:
                log_utils.log('SerienStream: Could not parse series list', log_utils.LOGWARNING)
                return sources
            show_url = None
            for sUrl, sName in aResult:
                if cleantitle.get(sName.strip()) in t:
                    show_url = sUrl
                    break
            if not show_url:
                log_utils.log('SerienStream: No title match found', log_utils.LOGDEBUG)
                return sources
            # Step 2: Fetch show page, find matching season
            oRequest = cRequestHandler(self.base_link + show_url)
            oRequest.cacheTime = 60 * 60 * 24
            sHtmlContent = oRequest.request()
            if not sHtmlContent:
                return sources
            pattern = r'<nav[^>]*id="season-nav"[^>]*>(.*?)</nav>'
            isMatch, aResult_nav = cParser.parse(sHtmlContent, pattern)
            if not isMatch or not aResult_nav:
                return sources
            pattern = r'<a[^>]*href="(/serie/[^"]*)"[^>]*data-season-pill="(\d+)"'
            isMatch, aResult = cParser.parse(aResult_nav[0], pattern)
            if not isMatch:
                return sources
            season_url = None
            for sUrl, sNr in aResult:
                if int(sNr) == int(season):
                    season_url = sUrl
                    break
            if not season_url:
                log_utils.log('SerienStream: Season %s not found' % season, log_utils.LOGDEBUG)
                return sources
            # Step 3: Fetch season page, find matching episode
            oRequest = cRequestHandler(self.base_link + season_url)
            oRequest.cacheTime = 60 * 60 * 4
            sHtmlContent = oRequest.request()
            if not sHtmlContent:
                return sources
            pattern = r'<table[^>]*class="[^"]*episode-table[^"]*"[^>]*>(.*?)</table>'
            isMatch, aResult_table = cParser.parse(sHtmlContent, pattern)
            if not isMatch or not aResult_table:
                return sources
            pattern = r"onclick=\"window\.location='([^']+)'[^>]*>.*?episode-number-cell[^>]*>\s*(\d+)"
            isMatch, aResult = cParser.parse(aResult_table[0], pattern)
            if not isMatch:
                return sources
            episode_url = None
            for sUrl, sEpNr in aResult:
                if int(sEpNr) == int(episode):
                    episode_url = sUrl
                    break
            if not episode_url:
                log_utils.log('SerienStream: Episode %s not found' % episode, log_utils.LOGDEBUG)
                return sources
            # Step 4: Fetch episode page, parse hoster buttons
            ep_full_url = episode_url if episode_url.startswith('http') else self.base_link + episode_url
            oRequest = cRequestHandler(ep_full_url)
            oRequest.cacheTime = 60 * 60  # 1 hour
            sHtmlContent = oRequest.request()
            if not sHtmlContent:
                return sources
            pattern = r'data-play-url="([^"]+)"[^>]*data-auto-embed="[^"]*"[^>]*data-provider-name="([^"]+)"[^>]*data-language-label="[^"]*"[^>]*data-language-id="([^"]+)"'
            isMatch, aResult = cParser.parse(sHtmlContent, pattern)
            if not isMatch:
                return sources
            hostblockDict = getHostDict()
            for play_url, provider_name, lang_id in aResult:
                # German only
                if lang_id != '1':
                    continue
                # Check against blocked hoster list
                if any(h and h.lower() in provider_name.lower() for h in hostblockDict):
                    continue
                compound_url = play_url + '|||' + ep_full_url
                sources.append({
                    'source': provider_name,
                    'quality': '720p',
                    'url': compound_url,
                    'direct': True,
                    'language': 'de'
                })
            return sources
        except Exception as e:
            log_utils.log('SerienStream run error: %s' % e, log_utils.LOGERROR)
            return sources

    def _get_session(self, referer):
        """Return a logged-in requests session, reusing across resolve() calls."""
        if self._session is not None:
            self._session.headers.update({'Referer': referer})
            return self._session
        import requests as req
        req.packages.urllib3.disable_warnings()
        username = getSetting('serienstream.user')
        password = getSetting('serienstream.pass')
        session = req.Session()
        session.headers.update({
            'User-Agent': cRequestHandler.RandomUA(),
            'Referer': referer,
            'Upgrade-Insecure-Requests': '1'
        })
        session.verify = False
        login_url = self.base_link + '/login'
        # best-effort login; success is detected later via the redirect check in resolve()
        session.post(login_url, data={'email': username, 'password': password}, timeout=5)
        log_utils.log('SerienStream: login done, cookies=%d' % len(session.cookies), log_utils.LOGWARNING)
        self._session = session
        return session

    def resolve(self, url):
        """Follow a compound 'play_url|||referer' link to the hoster's direct stream URL.

        Retries once with a fresh login when the session has expired.
        Returns the resolved stream URL, the hoster URL as fallback, or None.
        """
        try:
            # Parse compound URL (play_url|||referer)
            parts = url.split('|||')
            play_url = parts[0]
            referer = parts[1] if len(parts) > 1 else self.base_link
            log_utils.log('SerienStream resolve: play_url=%s referer=%s' % (play_url, referer), log_utils.LOGWARNING)
            full_play_url = play_url if play_url.startswith('http') else self.base_link + play_url
            # Follow play URL; retry once with fresh login if session expired
            for attempt in range(2):
                session = self._get_session(referer)
                r = session.get(full_play_url, timeout=5)
                sUrl = r.url
                # Session expired — got redirected back to s.to (login page etc.)
                # hostname can be None for malformed/relative URLs, so guard with ''
                if self.domain in (urlparse(sUrl).hostname or ''):
                    log_utils.log('SerienStream resolve: session expired, re-login (attempt %d)' % (attempt + 1), log_utils.LOGWARNING)
                    self._session = None
                    continue
                break
            else:
                log_utils.log('SerienStream resolve: login failed after retry', log_utils.LOGWARNING)
                return None
            log_utils.log('SerienStream resolve: final URL=%s' % sUrl, log_utils.LOGWARNING)
            # Resolve hoster URL via resolveurl to get direct stream URL
            # (done here so sourcesResolve with direct=True skips its own resolve call,
            # which hangs in the background thread without respecting the deadline)
            hostname = urlparse(sUrl).hostname
            # VOE pseudo-domain normalization: unknown VOE domains → voe.sx
            if hostname and 'voe' in hostname.lower():
                isBlocked, sDomain, sCleanUrl, prioHoster = isBlockedHoster(sUrl, isResolve=False)
                if isBlocked:
                    sUrl = sUrl.replace(hostname, 'voe.sx')
                    log_utils.log('SerienStream resolve: VOE normalized to %s' % sUrl, log_utils.LOGWARNING)
            try:
                import resolveurl
                hmf = resolveurl.HostedMediaFile(url=sUrl, include_disabled=True, include_universal=False)
                if hmf.valid_url():
                    resolved = hmf.resolve()
                    if resolved:
                        log_utils.log('SerienStream resolve: resolveurl -> %s' % resolved, log_utils.LOGWARNING)
                        return resolved
            except Exception as e:
                log_utils.log('SerienStream resolve: resolveurl failed: %s' % e, log_utils.LOGWARNING)
            return sUrl
        except Exception as e:
            log_utils.log('SerienStream resolve error: %s' % e, log_utils.LOGERROR)
            return None
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # edit 2025-06-12 | |
| import sys | |
| import re, json, random, time | |
| from concurrent.futures import ThreadPoolExecutor | |
| from resources.lib import log_utils, utils, control | |
| from resources.lib.control import py2_decode, py2_encode, quote_plus, parse_qsl | |
| import resolveurl as resolver | |
| # from functools import reduce | |
| from resources.lib.control import getKodiVersion | |
# Kodi 20+ ships the InfoTagVideo setters; use the infotagger helper there
if int(getKodiVersion()) >= 20: from infotagger.listitem import ListItemInfoTag
# for self.sysmeta - for later use as meta
_params = dict(parse_qsl(sys.argv[2].replace('?',''))) if len(sys.argv) > 1 else dict()
| class sources: | |
| def __init__(self): | |
| self.getConstants() | |
| self.sources = [] | |
| self.current = int(time.time()) | |
| if 'sysmeta' in _params: self.sysmeta = _params['sysmeta'] # string zur späteren verwendung als meta | |
| self.watcher = False | |
| self.executor = ThreadPoolExecutor(max_workers=20) | |
| self.url = None | |
| def get(self, params): | |
| data = json.loads(params['sysmeta']) | |
| self.mediatype = data.get('mediatype') | |
| self.aliases = data.get('aliases') if 'aliases' in data else [] | |
| title = py2_encode(data.get('title')) | |
| originaltitle = py2_encode(data.get('originaltitle')) if 'originaltitle' in data else title | |
| year = data.get('year') if 'year' in data else None | |
| imdb = data.get('imdb_id') if 'imdb_id' in data else data.get('imdbnumber') if 'imdbnumber' in data else None | |
| if not imdb and 'imdb' in data: imdb = data.get('imdb') | |
| tmdb = data.get('tmdb_id') if 'tmdb_id' in data else None | |
| #if tmdb and not imdb: print 'hallo' #TODO | |
| season = data.get('season') if 'season' in data else 0 | |
| episode = data.get('episode') if 'episode' in data else 0 | |
| premiered = data.get('premiered') if 'premiered' in data else None | |
| meta = params['sysmeta'] | |
| select = data.get('select') if 'select' in data else None | |
| return title, year, imdb, season, episode, originaltitle, premiered, meta, select | |
    def play(self, params):
        """Collect sources for the item in params['sysmeta'] and start playback.

        Depending on the 'hosts.mode' setting (or an explicit 'select' in the
        meta) the found streams are shown as a directory listing, as a
        selection dialog, or the best one is auto-played.
        """
        title, year, imdb, season, episode, originaltitle, premiered, meta, select = self.get(params)
        try:
            url = None
            # list of the streams found
            items = self.getSources(title, year, imdb, season, episode, originaltitle, premiered)
            select = control.getSetting('hosts.mode') if select == None else select
            ## unnecessary
            #select = '1' if control.getSetting('downloads') == 'true' and not (control.getSetting('download.movie.path') == '' or control.getSetting('download.tv.path') == '') else select
            # # TODO check what this was once intended for
            # if control.window.getProperty('PseudoTVRunning') == 'True':
            #     return control.resolveUrl(int(sys.argv[1]), True, control.item(path=str(self.sourcesDirect(items))))
            if len(items) > 0:
                # selection as a directory listing (stashed in window properties for addItem)
                if select == '1' and 'plugin' in control.infoLabel('Container.PluginName'):
                    control.window.clearProperty(self.itemsProperty)
                    control.window.setProperty(self.itemsProperty, json.dumps(items))
                    control.window.clearProperty(self.metaProperty)
                    control.window.setProperty(self.metaProperty, meta)
                    control.sleep(2)
                    return control.execute('Container.Update(%s?action=addItem&title=%s)' % (sys.argv[0], quote_plus(title)))
                # selection as a dialog
                elif select == '0' or select == '1':
                    url = self.sourcesDialog(items)
                    if url == 'close://': return
                # autoplay
                else:
                    url = self.sourcesDirect(items)
            if url == None: return self.errorForSources()
            try: meta = json.loads(meta)
            except: pass
            from resources.lib.player import player
            player().run(title, url, meta)
        except Exception as e:
            log_utils.log('Error %s' % str(e), log_utils.LOGERROR)
    # list of the found streams (index page | hoster)
    def addItem(self, title):
        """Render the previously collected sources (stored in window properties
        by play()) as a directory of playable items.

        Each entry gets download / JDownloader / PyLoad context-menu entries
        (when enabled in the settings), media info and addon settings, plus
        stream details guessed from the source's quality/label strings.
        """
        control.playlist.clear()
        items = control.window.getProperty(self.itemsProperty)
        items = json.loads(items)
        if items == None or len(items) == 0: control.idle() ; sys.exit()
        sysaddon = sys.argv[0]
        syshandle = int(sys.argv[1])
        systitle = sysname = quote_plus(title)
        meta = control.window.getProperty(self.metaProperty)
        meta = json.loads(meta)
        #TODO
        # downloads only offered when enabled and a target path is configured
        if meta['mediatype'] == 'movie':
            # downloads = True if control.getSetting('downloads') == 'true' and control.exists(control.translatePath(control.getSetting('download.movie.path'))) else False
            downloads = True if control.getSetting('downloads') == 'true' and control.getSetting('download.movie.path') else False
        else:
            # downloads = True if control.getSetting('downloads') == 'true' and control.exists(control.translatePath(control.getSetting('download.tv.path'))) else False
            downloads = True if control.getSetting('downloads') == 'true' and control.getSetting('download.tv.path') else False
        addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
        addonFanart, settingFanart = control.addonFanart(), control.getSetting('fanart')
        if 'backdrop_url' in meta and 'http' in meta['backdrop_url']: fanart = meta['backdrop_url']
        elif 'fanart' in meta and 'http' in meta['fanart']: fanart = meta['fanart']
        else: fanart = addonFanart
        if 'cover_url' in meta and 'http' in meta['cover_url']: poster = meta['cover_url']
        elif 'poster' in meta and 'http' in meta['poster']: poster = meta['poster']
        else: poster = addonPoster
        sysimage = poster
        # append SxxEyy (episodes) or the year (movies) to the download name
        if 'season' in meta and 'episode' in meta:
            sysname += quote_plus(' S%02dE%02d' % (int(meta['season']), int(meta['episode'])))
        elif 'year' in meta:
            sysname += quote_plus(' (%s)' % meta['year'])
        for i in range(len(items)):
            try:
                label = items[i]['label']
                syssource = quote_plus(json.dumps([items[i]]))
                item = control.item(label=label, offscreen=True)
                item.setProperty('IsPlayable', 'true')
                item.setArt({'poster': poster, 'banner': addonBanner})
                if settingFanart == 'true': item.setProperty('Fanart_Image', fanart)
                cm = []
                if downloads:
                    cm.append(("Download", 'RunPlugin(%s?action=download&name=%s&image=%s&source=%s)' % (sysaddon, sysname, sysimage, syssource)))
                if control.getSetting('jd_enabled') == 'true':
                    cm.append(("Sende zum JDownloader", 'RunPlugin(%s?action=sendToJD&name=%s&source=%s)' % (sysaddon, sysname, syssource)))
                if control.getSetting('jd2_enabled') == 'true':
                    cm.append(("Sende zum JDownloader2", 'RunPlugin(%s?action=sendToJD2&name=%s&source=%s)' % (sysaddon, sysname, syssource)))
                if control.getSetting('myjd_enabled') == 'true':
                    cm.append(("Sende zu My.JDownloader", 'RunPlugin(%s?action=sendToMyJD&name=%s&source=%s)' % (sysaddon, sysname, syssource)))
                if control.getSetting('pyload_enabled') == 'true':
                    cm.append(("Sende zu PyLoad", 'RunPlugin(%s?action=sendToPyLoad&name=%s&source=%s)' % (sysaddon, sysname, syssource)))
                cm.append(("Medien-Info", 'RunPlugin(%s?action=mediaInfo&source=%s)' % (sysaddon, syssource)))
                cm.append(('Einstellungen', 'RunPlugin(%s?action=addonSettings)' % sysaddon))
                item.addContextMenuItems(cm)
                url = "%s?action=playItem&title=%s&source=%s" % (sysaddon, systitle, syssource)
                # ## necessary for library exports ##
                # ## Amazon scraper details ##
                # if "amazon" in label.lower():
                #     aid = re.search(r'asin%3D(.*?)%22%2C', url)
                #     url = "plugin://plugin.video.amazon-test/?mode=PlayVideo&asin=" + aid.group(1)
                ##https: // codedocs.xyz / AlwinEsch / kodi / group__python__xbmcgui__listitem.html # ga0b71166869bda87ad744942888fb5f14
                name = '%s%sStaffel: %s Episode: %s' % (title, "\n", meta['season'], meta['episode']) if 'season' in meta else title
                plot = meta['plot'] if 'plot' in meta and len(meta['plot'].strip()) >= 1 else ''
                plot = '[COLOR blue]%s[/COLOR]%s%s' % (name, "\n\n", py2_encode(plot))
                if 'duration' in meta:
                    infolable = {'plot': plot,'duration': meta['duration']}
                else:
                    infolable = {'plot': plot}
                # TODO
                # if 'cast' in meta and meta['cast']: item.setCast(meta['cast'])
                # # # remove unsupported InfoLabels
                meta.pop('cast', None)  # replaced by item.setCast(i['cast'])
                meta.pop('number_of_seasons', None)
                meta.pop('imdb_id', None)
                meta.pop('tvdb_id', None)
                meta.pop('tmdb_id', None)
                ## video stream quality from source.append quality - items[i]['quality']
                video_streaminfo ={}
                if "4k" in items[i]['quality'].lower():
                    video_streaminfo.update({'width': 3840, 'height': 2160})
                elif "1080p" in items[i]['quality'].lower():
                    video_streaminfo.update({'width': 1920, 'height': 1080})
                elif "hd" in items[i]['quality'].lower() or "720p" in items[i]['quality'].lower():
                    video_streaminfo.update({'width': 1280,'height': 720})
                else:
                    # video_streaminfo.update({"width": 720, "height": 576})
                    video_streaminfo.update({})
                ## video codec guessed from the extra info - items[i]['info']
                if 'hevc' in items[i]['label'].lower():
                    video_streaminfo.update({'codec': 'hevc'})
                elif '265' in items[i]['label'].lower():
                    video_streaminfo.update({'codec': 'h265'})
                elif 'mkv' in items[i]['label'].lower():
                    video_streaminfo.update({'codec': 'mkv'})
                elif 'mp4' in items[i]['label'].lower():
                    video_streaminfo.update({'codec': 'mp4'})
                else:
                    # video_streaminfo.update({'codec': 'h264'})
                    video_streaminfo.update({'codec': ''})
                ## audio quality & channels guessed from the extra info - items[i]['info']
                audio_streaminfo = {}
                if 'dts' in items[i]['label'].lower():
                    audio_streaminfo.update({'codec': 'dts'})
                elif 'plus' in items[i]['label'].lower() or 'e-ac3' in items[i]['label'].lower():
                    audio_streaminfo.update({'codec': 'eac3'})
                elif 'dolby' in items[i]['label'].lower() or 'ac3' in items[i]['label'].lower():
                    audio_streaminfo.update({'codec': 'ac3'})
                else:
                    # audio_streaminfo.update({'codec': 'aac'})
                    audio_streaminfo.update({'codec': ''})
                ## channel update ##
                if '7.1' in items[i].get('info','').lower():
                    audio_streaminfo.update({'channels': 8})
                elif '5.1' in items[i].get('info','').lower():
                    audio_streaminfo.update({'channels': 6})
                else:
                    # audio_streaminfo.update({'channels': 2})
                    audio_streaminfo.update({'channels': ''})
                # Kodi <= 19 uses setInfo/addStreamInfo; Kodi 20+ the InfoTag helper
                if int(getKodiVersion()) <= 19:
                    item.setInfo(type='Video', infoLabels=infolable)
                    item.addStreamInfo('video', video_streaminfo)
                    item.addStreamInfo('audio', audio_streaminfo)
                else:
                    info_tag = ListItemInfoTag(item, 'video')
                    info_tag.set_info(infolable)
                    stream_details = {
                        'video': [video_streaminfo],
                        'audio': [audio_streaminfo]}
                    info_tag.set_stream_details(stream_details)
                    # info_tag.set_cast(aActors)
                control.addItem(handle=syshandle, url=url, listitem=item, isFolder=False)
            except:
                pass
        control.content(syshandle, 'videos')
        control.plugincategory(syshandle, control.addonVersion)
        control.endofdirectory(syshandle, cacheToDisc=True)
def playItem(self, title, source):
    """Resolve the first source entry and hand playback to the addon player.

    title  -- display title forwarded to the player
    source -- JSON-encoded list of source dicts; only element [0] is used
    Returns the resolved URL on success, otherwise None (errors are logged).
    """
    isDebug = False
    if isDebug: log_utils.log('start playItem', log_utils.LOGWARNING)
    try:
        # Metadata was stashed in a window property by the directory builder.
        meta = control.window.getProperty(self.metaProperty)
        meta = json.loads(meta)
        header = control.addonInfo('name')
        # control.idle() #ok
        progressDialog = control.progressDialog if control.getSetting('progress.dialog') == '0' else control.progressDialogBG
        progressDialog.create(header, '')
        progressDialog.update(0)
        item = json.loads(source)[0]
        #if isDebug: log_utils.log('playItem 237', log_utils.LOGWARNING)
        if item['source'] == None: raise Exception()
        # Resolve asynchronously so the progress dialog stays responsive.
        future = self.executor.submit(self.sourcesResolve, item)
        waiting_time = 30
        while waiting_time > 0:
            try:
                if control.abortRequested: return sys.exit()
                if progressDialog.iscanceled(): return progressDialog.close()
            except:
                pass
            if future.done(): break
            control.sleep(1)
            waiting_time = waiting_time - 1
            progressDialog.update(int(100 - 100. / 30 * waiting_time), str(item['label']))
            #if isDebug: log_utils.log('playItem 252', log_utils.LOGWARNING)
            if control.condVisibility('Window.IsActive(virtualkeyboard)') or \
               control.condVisibility('Window.IsActive(yesnoDialog)'):
                # or control.condVisibility('Window.IsActive(PopupRecapInfoWindow)'):
                waiting_time = waiting_time + 1  # don't count down while a dialog is presented
                if future.done(): break
        try: progressDialog.close()
        except: pass
        if isDebug: log_utils.log('playItem 261', log_utils.LOGWARNING)
        # Close any resolver-opened dialogs that would block playback.
        control.execute('Dialog.Close(virtualkeyboard)')
        control.execute('Dialog.Close(yesnoDialog)')
        if isDebug: log_utils.log('playItem url: %s' % self.url, log_utils.LOGWARNING)
        if self.url == None:
            #self.errorForSources()
            return
        from resources.lib.player import player
        player().run(title, self.url, meta)
        return self.url
    except Exception as e:
        log_utils.log('Error %s' % str(e), log_utils.LOGERROR)
def getSources(self, title, year, imdb, season, episode, originaltitle, premiered, quality='HD', timeout=30):
    """Run all scraper providers in parallel and collect stream sources.

    Shows a progress dialog with live per-quality counters while the
    provider futures run, then filters the results via sourcesFilter().
    Returns the filtered self.sources list.
    """
    #TODO
    # self._getHostDict()
    control.idle() #ok
    progressDialog = control.progressDialog if control.getSetting('progress.dialog') == '0' else control.progressDialogBG
    progressDialog.create(control.addonInfo('name'), '')
    progressDialog.update(0)
    progressDialog.update(0, "Quellen werden vorbereitet")
    # Shuffle first so providers of equal priority run in random order,
    # then stable-sort by the provider's declared priority.
    sourceDict = self.sourceDict
    sourceDict = [(i[0], i[1], i[1].priority) for i in sourceDict]
    random.shuffle(sourceDict)
    sourceDict = sorted(sourceDict, key=lambda i: i[2])
    content = 'movies' if season == 0 or season == '' or season == None else 'shows'
    aliases, localtitle = utils.getAliases(imdb, content)
    # Prefer the localized title; keep the previous title as a search alias.
    if localtitle and title != localtitle and originaltitle != localtitle:
        if not title in aliases: aliases.append(title)
        title = localtitle
    for i in self.aliases:
        if not i in aliases:
            aliases.append(i)
    titles = utils.get_titles_for_search(title, originaltitle, aliases)
    # One future per provider; map future -> provider name for the dialog text.
    futures = {self.executor.submit(self._getSource, titles, year, season, episode, imdb, provider[0], provider[1]): provider[0] for provider in sourceDict}
    provider_names = {provider[0].upper() for provider in sourceDict}
    string4 = "Total"
    try: timeout = int(control.getSetting('scrapers.timeout'))
    except: pass
    # Quality ceiling setting: '0'=4K, '1'=1440p, '2'=1080p, '3'=720p, else SD.
    quality = control.getSetting('hosts.quality')
    if quality == '': quality = '0'
    source_4k = 0
    source_1080 = 0
    source_720 = 0
    source_sd = 0
    total = d_total = 0  # NOTE(review): d_total is never read afterwards
    total_format = '[COLOR %s][B]%s[/B][/COLOR]'
    pdiag_format = ' 4K: %s | 1080p: %s | 720p: %s | SD: %s | %s: %s '.split('|')
    # Poll loop: 4 ticks per configured timeout second (control.sleep(1) per tick).
    for i in range(0, 4 * timeout):
        try:
            if control.abortRequested: return sys.exit()
            try:
                if progressDialog.iscanceled(): break
            except:
                pass
            # Count collected sources per quality bucket, honoring the ceiling.
            if len(self.sources) > 0:
                if quality in ['0']:
                    source_4k = len([e for e in self.sources if e['quality'] == '4K'])
                    source_1080 = len([e for e in self.sources if e['quality'] in ['1440p','1080p']])
                    source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD']])
                    source_sd = len([e for e in self.sources if e['quality'] not in ['4K','1440p','1080p','720p','HD']])
                elif quality in ['1']:
                    source_1080 = len([e for e in self.sources if e['quality'] in ['1440p','1080p']])
                    source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD']])
                    source_sd = len([e for e in self.sources if e['quality'] not in ['4K','1440p','1080p','720p','HD']])
                elif quality in ['2']:
                    source_1080 = len([e for e in self.sources if e['quality'] in ['1080p']])
                    source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD']])
                    source_sd = len([e for e in self.sources if e['quality'] not in ['4K','1440p','1080p','720p','HD']])
                elif quality in ['3']:
                    source_720 = len([e for e in self.sources if e['quality'] in ['720p','HD']])
                    source_sd = len([e for e in self.sources if e['quality'] not in ['4K','1440p','1080p','720p','HD']])
                else:
                    source_sd = len([e for e in self.sources if e['quality'] not in ['4K','1440p','1080p','720p','HD']])
            total = source_4k + source_1080 + source_720 + source_sd
            # Red counter while zero, lime once sources of that quality exist.
            source_4k_label = total_format % ('red', source_4k) if source_4k == 0 else total_format % ('lime', source_4k)
            source_1080_label = total_format % ('red', source_1080) if source_1080 == 0 else total_format % ('lime', source_1080)
            source_720_label = total_format % ('red', source_720) if source_720 == 0 else total_format % ('lime', source_720)
            source_sd_label = total_format % ('red', source_sd) if source_sd == 0 else total_format % ('lime', source_sd)
            source_total_label = total_format % ('red', total) if total == 0 else total_format % ('lime', total)
            try:
                # Providers whose future has not completed yet.
                info = [name.upper() for future, name in futures.items() if not future.done()]
                percent = int(100 * float(i) / (2 * timeout) + 1)
                # Only show counters for qualities within the configured ceiling.
                if quality in ['0']:
                    line1 = '|'.join(pdiag_format) % (source_4k_label, source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
                elif quality in ['1']:
                    line1 = '|'.join(pdiag_format[1:]) % (source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
                elif quality in ['2']:
                    line1 = '|'.join(pdiag_format[1:]) % (source_1080_label, source_720_label, source_sd_label, str(string4), source_total_label)
                elif quality in ['3']:
                    line1 = '|'.join(pdiag_format[2:]) % (source_720_label, source_sd_label, str(string4), source_total_label)
                else:
                    line1 = '|'.join(pdiag_format[3:]) % (source_sd_label, str(string4), source_total_label)
                if (i / 2) < timeout:
                    string = "Verbleibende Indexseiten: %s"
                else:
                    string = 'Waiting for: %s'
                if len(info) > 6: line = line1 + string % (str(len(info)))
                elif len(info) > 1: line = line1 + string % (', '.join(info))
                elif len(info) == 1: line = line1 + string % (''.join(info))
                else: line = line1 + 'Suche beendet!'
                progressDialog.update(max(1, percent), line)
                # All providers done -> stop polling early.
                if len(info) == 0: break
            except Exception as e:
                log_utils.log('Exception Raised: %s' % str(e), log_utils.LOGERROR)
            control.sleep(1)
        except:
            pass
    # Small grace period for futures that finished right at the deadline.
    time.sleep(1)
    try: progressDialog.close()
    except: pass
    self.sourcesFilter()
    return self.sources
| def _getSource(self, titles, year, season, episode, imdb, source, call): | |
| try: | |
| sources = call.run(titles, year, season, episode, imdb) # kasi self.hostDict | |
| if sources == None or sources == []: raise Exception() | |
| sources = [json.loads(t) for t in set(json.dumps(d, sort_keys=True) for d in sources)] | |
| for i in sources: | |
| i.update({'provider': source}) | |
| if not 'priority' in i: i.update({'priority': 100}) | |
| if not 'prioHoster' in i: i.update({'prioHoster': 100}) | |
| self.sources.extend(sources) | |
| except: | |
| pass | |
def sourcesFilter(self):
    """Filter, order, cap and label the collected sources.

    Steps:
      1. read the quality ceiling from 'hosts.quality' ('0'=4K allowed,
         '1'=1440p, '2'=1080p, '3'=720p, anything else = SD only)
      2. shuffle, then stable-sort by hoster priority
      3. normalize the legacy 'HD' quality tag to '720p'
      4. keep allowed qualities highest-first, then all SD/unknown qualities
      5. optional provider/priority sorts and a length cap
      6. build the display 'label' for every remaining source
    Returns the filtered self.sources list (also stored on self).
    """
    quality = control.getSetting('hosts.quality')
    if quality == '': quality = '0'
    # Shuffle first so equal-priority entries end up in random order after
    # the stable sort below.
    random.shuffle(self.sources)
    self.sources = sorted(self.sources, key=lambda k: k['prioHoster'], reverse=False)
    # Normalize the legacy 'HD' marker so later comparisons only see '720p'.
    for i in range(len(self.sources)):
        q = self.sources[i]['quality']
        if q.lower() == 'hd': self.sources[i].update({'quality': '720p'})
    filtered = []
    if quality in ['0']: filtered += [i for i in self.sources if i['quality'] == '4K']
    if quality in ['0', '1']: filtered += [i for i in self.sources if i['quality'] == '1440p']
    if quality in ['0', '1', '2']: filtered += [i for i in self.sources if i['quality'] == '1080p']
    if quality in ['0', '1', '2', '3']: filtered += [i for i in self.sources if i['quality'] == '720p']
    # Catch-all for SD/unknown qualities. Bug fix: '4K' must be uppercase here
    # (sources are tagged '4K', see the counting in getSources); the previous
    # lowercase '4k' re-added every 4K source, duplicating them at quality '0'
    # and leaking them past lower quality ceilings.
    filtered += [i for i in self.sources if i['quality'] not in ['4K', '1440p', '1080p', '720p']]
    self.sources = filtered
    if control.getSetting('hosts.sort.provider') == 'true':
        self.sources = sorted(self.sources, key=lambda k: k['provider'])
    if control.getSetting('hosts.sort.priority') == 'true' and self.mediatype == 'tvshow':
        self.sources = sorted(self.sources, key=lambda k: k['priority'], reverse=False)
    # Cap the list length: user-configured limit or a hard maximum of 100.
    if str(control.getSetting('hosts.limit')) == 'true':
        self.sources = self.sources[:int(control.getSetting('hosts.limit.num'))]
    else:
        self.sources = self.sources[:100]
    # Build the display label: "NN | PROVIDER | host | quality | extra info".
    for i in range(len(self.sources)):
        p = self.sources[i]['provider']
        q = self.sources[i]['quality']
        s = self.sources[i]['source']
        l = self.sources[i]['language']  # read but unused; also asserts the key exists
        try: f = (' | '.join(['[I]%s [/I]' % info.strip() for info in self.sources[i]['info'].split('|')]))
        except: f = ''
        label = '%02d | [B]%s[/B] | ' % (int(i + 1), p)
        if q in ['4K', '1440p', '1080p', '720p']: label += '%s | [B][I]%s [/I][/B] | %s' % (s, q, f)
        elif q == 'SD': label += '%s | %s' % (s, f)
        else: label += '%s | %s | [I]%s [/I]' % (s, f, q)
        # Tidy up empty placeholders left by missing info fields.
        label = label.replace('| 0 |', '|').replace(' | [I]0 [/I]', '')
        label = re.sub(r'\[I\]\s+\[/I\]', ' ', label)
        label = re.sub(r'\|\s+\|', '|', label)
        label = re.sub(r'\|(?:\s+|)$', '', label)
        self.sources[i]['label'] = label.upper()
    self.sources = [i for i in self.sources if 'label' in i]
    return self.sources
def sourcesResolve(self, item, info=False):
    """Resolve a source dict to a playable URL.

    The provider's own resolve() runs first; non-direct links are then passed
    through the URL resolver. The result is stored in self.url and returned.
    On failure returns None (and shows an error dialog when info=True).
    """
    try:
        self.url = None
        provider = item['provider']
        local = item.get('local', False)
        # Look up the scraper module registered under this provider name.
        call = next(entry[1] for entry in self.sourceDict if entry[0] == provider)
        url = call.resolve(item['url'])
        if item['direct'] != True:
            # Hosted link: hand over to the URL resolver.
            try:
                hmf = resolver.HostedMediaFile(url=url, include_disabled=True, include_universal=False)
                if hmf.valid_url():
                    url = hmf.resolve()
                if url == False or url == None or url == '':
                    url = None
            except:
                url = None
        if url == None or (not '://' in str(url) and not local):
            log_utils.log('Kein Video Link gefunden: Provider %s / %s / %s ' % (item['provider'], item['source'], str(item['source'])), log_utils.LOGERROR)
            raise Exception()
        if not url:
            raise Exception()
        self.url = url
        return url
    except:
        if info: self.errorForSources()
        return
def sourcesDialog(self, items):
    """Let the user pick a source, then resolve it (falling through to the
    remaining sources on failure). Returns the resolved URL, 'close://' when
    the selection dialog was dismissed, or None on total failure."""
    labels = [i['label'] for i in items]
    select = control.selectDialog(labels)
    if select == -1: return 'close://'
    # Try the chosen item first, then the ones after it, then the ones before
    # it in reverse order — capped at 40 attempts.
    next = [y for x,y in enumerate(items) if x >= select]
    prev = [y for x,y in enumerate(items) if x < select][::-1]
    items = [items[select]]
    items = [i for i in items+next+prev][:40]
    header = control.addonInfo('name')
    header2 = header.upper()
    progressDialog = control.progressDialog if control.getSetting('progress.dialog') == '0' else control.progressDialogBG
    progressDialog.create(header, '')
    progressDialog.update(0)
    block = None  # host that timed out — skip further items from the same host
    try:
        for i in range(len(items)):
            try:
                if items[i]['source'] == block: raise Exception()
                future = self.executor.submit(self.sourcesResolve, items[i])
                try:
                    if progressDialog.iscanceled(): break
                    progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']))
                except:
                    progressDialog.update(int((100 / float(len(items))) * i), str(header2) + str(items[i]['label']))
                # Poll the resolve future for up to ~30s, staying cancelable.
                waiting_time = 30
                while waiting_time > 0:
                    try:
                        if control.abortRequested: return sys.exit() #xbmc.Monitor().abortRequested()
                        if progressDialog.iscanceled(): return progressDialog.close()
                    except:
                        pass
                    if future.done(): break
                    control.sleep(1)
                    waiting_time = waiting_time - 1
                    if control.condVisibility('Window.IsActive(virtualkeyboard)') or \
                       control.condVisibility('Window.IsActive(yesnoDialog)') or \
                       control.condVisibility('Window.IsActive(ProgressDialog)'):
                        waiting_time = waiting_time + 1  # don't count down while a dialog is presented ## control.condVisibility('Window.IsActive(PopupRecapInfoWindow)') or \
                if not future.done(): block = items[i]['source']
                if self.url == None: raise Exception()
                self.selectedSource = items[i]['label']
                try: progressDialog.close()
                except: pass
                control.execute('Dialog.Close(virtualkeyboard)')
                control.execute('Dialog.Close(yesnoDialog)')
                return self.url
            except:
                pass
        try: progressDialog.close()
        except: pass
    except Exception as e:
        try: progressDialog.close()
        except: pass
        log_utils.log('Error %s' % str(e), log_utils.LOGINFO)
def sourcesDirect(self, items):
    """Autoplay path: resolve items in their given order and return the first
    successfully resolved URL, or None when every item fails."""
    # TODO - OK
    # filter = [i for i in items if i['source'].lower() in self.hostcapDict and i['debrid'] == '']
    # items = [i for i in items if not i in filter]
    # items = [i for i in items if ('autoplay' in i and i['autoplay'] == True) or not 'autoplay' in i]
    u = None
    header = control.addonInfo('name')
    header2 = header.upper()
    try:
        control.sleep(1)
        progressDialog = control.progressDialog if control.getSetting('progress.dialog') == '0' else control.progressDialogBG
        progressDialog.create(header, '')
        progressDialog.update(0)
    except:
        pass
    for i in range(len(items)):
        try:
            if progressDialog.iscanceled(): break
            progressDialog.update(int((100 / float(len(items))) * i), str(items[i]['label']))
        except:
            # Background dialog variant needs the header prepended.
            progressDialog.update(int((100 / float(len(items))) * i), str(header2) + str(items[i]['label']))
        try:
            if control.abortRequested: return sys.exit()
            url = self.sourcesResolve(items[i])
            if u == None: u = url
            # Stop at the first item that resolved to a usable URL.
            if not url == None: break
        except:
            pass
    try: progressDialog.close()
    except: pass
    return u
def mediaInfo(self, source, dialog=None):
    """Resolve the first source and show a 'Medien-Info' text viewer with the
    probed stream details. Resolving and probing share one TOTAL_TIMEOUT
    budget (deadline), so a slow resolve shortens the probe phase.

    source -- JSON-encoded list of source dicts; only element [0] is used
    dialog -- optional pre-created DialogProgress (one is created if None)
    """
    import xbmcgui
    try:
        # Silence insecure-HTTPS warnings during the probe, if urllib3 exists.
        import urllib3
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    except: pass
    try:
        item = json.loads(source)[0]
        if item['source'] is None:
            raise Exception()
        import time as _time
        from resources.lib.mediainfo import TOTAL_TIMEOUT
        deadline = _time.time() + TOTAL_TIMEOUT
        if dialog is None:
            dialog = xbmcgui.DialogProgress()
            dialog.create('Medien-Info', 'Löse Stream-URL auf...')
            dialog.update(0)
        future = self.executor.submit(self.sourcesResolve, item)
        # Wait for resolve with responsive cancel (check every 250ms)
        # Cap at deadline so resolve + probe share the TOTAL_TIMEOUT budget
        for i in range(120):  # 120 * 250ms = 30s max
            remaining = int(deadline - _time.time())
            if remaining <= 0:
                break
            dialog.update(int(50.0 * i / 120), 'Löse Stream-URL auf...')
            try:
                if dialog.iscanceled():
                    try: dialog.close()
                    except: pass
                    return
            except: pass
            if future.done():
                break
            control.sleep(0.25)  # 250ms — control.sleep() takes seconds, not ms
            # Don't count down while resolver shows interactive dialogs
            if control.condVisibility('Window.IsActive(virtualkeyboard)') or \
               control.condVisibility('Window.IsActive(yesnoDialog)'):
                continue
        url = self.url if future.done() else None
        control.execute('Dialog.Close(virtualkeyboard)')
        control.execute('Dialog.Close(yesnoDialog)')
        try:
            if dialog.iscanceled():
                try: dialog.close()
                except: pass
                return
        except: pass
        if url is None:
            try: dialog.close()
            except: pass
            control.infoDialog("Stream-URL konnte nicht aufgelöst werden", sound=False, icon='INFO')
            return
        log_utils.log('mediaInfo: resolve done, url=%s deadline_remaining=%.1f' % (url[:80], deadline - _time.time()), log_utils.LOGWARNING)
        dialog.update(50, 'Analysiere Stream...')
        # Probe phase: mediainfo respects the remaining deadline itself.
        from resources.lib import mediainfo
        t_probe = _time.time()
        info = mediainfo.getMediaInfo(url, dialog, deadline)
        log_utils.log('mediaInfo: probe done in %.1fs, got_info=%s' % (_time.time() - t_probe, bool(info)), log_utils.LOGWARNING)
        try: dialog.close()
        except: pass
        if info:
            xbmcgui.Dialog().textviewer('Medien-Info', info)
        else:
            control.infoDialog("Auflösung konnte nicht ermittelt werden", sound=False, icon='INFO')
    except Exception as e:
        try:
            if dialog: dialog.close()
        except: pass
        log_utils.log('mediaInfo Error: %s' % str(e), log_utils.LOGERROR)
        control.infoDialog("Auflösung konnte nicht ermittelt werden", sound=False, icon='INFO')
def errorForSources(self):
    """Notify the user that no stream was available or selected."""
    message = "Keine Streams verfügbar oder ausgewählt"
    control.infoDialog(message, sound=False, icon='INFO')
def getTitle(self, title):
    """Return *title* normalized for searching/matching (utils.normalize)."""
    return utils.normalize(title)
def getConstants(self):
    """Initialise the window-property names and load the scraper list."""
    addon_id = control.Addon.getAddonInfo('id')
    self.itemsProperty = '%s.container.items' % addon_id
    self.metaProperty = '%s.container.meta' % addon_id
    from scrapers import sources
    self.sourceDict = sources()
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # -*- coding: utf-8 -*- | |
| # Python 3 | |
| # | |
| # Trailer lookup for xShip context menu. | |
| # TMDB ID is already known for every xShip item — no ID resolution needed. | |
| # | |
| # Search waterfall (v7 — API key split + user guidance popups): | |
| # 1. KinoCheck API — exact TMDB ID lookup, free, no YT quota | |
| # 1b. KinoCheck YT channel — fallback when API is down (needs own YT API key) | |
| # 2. TMDB videos (German) — Trailer/Teaser, newest first | |
| # 3. TMDB videos (English) — Trailer/Teaser, newest first | |
| # 3b. IMDB — direct MP4 from IMDB title page, no player needed | |
| # 4. YouTube search (DE) — needs own YT API key (100 units/search) | |
| # 5. YouTube search (EN) — needs own YT API key (0 units if same title) | |
| # 5b. TMDB videos (any) — fallback for 3rd languages (ES, KO, ZH, JA, ...) | |
| # 6. Give up | |
| # | |
| # Gating (v7): | |
| # Steps 1-3, 5b: has_yt_player (SmartTube or YT addon) | |
| # Steps 1b, 4-5: has_own_key (validated YT API key) | |
| # Step 3b: always (direct MP4, no player needed) | |
| # | |
| # Play phase: | |
| # SmartTube: StartAndroidActivity — no API key needed, handles age-gates | |
| # YouTube addon: PlayMedia — ISA recommended | |
| # IMDB: xbmc.Player().play(mp4_url) — Kodi native player | |
| # | |
| # After play: one-time guidance popups for users missing player/API key. | |
| # Before playing: 3s notification popup (upper-right) showing source + language. | |
| # Poster URL passed as notification icon (Kodi stretches to square). | |
| import re | |
# YouTube channel id of the KinoCheck trailer channel (fallback search scope).
KINOCHECK_CHANNEL = 'UCOL10n-as9dXO2qtjjFUQbQ'
# Words that disqualify a global YouTube search result title
_JUNK_WORDS = [
    '#short', 'react', ' review', 'explained', 'breakdown',
    'tribute', 'fan edit', 'fan made', 'fan film',
    'deleted scene', 'interview', 'commentary', 'behind the scenes',
    'music video', 'lyric', 'live performance',
    'blooper', 'gag reel', 'backstage', 'making of',
    'recap', 'full movie', 'soundtrack', 'parody', 'gameplay',
    'scene', 'comments',
]
# At least one of these must appear in a global YouTube search result title
_TRAILER_WORDS = ['trailer', 'teaser', 'official']
# Integrity checksum for API key validation
# NOTE(review): despite the name, this base64 blob decodes to the bundled
# fallback API key itself; _getUserKey() compares against it to detect
# "user has no own key".
_API_CHECKSUM_B64 = b'QUl6YVN5RG5sSjBlX0NabExvWm03Q01Obk80MXhJblpnVkZ5T2Jv'
import base64 as _b64
_api_checksum = _b64.b64decode(_API_CHECKSUM_B64).decode() if _API_CHECKSUM_B64 else ''
# ── Module-level cached state (persists for Kodi session, resets on restart) ───
_smarttube_pkg = None  # None=unchecked, str=package, False=not found
_yt_api_key = None  # None=unchecked, str=key, ''=no key
_yt_api_dead = False  # True after HTTP 403 quotaExceeded/forbidden from YouTube API
_yt_search_cache = {}  # (title_lower, year, lang) -> raw items list (up to 25)
_yt_video_cache = {}  # video_id -> {secs, age_restricted, unlisted, cam_rip, views}
_imdb_dead = False  # True after HTTP 403/429 from imdb.com — skip for rest of session
_imdb_cache = {}  # imdb_id -> (mp4_url, quality, expiry_timestamp)
_IMDB_CACHE_TTL = 3600  # 1 hour (CloudFront signed URLs expire in ~24h)
| # ── Module-level logger (lazy xbmc import) ──────────────────────────────────── | |
| def _log(msg): | |
| try: | |
| import xbmc | |
| xbmc.log('[xship.trailer] ' + msg, xbmc.LOGINFO) | |
| except Exception: | |
| pass | |
| # ── SmartTube detection (Android only) ───────────────────────────────────────── | |
def _getSmartTubePackage():
    """Return the installed SmartTube package name on Android, else None.

    Probes `pm path` for the stable and beta package ids. The outcome is
    memoized in module-level _smarttube_pkg (False = checked, not found).
    """
    global _smarttube_pkg
    if _smarttube_pkg is not None:
        return _smarttube_pkg or None
    try:
        import xbmc
        if not xbmc.getCondVisibility('System.Platform.Android'):
            _smarttube_pkg = False
            _log('SmartTube: not Android, skipping')
            return None
        import subprocess
        for pkg in ('org.smarttube.stable', 'org.smarttube.beta'):
            try:
                probe = subprocess.run(
                    ['pm', 'path', pkg],
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
                    timeout=5)
            except subprocess.TimeoutExpired:
                _log('SmartTube: pm timeout for %s' % pkg)
                continue
            if probe.returncode == 0:
                _smarttube_pkg = pkg
                _log('SmartTube found: %s' % pkg)
                return pkg
        _smarttube_pkg = False
        _log('SmartTube not found')
        return None
    except Exception as e:
        _log('SmartTube check failed: %s' % e)
        _smarttube_pkg = False
        return None
| # ── HTTP helper (bypass cRequestHandler — its __cleanupUrl double-encodes %22) ─ | |
def _fetchJSON(url, timeout=10):
    """GET *url* and return the parsed JSON dict; {} on any failure.

    For googleapis.com URLs an HTTP 403 body is inspected: quota exhaustion
    or a revoked key flips the module-level _yt_api_dead switch so later
    YouTube API calls are skipped for the rest of the session.
    """
    global _yt_api_dead
    import json
    from urllib.request import Request, urlopen
    from urllib.error import HTTPError
    try:
        req = Request(url)
        req.add_header('User-Agent', 'Mozilla/5.0')
        raw = urlopen(req, timeout=timeout).read()
        return json.loads(raw.decode('utf-8'))
    except HTTPError as e:
        if e.code == 403 and 'googleapis.com' in url:
            reason = None
            try:
                body = json.loads(e.read().decode('utf-8'))
                reason = body.get('error', {}).get('errors', [{}])[0].get('reason', '')
            except Exception:
                _log('_fetchJSON HTTP 403 (unreadable body) url=%s' % url[:120])
            if reason in ('quotaExceeded', 'dailyLimitExceeded'):
                _yt_api_dead = True
                _log('YouTube API quota exhausted (reason=%s) — skipping remaining YT API calls' % reason)
            elif reason == 'forbidden':
                _yt_api_dead = True
                _log('YouTube API key invalid/revoked (reason=%s) — skipping remaining YT API calls' % reason)
            elif reason is not None:
                _log('_fetchJSON HTTP 403 reason=%s url=%s' % (reason, url[:120]))
        else:
            _log('_fetchJSON HTTP %s url=%s' % (e.code, url[:120]))
        return {}
    except Exception as e:
        _log('_fetchJSON error: %s url=%s' % (e, url[:120]))
        return {}
def _fetchHTML(url, timeout=10):
    """GET *url* and return the raw HTML string; '' on any failure.

    An HTTP 403/429 from imdb.com flips the module-level _imdb_dead switch so
    IMDB is skipped for the rest of the session."""
    global _imdb_dead
    from urllib.request import Request, urlopen
    from urllib.error import HTTPError
    # Browser-like headers: IMDB serves blocked/short pages to bare clients.
    headers = {
        'User-Agent': ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/120.0.0.0 Safari/537.36'),
        'Accept-Language': 'en-US,en;q=0.9',
    }
    try:
        req = Request(url)
        for name, value in headers.items():
            req.add_header(name, value)
        return urlopen(req, timeout=timeout).read().decode('utf-8', errors='replace')
    except HTTPError as e:
        if e.code in (403, 429) and 'imdb.com' in url:
            _imdb_dead = True
            _log('IMDB blocked: HTTP %d — skipping IMDB for rest of session' % e.code)
        else:
            _log('_fetchHTML HTTP %s url=%s' % (e.code, url[:120]))
        return ''
    except Exception as e:
        _log('_fetchHTML error: %s url=%s' % (e, url[:120]))
        return ''
| # ── YouTube helpers ─────────────────────────────────────────────────────────── | |
def _getYouTubeApiKey():
    """Return a YouTube Data API key, cached in module-level _yt_api_key.

    Preference order: the user's key from the official YouTube addon's
    api_keys.json, then the bundled fallback key. Returns '' (and caches it)
    when neither is available."""
    global _yt_api_key
    if _yt_api_key is not None:
        return _yt_api_key
    # 1. User key from the YouTube addon profile
    key = ''
    try:
        import xbmcvfs, json
        fh = xbmcvfs.File('special://profile/addon_data/plugin.video.youtube/api_keys.json')
        payload = json.loads(fh.read())
        fh.close()
        key = payload.get('keys', {}).get('user', {}).get('api_key', '')
    except Exception:
        pass
    if key:
        _log('YT-apikey: addon key (%s...)' % key[:8])
        _yt_api_key = key
        return key
    # 2. Bundled fallback key
    if _API_CHECKSUM_B64:
        try:
            import base64
            key = base64.b64decode(_API_CHECKSUM_B64).decode()
        except Exception:
            key = ''
        if key:
            _log('YT-apikey: fallback (%s...)' % key[:8])
            _yt_api_key = key
            return key
    _log('YT-apikey: MISSING')
    _yt_api_key = ''
    return ''
def _getUserKey():
    """Return the user's own API key, or '' when only the bundled fallback
    key (or no key at all) is available."""
    key = _getYouTubeApiKey()
    if not key:
        return ''
    if _b64.b64encode(key.encode()) == _API_CHECKSUM_B64:
        return ''  # bundled fallback key, not a user-provided one
    return key
def _fetchVideoDetails(keys, api_key=None):
    """Call YouTube Data API v3 to get duration, age-restriction, privacy and category for video IDs.

    keys    -- iterable of YouTube video ids
    api_key -- optional explicit key; defaults to _getYouTubeApiKey()
    Uses _yt_video_cache to avoid redundant API calls across waterfall steps.
    Returns dict {video_id: {secs, age_restricted, unlisted, cam_rip, views}}
    on success (may be empty if videos are unavailable).
    Returns None on API failure (no key, dead API, network error).
    """
    try:
        if _yt_api_dead:
            _log('video-details: API dead, skipping')
            return None
        apikey = api_key or _getYouTubeApiKey()
        if not apikey or not keys:
            return None
        # Check cache — only fetch uncached IDs
        result = {}
        uncached = []
        for k in keys:
            if k in _yt_video_cache:
                result[k] = _yt_video_cache[k]
            else:
                uncached.append(k)
        if not uncached:
            _log('video-details: all %d from cache' % len(keys))
            return result
        url = ('https://www.googleapis.com/youtube/v3/videos'
               '?part=contentDetails,status,snippet,statistics&id=%s&key=%s'
               % (','.join(uncached), apikey))
        data = _fetchJSON(url)
        if not data:
            # _fetchJSON may have set _yt_api_dead; return cached results + None for uncached
            if result:
                _log('video-details: API failed but %d from cache' % len(result))
                return result
            return None
        for item in data.get('items', []):
            cd = item.get('contentDetails', {})
            st = item.get('status', {})
            sn = item.get('snippet', {})
            stats = item.get('statistics', {})
            # ISO-8601 duration (e.g. 'PT2M30S') -> total seconds
            dur = cd.get('duration', '')
            m = re.match(r'PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?', dur)
            secs = (int(m.group(1) or 0) * 3600
                    + int(m.group(2) or 0) * 60
                    + int(m.group(3) or 0)) if m else 0
            age_restricted = cd.get('contentRating', {}).get('ytRating') == 'ytAgeRestricted'
            unlisted = st.get('privacyStatus') != 'public'
            # categoryId '22' is treated as a cam-rip indicator here — heuristic,
            # see the module's _yt_video_cache comment.
            cam_rip = sn.get('categoryId') == '22'
            views = int(stats.get('viewCount', 0))
            info = {'secs': secs, 'age_restricted': age_restricted,
                    'unlisted': unlisted, 'cam_rip': cam_rip, 'views': views}
            _yt_video_cache[item['id']] = info
            result[item['id']] = info
        _log('video-details: fetched=%d cached=%d total=%d' % (
            len(uncached), len(keys) - len(uncached), len(result)))
        return result
    except Exception as e:
        _log('video-details exception: %s' % e)
        return None
def _oembedFetch(video_id):
    """Query YouTube's free oEmbed endpoint for *video_id* (no API key, no quota).

    Returns the oEmbed dict (title/author_name) on success, None when the
    video is deleted/private/unavailable (HTTP 404/401/403), and {} on other
    errors so callers treat the video as available-but-unknown."""
    import json
    from urllib.request import Request, urlopen
    from urllib.error import HTTPError
    url = 'https://www.youtube.com/oembed?url=https://www.youtube.com/watch?v=%s&format=json' % video_id
    try:
        req = Request(url)
        req.add_header('User-Agent', 'Mozilla/5.0')
        return json.loads(urlopen(req, timeout=5).read().decode('utf-8'))
    except HTTPError as e:
        if e.code in (404, 401, 403):
            _log('oEmbed %s: HTTP %d (unavailable)' % (video_id, e.code))
            return None
        return {}  # other HTTP errors — assume available but no data
    except Exception:
        return {}  # network error — assume available but no data
def _videoExists(video_id):
    """Return True when the YouTube video is still available, False when it is
    deleted/private/unavailable. Uses the free oEmbed endpoint (no key, no quota)."""
    return _oembedFetch(video_id) is not None
def _filterExistence(hits):
    """Drop deleted/private videos via the free oEmbed check (0 YT quota).

    Used on the SmartTube path, where age/duration filtering is not needed.
    Returns the surviving hits in their original order ([] for empty input).
    """
    alive = []
    for hit in hits or []:
        if not _videoExists(hit['key']):
            _log('existence-check %s: REJECT (unavailable)' % hit['key'])
            continue
        _log('existence-check %s: OK' % hit['key'])
        alive.append(hit)
    return alive
def _filterByDuration(hits, minS=60, maxS=360, skip_api=False, api_key=None):
    """Filter YouTube hits by duration and remove age-restricted/unlisted/cam-rip videos.
    When skip_api=True (SmartTube): uses free oEmbed existence check (0 quota).
    Falls back to unfiltered list only if API is completely unavailable (None).

    hits: list of {'name', 'key'} dicts; minS/maxS: accepted duration window
    in seconds (default 60-360s). Returns the surviving hits, possibly
    re-ranked by view count (see below); [] means all were rejected and the
    caller's waterfall continues.
    """
    if not hits:
        return []
    if skip_api:
        # SmartTube handles age-gates itself — only check the video still exists.
        return _filterExistence(hits)
    details = _fetchVideoDetails([h['key'] for h in hits], api_key=api_key)
    if details is None:
        # videos.list completely failed — better to return unfiltered than nothing.
        _log('duration-filter: API unavailable, returning unfiltered (%d hits)' % len(hits))
        return hits
    filtered = []
    for h in hits:
        d = details.get(h['key'])
        if d is None:
            # API responded but omitted this id — video is gone.
            _log('duration-filter %s: not in API response (deleted/private) REJECT' % h['key'])
            continue
        secs = d.get('secs', 0)
        aged = d.get('age_restricted', False)
        priv = d.get('unlisted', False)
        cam = d.get('cam_rip', False)
        ok = (minS <= secs <= maxS) and not aged and not priv and not cam
        _log('duration-filter %s: %ds age=%s unlisted=%s cam=%s %s' % (h['key'], secs, aged, priv, cam, 'PASS' if ok else 'REJECT'))
        if ok:
            filtered.append(h)
    # Re-rank by view count only when there's a clear winner:
    # best must have >=10K views AND >=10x more than the current first pick
    if len(filtered) >= 2:
        # views keeps the pre-sort order so the log below can name the old #1.
        views = [(details.get(h['key'], {}).get('views', 0), h) for h in filtered]
        best_views = max(v for v, _ in views)
        first_views = views[0][0]
        if best_views >= 10000 and best_views >= 10 * max(first_views, 1):
            filtered.sort(key=lambda h: details.get(h['key'], {}).get('views', 0), reverse=True)
            _log('view-rank: promoted %s (%d views) over %s (%d views)' % (
                filtered[0]['key'], best_views, views[0][1]['key'], first_views))
    return filtered  # empty = all rejected -> waterfall continues to next step
def _filterAgeRestricted(hits, skip_api=False, api_key=None):
    """Drop unavailable videos (always) and age-restricted/unlisted/cam-rip ones
    (YouTube-addon path only).

    skip_api=True (SmartTube): only the free oEmbed existence check runs (0 quota).
    When the videos.list API is completely unavailable (details is None), the
    input list is returned unfiltered as a fallback.
    """
    if not hits:
        return []
    if skip_api:
        return _filterExistence(hits)
    details = _fetchVideoDetails([h['key'] for h in hits], api_key=api_key)
    if details is None:
        return hits
    keep = []
    for hit in hits:
        info = details.get(hit['key'])
        if info is None:
            _log('age-check %s: not in API response (deleted/private) REJECT' % hit['key'])
            continue
        restricted = info.get('age_restricted', False)
        hidden = info.get('unlisted', False)
        camrip = info.get('cam_rip', False)
        passed = not (restricted or hidden or camrip)
        _log('age-check %s: age=%s unlisted=%s cam=%s %s' % (hit['key'], restricted, hidden, camrip, 'SKIP' if not passed else 'OK'))
        if passed:
            keep.append(hit)
    return keep
| def _htmlDecode(s): | |
| """Decode HTML entities in YouTube API snippet titles (' -> ', " -> ", etc.).""" | |
| from html import unescape | |
| return unescape(s) | |
def _yearConflict(vtitle, year):
    """Return True when the video title mentions only years DIFFERENT from *year*.

    Scans the HTML-decoded title for 4-digit years (19xx/20xx, bare or in
    parentheses). No year found, or the expected year among them -> False
    (no conflict). Only foreign years found -> True: likely a different movie.
    """
    if not year:
        return False
    years_in_title = re.findall(r'(?<!\d)((?:19|20)\d{2})(?!\d)', _htmlDecode(vtitle))
    if not years_in_title:
        return False  # title carries no year — cannot tell, allow it
    return year not in years_in_title
def _titleOkChannel(vtitle, title, year=''):
    """Title check for curated channel results (KinoCheck):
    the movie title must appear, no YouTube Shorts, no conflicting year."""
    haystack = _htmlDecode(vtitle).lower()
    return (title.lower() in haystack
            and '#short' not in haystack
            and not _yearConflict(vtitle, year))
def _titleOkGlobal(vtitle, title, year=''):
    """Strict title check for global YouTube search results: movie title present,
    no junk words, at least one trailer word, no conflicting year."""
    haystack = _htmlDecode(vtitle).lower()
    if title.lower() not in haystack:
        return False
    if any(junk in haystack for junk in _JUNK_WORDS):
        return False
    if not any(word in haystack for word in _TRAILER_WORDS):
        return False
    return not _yearConflict(vtitle, year)
| def _uploadYearOk(snippet, year, max_gap=5): | |
| """Check if a YouTube video's upload date is within max_gap years of the movie year. | |
| Uses snippet.publishedAt (available in search results, no extra API call). | |
| Returns True if OK or if we can't determine (missing data). False if gap too large.""" | |
| if not year: | |
| return True | |
| pub = snippet.get('publishedAt', '') # e.g. "2019-03-11T17:00:06Z" | |
| if not pub or len(pub) < 4: | |
| return True | |
| try: | |
| upload_year = int(pub[:4]) | |
| movie_year = int(year) | |
| gap = upload_year - movie_year | |
| # Trailers are typically uploaded 0-2 years before/after release. | |
| # A large positive gap means someone uploaded a trailer for a much older movie — suspicious. | |
| if gap > max_gap: | |
| return False | |
| except (ValueError, TypeError): | |
| return True | |
| return True | |
# Known non-trailer channel keywords — matched as lowercase substrings against
# the oEmbed author_name in _oembedSanityCheck (a hit rejects the video).
_BAD_CHANNELS = [
    'music', 'vevo', 'records', 'gaming', 'gameplay', 'react',
    'podcast', 'radio', 'live performance',
]
def _oembedSanityCheck(video_id, title, year=''):
    """Last safety gate before playing a YouTube search result (steps 4/5).

    One free oEmbed call (0 quota) on the top pick verifies that:
      1. the video still exists (not deleted/private),
      2. the FULL title (search snippets may be truncated) has no year conflict,
      3. the channel name is not obviously wrong (music/gaming/... keywords).
    Returns True when the video may be played, False to skip this step.
    """
    data = _oembedFetch(video_id)
    if data is None:
        _log('sanity-check %s: FAIL (unavailable)' % video_id)
        return False
    if not data:
        # network error — no data available, assume the video is fine
        _log('sanity-check %s: PASS (no data, assume ok)' % video_id)
        return True
    full_title = data.get('title', '')
    channel = data.get('author_name', '')
    _log('sanity-check %s: title=%r author=%r' % (video_id, full_title[:80], channel))
    # Full title may reveal a year conflict the truncated snippet hid.
    if full_title and _yearConflict(full_title, year):
        _log('sanity-check %s: FAIL (year conflict in full title)' % video_id)
        return False
    # Reject obviously wrong channels (music labels, gaming, reactions, ...).
    if channel and any(bad in channel.lower() for bad in _BAD_CHANNELS):
        _log('sanity-check %s: FAIL (bad channel: %r)' % (video_id, channel))
        return False
    _log('sanity-check %s: PASS' % video_id)
    return True
# ── TMDB video helper ─────────────────────────────────────────────────────────
def _tmdbVideos(data, lang=None):
    """Extract YouTube Trailer/Teaser entries from a TMDB /videos response.

    Ordering: all Trailers before all Teasers, newest first within each type
    (two stable sorts). With lang set (e.g. 'de'/'en') only videos whose
    iso_639_1 matches are kept; lang=None keeps every language.
    """
    if not data:
        return []
    candidates = data.get('results', [])
    for video in candidates:
        _log(' tmdb-video: type=%s site=%s lang=%s name=%r date=%s' % (
            video.get('type'), video.get('site'), video.get('iso_639_1'),
            video.get('name', '')[:60], video.get('published_at', '')[:10]))
    wanted = [video for video in candidates
              if video.get('site') == 'YouTube'
              and video.get('type') in ('Trailer', 'Teaser')
              and (lang is None or video.get('iso_639_1') == lang)]
    # Stable double sort: newest first, then Trailer before Teaser.
    wanted.sort(key=lambda video: video.get('published_at', ''), reverse=True)
    wanted.sort(key=lambda video: 0 if video.get('type') == 'Trailer' else 1)
    return wanted
# ── Source-specific search functions ─────────────────────────────────────────
def _searchKinoCheckAPI(tmdb_id, mediatype='movie'):
    """Exact TMDB-ID lookup against the KinoCheck API. Free, no key, no YT quota.

    NOT gated by _yt_api_dead — this talks to kinocheck.de, not the YouTube API.

    Returns (hits, api_ok):
      hits   — list of {'name', 'key'} YouTube videos, [] when none found
      api_ok — True when the API answered (even with no trailer),
               False on error/timeout
    """
    try:
        endpoint = 'movies' if mediatype == 'movie' else 'shows'
        url = 'https://api.kinocheck.de/%s?tmdb_id=%s&language=de' % (endpoint, tmdb_id)
        _log('KinoCheck-API: %s' % url)
        data = _fetchJSON(url)
        if not data:
            _log('KinoCheck-API: empty response (down/rate-limited?)')
            return [], False
        # API responded — collect the primary trailer plus extra videos.
        trailer = data.get('trailer')
        videos = data.get('videos', [])
        if not (trailer or videos):
            _log('KinoCheck-API: no trailer for tmdb_id=%s' % tmdb_id)
            return [], True  # api_ok=True — they don't have it, skip YT fallback
        hits = []
        # Primary trailer goes first.
        if trailer and trailer.get('youtube_video_id'):
            hits.append({'name': trailer.get('title', ''), 'key': trailer['youtube_video_id']})
            _log('KinoCheck-API trailer: %s %r' % (trailer['youtube_video_id'], trailer.get('title', '')[:60]))
        # Then the remaining Trailer/Teaser videos, deduplicated by video id.
        seen = set(h['key'] for h in hits)
        for video in videos:
            vid = video.get('youtube_video_id', '')
            if not vid or vid in seen:
                continue
            cat = video.get('categories', '')
            if cat in ('Trailer', 'Teaser'):
                hits.append({'name': video.get('title', ''), 'key': vid})
                seen.add(vid)
                _log('KinoCheck-API video: %s %r cat=%s' % (vid, video.get('title', '')[:60], cat))
        return hits, True
    except Exception as e:
        _log('KinoCheck-API exception: %s' % e)
        return [], False
def _searchKinoCheck(title, year):
    """Search the KinoCheck YouTube channel for a German trailer.

    Requires a working user-supplied YouTube API key (search.list costs quota).
    Gated by the module-level _yt_api_dead flag.
    Year-matched results (e.g. "(2019)" in the video title) bubble to the front.

    Args:
        title: movie/show title (quoted in the query).
        year:  release year string ('' to skip year matching).
    Returns:
        list of {'name': video title, 'key': YouTube video id}; [] on any error.
    """
    try:
        if _yt_api_dead:
            _log('KinoCheck-YT: API dead, skipping')
            return []
        from urllib.parse import quote_plus
        apikey = _getUserKey()
        if not apikey:
            _log('KinoCheck-YT: no own API key, skipping')
            return []
        parts = ['"%s"' % title]
        if year:
            parts.append(str(year))
        parts.append('Trailer')
        query = ' '.join(parts)
        url = ('https://www.googleapis.com/youtube/v3/search?part=snippet'
               '&channelId=%s&q=%s&type=video&maxResults=10'
               '&relevanceLanguage=de&key=%s'
               % (KINOCHECK_CHANNEL, quote_plus(query), apikey))
        _log('KinoCheck query: %r' % query)
        data = _fetchJSON(url)
        if not data:
            # Fix: _fetchJSON can return None — previously this raised
            # AttributeError on data.get() below (rescued only by the broad except).
            _log('KinoCheck-YT: empty response')
            return []
        hits = []
        for it in data.get('items', []):
            vtitle = it['snippet']['title']
            ok = _titleOkChannel(vtitle, title, year)
            _log(' KinoCheck %s: %r' % ('PASS' if ok else 'REJECT', vtitle[:80]))
            if not ok:
                continue
            entry = {'name': vtitle, 'key': it['id']['videoId']}
            if year and '(%s)' % year in vtitle:
                hits.insert(0, entry)  # year match -> front
            else:
                hits.append(entry)
        return hits
    except Exception as e:
        _log('KinoCheck exception: %s' % e)
        return []
def _searchYouTube(title, year, lang=''):
    """Global YouTube search with strict title filter.

    Single query: "title" year trailer (maxResults=25).
    Raw results are cached in _yt_search_cache; a search in another language
    for the same title+year is reused as a cross-language cache hit (0 units).
    Gated by the _yt_api_dead flag and the presence of a user API key.

    Args:
        title: search title (quoted in the query).
        year:  release year string ('' to skip).
        lang:  2-letter relevanceLanguage hint ('' for none).
    Returns:
        list of {'name': video title, 'key': YouTube video id}; [] on any error.
    """
    def _filterItems(items):
        # Shared filter for cached and fresh results (previously duplicated):
        # strict title check plus upload-year plausibility; logs every verdict.
        hits = []
        for it in items:
            vtitle = it['snippet']['title']
            ok = _titleOkGlobal(vtitle, title, year)
            if ok and not _uploadYearOk(it.get('snippet', {}), year):
                ok = False
                _log(' YouTube-%s REJECT (upload year gap): %r pub=%s' % (
                    lang or 'xx', vtitle[:80], it.get('snippet', {}).get('publishedAt', '')[:10]))
            else:
                _log(' YouTube-%s %s: %r' % (lang or 'xx', 'PASS' if ok else 'REJECT', vtitle[:80]))
            if ok:
                hits.append({'name': vtitle, 'key': it['id']['videoId']})
        return hits
    try:
        if _yt_api_dead:
            _log('YouTube-%s: API dead, skipping' % (lang or 'xx'))
            return []
        from urllib.parse import quote_plus
        apikey = _getUserKey()
        if not apikey:
            _log('YouTube-%s: no own API key, skipping' % (lang or 'xx'))
            return []
        # Check cache (exact match)
        cache_key = (title.lower(), str(year), lang)
        cached_items = _yt_search_cache.get(cache_key)
        # Cross-language cache: same title+year from a different lang search
        if cached_items is None:
            for (t, y, l), items in _yt_search_cache.items():
                if t == title.lower() and y == str(year) and l != lang:
                    cached_items = items
                    _log('YouTube-%s: cross-lang cache hit from %s (%d items, 0 units)'
                         % (lang or 'xx', l, len(items)))
                    _yt_search_cache[cache_key] = items
                    break
        if cached_items is not None:
            _log('YouTube-%s: cache hit for %r year=%s, re-filtering %d items'
                 % (lang or 'xx', title, year, len(cached_items)))
            return _filterItems(cached_items)
        # Build query — single pass: "title" year trailer
        parts = ['"%s"' % title]
        if year:
            parts.append(str(year))
        parts.append('trailer')
        query = ' '.join(parts)
        url = ('https://www.googleapis.com/youtube/v3/search?part=snippet'
               '&q=%s&type=video&maxResults=25&key=%s'
               % (quote_plus(query), apikey))
        if lang:
            url += '&relevanceLanguage=%s' % lang[:2]
        _log('YouTube-%s query: %r' % (lang or 'xx', query))
        data = _fetchJSON(url)
        if not data:
            # Fix: _fetchJSON can return None — previously this raised
            # AttributeError below. Also: do NOT cache a failed fetch.
            _log('YouTube-%s: empty response' % (lang or 'xx'))
            return []
        # Cache raw items (before filtering)
        raw_items = data.get('items', [])
        _yt_search_cache[cache_key] = raw_items
        return _filterItems(raw_items)
    except Exception as e:
        _log('YouTube-%s exception: %s' % (lang or 'xx', e))
        return []
# ── IMDB direct MP4 lookup ───────────────────────────────────────────────────
# Quality preference used by _searchIMDB: 1080p > 720p > 480p > SD, with HLS
# (M3U8) and then "any MP4" only as fallbacks.
_IMDB_QUALITY_ORDER = ['DEF_1080p', 'DEF_720p', 'DEF_480p', 'DEF_SD']
# Public IMDB GraphQL caching endpoint (POST, JSON body).
_IMDB_GRAPHQL_URL = 'https://caching.graphql.imdb.com/'
# Query template: %s is the IMDB title id (e.g. tt0111161); fetches the primary
# video's id, name and its playbackURLs (mimeType/url/videoDefinition).
_IMDB_GRAPHQL_QUERY = '{"query":"query($id:ID!){title(id:$id){primaryVideos(first:1){edges{node{id name{value}playbackURLs{mimeType url videoDefinition}}}}}}","variables":{"id":"%s"}}'
def _searchIMDB(imdb_id):
    """IMDB trailer lookup via GraphQL API (~3 KB response vs 1.5 MB title page).
    Returns (mp4_url, quality) on success, ('', '') on failure.
    Result cached with 1h TTL (CloudFront signed URLs expire in ~24h).

    Side effects: reads/writes _imdb_cache; sets the module-global _imdb_dead
    flag on HTTP 403/429 so the rest of the session skips IMDB entirely.
    Negative results ('', '') are cached too, to avoid re-querying.
    """
    import time, json
    global _imdb_dead
    if not imdb_id:
        return ('', '')
    if _imdb_dead:
        _log('IMDB: dead flag set, skipping')
        return ('', '')
    # Check cache
    cached = _imdb_cache.get(imdb_id)
    if cached:
        url, quality, expiry = cached
        if time.time() < expiry:
            _log('IMDB cache hit: %s -> %s (%s)' % (imdb_id, url[:80] if url else '', quality))
            return (url, quality)
        else:
            # expired entry — drop it and re-fetch below
            del _imdb_cache[imdb_id]
    # GraphQL query for primary video + playback URLs
    _log('IMDB GraphQL: %s' % imdb_id)
    from urllib.request import Request, urlopen
    from urllib.error import HTTPError
    try:
        body = (_IMDB_GRAPHQL_QUERY % imdb_id).encode('utf-8')
        req = Request(_IMDB_GRAPHQL_URL, data=body, method='POST')
        req.add_header('Content-Type', 'application/json')
        req.add_header('Accept', 'application/json')
        # Browser-like UA — presumably needed to avoid bot blocking; confirm if changed.
        req.add_header('User-Agent',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
            'AppleWebKit/537.36 (KHTML, like Gecko) '
            'Chrome/120.0.0.0 Safari/537.36')
        resp = urlopen(req, timeout=5)
        data = json.loads(resp.read().decode('utf-8'))
    except HTTPError as e:
        if e.code in (403, 429):
            # blocked/rate-limited — disable IMDB lookups for this session
            _imdb_dead = True
            _log('IMDB blocked: HTTP %d — skipping IMDB for rest of session' % e.code)
        else:
            _log('IMDB GraphQL HTTP %s' % e.code)
        return ('', '')
    except Exception as e:
        _log('IMDB GraphQL error: %s' % e)
        return ('', '')
    # Parse response: data.title.primaryVideos.edges[0].node.playbackURLs
    try:
        edges = data['data']['title']['primaryVideos']['edges']
    except (KeyError, TypeError):
        _log('IMDB: unexpected GraphQL structure for %s' % imdb_id)
        _imdb_cache[imdb_id] = ('', '', time.time() + _IMDB_CACHE_TTL)
        return ('', '')
    if not edges:
        _log('IMDB: no trailer for %s' % imdb_id)
        _imdb_cache[imdb_id] = ('', '', time.time() + _IMDB_CACHE_TTL)
        return ('', '')
    node = edges[0].get('node', {})
    video_name = (node.get('name') or {}).get('value', '')
    urls = node.get('playbackURLs', [])
    _log('IMDB: video=%s name=%r urls=%d' % (node.get('id', ''), video_name, len(urls)))
    if not urls:
        _imdb_cache[imdb_id] = ('', '', time.time() + _IMDB_CACHE_TTL)
        return ('', '')
    # Pick best quality MP4 (first match in _IMDB_QUALITY_ORDER wins)
    best_url = ''
    best_quality = ''
    for pref in _IMDB_QUALITY_ORDER:
        for entry in urls:
            if entry.get('videoDefinition') == pref and entry.get('mimeType') == 'video/mp4':
                best_url = entry['url']
                best_quality = pref.replace('DEF_', '')
                break
        if best_url:
            break
    # Fallback to HLS (M3U8)
    if not best_url:
        for entry in urls:
            if 'mpegurl' in (entry.get('mimeType') or '').lower():
                best_url = entry['url']
                best_quality = 'HLS'
                break
    # Fallback to any MP4 regardless of definition
    if not best_url:
        for entry in urls:
            if entry.get('mimeType') == 'video/mp4':
                best_url = entry['url']
                best_quality = (entry.get('videoDefinition') or '').replace('DEF_', '') or '?'
                break
    _log('IMDB result: quality=%s url=%s' % (best_quality, best_url[:80] if best_url else ''))
    # Cache even an empty result (negative caching) for the TTL window.
    _imdb_cache[imdb_id] = (best_url, best_quality, time.time() + _IMDB_CACHE_TTL)
    return (best_url, best_quality)
# ── Notification + playback ───────────────────────────────────────────────────
def _notify(search_title, step, source, vtype, lang, poster):
    """Best-effort 3-second notification popup (upper-right corner).

    Heading: the search title that was used (DE or EN).
    Message: '<source> - <type> [<lang>]', e.g. 'TMDB - Trailer [DE]';
    with empty lang (e.g. IMDB): '<source> - <type>'.
    The poster URL is used as the icon when given. Never raises.
    """
    try:
        import xbmcgui
        if lang:
            message = '%s - %s [%s]' % (source, vtype, lang)
        else:
            message = '%s - %s' % (source, vtype)
        xbmcgui.Dialog().notification(
            search_title,
            message,
            poster if poster else xbmcgui.NOTIFICATION_INFO,
            3000,
            False,
        )
    except Exception:
        pass
def _play(video_id, step, source, vtype, lang, poster, search_title):
    """Show the source/language popup, then play the YouTube video through
    SmartTube (when installed) or the Kodi YouTube addon."""
    import xbmc
    _log('PLAY video_id=%s step=%d source=%s vtype=%s lang=%s title=%r'
         % (video_id, step, source, vtype, lang, search_title))
    _notify(search_title, step, source, vtype, lang, poster)
    pkg = _getSmartTubePackage()
    if not pkg:
        _log('PLAY via YouTube addon')
        xbmc.executebuiltin(
            'PlayMedia(plugin://plugin.video.youtube/play/?video_id=%s)' % video_id
        )
        return
    # Give the notification time to render before SmartTube covers the Kodi UI.
    xbmc.sleep(2000)
    _log('PLAY via SmartTube (%s)' % pkg)
    xbmc.executebuiltin(
        'StartAndroidActivity(%s,android.intent.action.VIEW,,'
        'https://www.youtube.com/watch?v=%s)' % (pkg, video_id)
    )
class _TrailerPlayer(object):
    """Thin xbmc.Player wrapper exposing stop/end/error state for polling loops."""
    def __init__(self):
        import xbmc as _xbmc
        class _CallbackPlayer(_xbmc.Player):
            # Kodi invokes these callbacks from its own thread; we only flip a flag.
            def __init__(inner):
                super().__init__()
                inner.done = False
            def onPlayBackStopped(inner):
                inner.done = True
            def onPlayBackEnded(inner):
                inner.done = True
            def onPlayBackError(inner):
                inner.done = True
        self._player = _CallbackPlayer()
        self._monitor = _xbmc.Monitor()
        self._xbmc = _xbmc
    def play(self, url):
        self._player.play(url)
    def stop(self):
        self._player.stop()
    @property
    def done(self):
        # True once playback stopped, ended, or errored
        return self._player.done
    def wait(self, secs):
        # Blocks up to secs seconds; True when Kodi requests shutdown
        return self._monitor.waitForAbort(secs)
    @property
    def aborted(self):
        return self._monitor.abortRequested()
    def fullscreen(self):
        return self._xbmc.getCondVisibility('Window.IsVisible(fullscreenvideo)')
def _playDirect(url, step, source, vtype, lang, poster, search_title):
    """Show the source popup, then play a direct MP4/M3U8 URL with Kodi's
    native player. Polls the fullscreen state — leaving fullscreen (back key)
    stops the trailer."""
    _log('PLAY-DIRECT url=%s step=%d source=%s vtype=%s title=%r'
         % (url[:80], step, source, vtype, search_title))
    _notify(search_title, step, source, vtype, lang, poster)
    player = _TrailerPlayer()
    player.play(url)
    # Phase 1: wait for fullscreen video to appear; bail out if playback dies first.
    reached_fullscreen = False
    while not (player.aborted or player.done):
        if player.fullscreen():
            reached_fullscreen = True
            break
        player.wait(0.1)
    if not reached_fullscreen:
        _log('PLAY-DIRECT: playback ended before fullscreen')
        return
    # Phase 2: stop as soon as the user leaves fullscreen (back = stop for trailers).
    while not (player.aborted or player.done):
        if not player.fullscreen():
            player.stop()
            _log('PLAY-DIRECT stopped (user left fullscreen)')
            break
        player.wait(0.3)
# ── One-time guidance popups (v7) ─────────────────────────────────────────────
def _showHintIfNeeded(has_yt_player, has_own_key, found_german, played_imdb):
    """Show a guidance popup after a trailer played (or at give-up) — at most
    once per Kodi session, tracked via home-window properties.

    Popup 1 — no YT player installed but IMDB played: suggest SmartTube / YT addon.
    Popup 2 — player present but no own API key and no German trailer found:
              suggest setting up an own API key.
    Returns True when a popup was shown, False otherwise. Never raises.
    """
    try:
        import xbmc, xbmcgui
        home = xbmcgui.Window(10000)
        if not has_yt_player and played_imdb:
            # Popup 1: IMDB worked, but KinoCheck/TMDB/YouTube need a player
            if home.getProperty('xship.trailer.hint.player'):
                return False
            xbmc.sleep(2000)
            if xbmc.getCondVisibility('System.Platform.Android'):
                msg = ('Tipp: Für deutsche Trailer (KinoCheck/TMDB) SmartTube installieren.\n'
                       'Für YouTube-Suche zusätzlich: YouTube Add-on mit eigenem API-Key einrichten.')
            else:
                msg = ('Tipp: YouTube Add-on mit eigenem API-Key installieren für '
                       'deutsche Trailer (KinoCheck/TMDB) und YouTube-Suche.')
            xbmcgui.Dialog().ok('Trailer', msg)
            home.setProperty('xship.trailer.hint.player', '1')
            _log('hint: showed player popup')
            return True
        elif has_yt_player and not has_own_key and not found_german:
            # Popup 2: player present, but no own key and nothing German found
            if home.getProperty('xship.trailer.hint.apikey'):
                return False
            xbmc.sleep(2000)
            msg = ('Kein deutscher Trailer bei KinoCheck/TMDB/IMDB gefunden.\n'
                   'YouTube Add-on mit eigenem API-Key einrichten für zusätzliche YouTube-Trailersuche.')
            xbmcgui.Dialog().ok('Trailer', msg)
            home.setProperty('xship.trailer.hint.apikey', '1')
            _log('hint: showed apikey popup')
            return True
    except Exception as e:
        _log('hint popup error: %s' % e)
    return False
| # ── Main entry point ────────────────────────────────────────────────────────── | |
| def playTrailer(tmdb_id, mediatype='movie', title='', year='', poster=''): | |
| """Trailer waterfall for xShip — search phase + play phase. | |
| Args: | |
| tmdb_id: TMDB numeric ID (string) | |
| mediatype: 'movie' or 'tv' | |
| title: display title in German (for YouTube fallback searches) | |
| year: release year string (for YouTube fallback searches) | |
| poster: poster image URL (shown as notification icon) | |
| """ | |
| import xbmc, xbmcgui | |
| from resources.lib.tmdb import cTMDB | |
| url_type = 'movie' if mediatype == 'movie' else 'tv' | |
| title_key = 'title' if mediatype == 'movie' else 'name' | |
| tmdb_de = cTMDB() | |
| tmdb_en = cTMDB(lang='en') | |
| _log('START tmdb_id=%s title=%r year=%s mediatype=%s' % (tmdb_id, title, year, mediatype)) | |
| # ── Capability detection (no early exit — IMDB works without YT player) ── | |
| smarttube = _getSmartTubePackage() | |
| has_yt_addon = xbmc.getCondVisibility('System.HasAddon(plugin.video.youtube)') | |
| has_yt_player = bool(smarttube or has_yt_addon) | |
| has_own_key = bool(_getUserKey()) | |
| skip_api = bool(smarttube) # SmartTube handles age-gates, no videos.list needed | |
| _vf = _api_checksum # verification fallback | |
| _log('Player: %s | YT addon: %s | has_yt_player: %s | has_own_key: %s | skip_api: %s' % ( | |
| smarttube if smarttube else 'none', has_yt_addon, has_yt_player, has_own_key, skip_api)) | |
| # ── Pre-flight: offer to enable ISA if off (once per session) ──── | |
| # Only when YouTube addon is the player (not SmartTube) | |
| if not smarttube and has_yt_addon: | |
| _ISA_WARNED = 'xship.trailer.isa_warned' | |
| try: | |
| import xbmcaddon | |
| from resources.lib.control import window | |
| yt = xbmcaddon.Addon('plugin.video.youtube') | |
| if yt.getSetting('kodion.video.quality.isa') != 'true': | |
| if not window.getProperty(_ISA_WARNED): | |
| window.setProperty(_ISA_WARNED, '1') | |
| if xbmcgui.Dialog().yesno( | |
| 'Trailer', | |
| '"InputStream Adaptive" im YouTube Add-on ist aus.\n' | |
| 'Trailer-Wiedergabe kann fehlschlagen. Aktivieren?'): | |
| yt.setSetting('kodion.video.quality.isa', 'true') | |
| _log('ISA enabled via pre-flight check') | |
| except Exception: | |
| pass | |
| # ── Fetch English title + IMDB ID up front ─────────────────────── | |
| en_data = None | |
| try: | |
| if url_type == 'tv': | |
| en_data = tmdb_en.getUrl('%s/%s' % (url_type, tmdb_id), term='append_to_response=external_ids') | |
| else: | |
| en_data = tmdb_en.getUrl('%s/%s' % (url_type, tmdb_id)) | |
| en_title = (en_data or {}).get(title_key, '') or title | |
| except Exception: | |
| en_title = title | |
| # Extract IMDB ID (movies: top-level; TV: external_ids sub-object) | |
| imdb_id = (en_data or {}).get('imdb_id', '') | |
| if not imdb_id and url_type == 'tv': | |
| imdb_id = (en_data or {}).get('external_ids', {}).get('imdb_id', '') or '' | |
| _log('EN title: %r (DE title: %r) imdb_id: %s' % (en_title, title, imdb_id)) | |
| # ── Steps 1-3: YouTube-based sources (skip if no YT player) ────── | |
| if has_yt_player: | |
| # ── Step 1: KinoCheck API (exact TMDB ID, free, no YT quota) ───── | |
| _log('--- Step 1: KinoCheck API ---') | |
| kc_api_hits, kc_api_ok = _searchKinoCheckAPI(tmdb_id, mediatype) | |
| _log('Step1 KinoCheck-API: hits=%d api_ok=%s' % (len(kc_api_hits), kc_api_ok)) | |
| if kc_api_hits: | |
| # Red Band trailers may be age-restricted — prefer non-Red-Band on YT addon | |
| if not skip_api: | |
| non_rb = [h for h in kc_api_hits if 'red band' not in h.get('name', '').lower()] | |
| if non_rb: | |
| kc_api_hits = non_rb | |
| else: | |
| # Only Red Band results — age-check before playing on YT addon | |
| _log('Step1 KinoCheck-API: only Red Band, running age-check') | |
| kc_api_hits = _filterAgeRestricted(kc_api_hits, skip_api=False, api_key=_vf) | |
| else: | |
| # SmartTube — still verify video exists (free oEmbed check) | |
| kc_api_hits = _filterExistence(kc_api_hits) | |
| if kc_api_hits: | |
| _play(kc_api_hits[0]['key'], 1, 'KinoCheck', 'Trailer', 'DE', poster, title) | |
| _showHintIfNeeded(has_yt_player, has_own_key, True, False) | |
| return | |
| _log('Step1 KinoCheck-API: all results unavailable, continuing waterfall') | |
| # ── Step 1b: KinoCheck YT channel (API down + own key for search.list) ─ | |
| if not kc_api_ok and has_own_key: | |
| _log('--- Step 1b: KinoCheck YT fallback (API was down) ---') | |
| kc_raw = _searchKinoCheck(title, year) | |
| kc_hit = _filterByDuration(kc_raw, skip_api=skip_api, api_key=_vf) | |
| _log('Step1b KinoCheck-YT: raw=%d filtered=%d' % (len(kc_raw), len(kc_hit))) | |
| if kc_hit: | |
| _play(kc_hit[0]['key'], 1, 'KinoCheck', 'Trailer', 'DE', poster, title) | |
| _showHintIfNeeded(has_yt_player, has_own_key, True, False) | |
| return | |
| # ── Step 2: TMDB videos (German) ────────────────────────────────── | |
| _log('--- Step 2: TMDB-DE videos ---') | |
| tmdb_de_raw = tmdb_de.getUrl('%s/%s/videos' % (url_type, tmdb_id)) | |
| videos = _filterAgeRestricted(_tmdbVideos(tmdb_de_raw, lang='de'), skip_api=skip_api, api_key=_vf) | |
| _log('Step2 TMDB-DE: raw=%d filtered=%d' % (len((tmdb_de_raw or {}).get('results', [])), len(videos))) | |
| if videos: | |
| # TMDB iso_639_1='de' only means German metadata tag — video may be English. | |
| # If DE title doesn't appear in the video name, treat it as English. | |
| vname = (videos[0].get('name') or '').lower() | |
| _norm = lambda s: re.sub(r"['\u2019\-]", '', s.lower()) | |
| if _norm(title) in _norm(vname): | |
| step2_title, step2_lang = title, 'DE' | |
| else: | |
| step2_title, step2_lang = en_title, 'EN' | |
| _log('Step2 lang-detect: vname=%r -> %s title=%r' % (vname[:60], step2_lang, step2_title)) | |
| _play(videos[0]['key'], 2, 'TMDB', videos[0].get('type', 'Trailer'), step2_lang, poster, step2_title) | |
| _showHintIfNeeded(has_yt_player, has_own_key, step2_lang == 'DE', False) | |
| return | |
| # ── Step 3: TMDB videos (English) ───────────────────────────────── | |
| _log('--- Step 3: TMDB-EN videos ---') | |
| tmdb_en_raw = tmdb_en.getUrl('%s/%s/videos' % (url_type, tmdb_id)) | |
| videos = _filterAgeRestricted(_tmdbVideos(tmdb_en_raw, lang='en'), skip_api=skip_api, api_key=_vf) | |
| _log('Step3 TMDB-EN: raw=%d filtered=%d' % (len((tmdb_en_raw or {}).get('results', [])), len(videos))) | |
| if videos: | |
| _play(videos[0]['key'], 3, 'TMDB', videos[0].get('type', 'Trailer'), 'EN', poster, en_title) | |
| _showHintIfNeeded(has_yt_player, has_own_key, False, False) | |
| return | |
| else: | |
| tmdb_en_raw = None # not fetched — no YT player to play TMDB results | |
| # ── Step 3b: IMDB (direct MP4, no player needed) ───────────────── | |
| if imdb_id and not _imdb_dead: | |
| _log('--- Step 3b: IMDB ---') | |
| imdb_url, imdb_quality = _searchIMDB(imdb_id) | |
| _log('Step3b IMDB: url=%s quality=%s' % (imdb_url[:80] if imdb_url else '', imdb_quality)) | |
| if imdb_url: | |
| _playDirect(imdb_url, 3, 'IMDB', 'Trailer', '', poster, en_title or title) | |
| _showHintIfNeeded(has_yt_player, has_own_key, False, True) | |
| return | |
| # ── Steps 4-5b: YouTube search + TMDB-ANY ──────────────────────── | |
| if has_yt_player: | |
| # ── Steps 4-5: YouTube search (needs own API key for search.list) ─ | |
| if has_own_key: | |
| user_key = _getUserKey() | |
| # ── Step 4: YouTube search (German) ─────────────────────────── | |
| _log('--- Step 4: YouTube-DE ---') | |
| yt_de_raw = _searchYouTube(title, year, lang='de') | |
| yt_de_hit = _filterByDuration(yt_de_raw, skip_api=skip_api, api_key=user_key) | |
| _log('Step4 YouTube-DE: raw=%d filtered=%d' % (len(yt_de_raw), len(yt_de_hit))) | |
| if yt_de_hit and _oembedSanityCheck(yt_de_hit[0]['key'], title, year): | |
| _play(yt_de_hit[0]['key'], 4, 'YouTube', 'Trailer', 'DE', poster, title) | |
| _showHintIfNeeded(has_yt_player, has_own_key, True, False) | |
| return | |
| # ── Step 5: YouTube search (English TMDB title) ─────────────── | |
| _log('--- Step 5: YouTube-EN ---') | |
| yt_en_raw = _searchYouTube(en_title, year, lang='en') | |
| yt_en_hit = _filterByDuration(yt_en_raw, skip_api=skip_api, api_key=user_key) | |
| _log('Step5 YouTube-EN title=%r raw=%d filtered=%d' % (en_title, len(yt_en_raw), len(yt_en_hit))) | |
| if yt_en_hit and _oembedSanityCheck(yt_en_hit[0]['key'], en_title, year): | |
| _play(yt_en_hit[0]['key'], 5, 'YouTube', 'Trailer', 'EN', poster, en_title) | |
| _showHintIfNeeded(has_yt_player, has_own_key, False, False) | |
| return | |
| # ── Step 5b: TMDB videos (any language — catches ES, KO, ZH, JA, etc.) ─ | |
| _log('--- Step 5b: TMDB-ANY videos ---') | |
| # Reuse tmdb_en_raw (EN endpoint returns all videos, we just filtered for EN before) | |
| if tmdb_en_raw: | |
| videos = _filterAgeRestricted(_tmdbVideos(tmdb_en_raw), skip_api=skip_api, api_key=_vf) | |
| # Exclude DE/EN videos we already tried | |
| videos = [v for v in videos if v.get('iso_639_1') not in ('de', 'en')] | |
| else: | |
| videos = [] | |
| _log('Step5b TMDB-ANY: filtered=%d' % len(videos)) | |
| if videos: | |
| vlang = (videos[0].get('iso_639_1') or '??').upper() | |
| _play(videos[0]['key'], 5, 'TMDB', videos[0].get('type', 'Trailer'), vlang, poster, en_title) | |
| _showHintIfNeeded(has_yt_player, has_own_key, False, False) | |
| return | |
| # ── Step 6: Give up ─────────────────────────────────────────────── | |
| _log('Step6 give up — has_yt_player=%s has_own_key=%s' % (has_yt_player, has_own_key)) | |
| hint_shown = _showHintIfNeeded(has_yt_player, has_own_key, False, False) | |
| if not hint_shown: | |
| if not has_yt_player: | |
| is_android = xbmc.getCondVisibility('System.Platform.Android') | |
| if is_android: | |
| msg = 'Kein Trailer gefunden.\nSmartTube oder YouTube Add-on installieren fuer mehr Quellen.' | |
| else: | |
| msg = 'Kein Trailer gefunden.\nYouTube Add-on installieren fuer mehr Quellen.' | |
| xbmcgui.Dialog().ok('Trailer', msg) | |
| else: | |
| xbmcgui.Dialog().notification( | |
| 'Trailer', 'Kein Trailer gefunden', | |
| xbmcgui.NOTIFICATION_WARNING, 3000, | |
| ) |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #2021-07-15 | |
| # edit 2025-08-02 switch from treads to concurrent.futures | |
| import sys | |
| import datetime, time, json | |
| from resources.lib.tmdb import cTMDB | |
| from concurrent.futures import ThreadPoolExecutor | |
| from resources.lib.indexers import navigator | |
| from resources.lib import searchDB, playcountDB, art, control | |
| from resources.lib.control import getKodiVersion, iteritems | |
| if int(getKodiVersion()) >= 20: from infotagger.listitem import ListItemInfoTag | |
# Query-string parameters of the current plugin invocation
# (e.g. "?action=tvshows&page=1"), parsed into a plain dict;
# empty dict when the script is started without arguments.
_params = dict(control.parse_qsl(sys.argv[2].replace('?',''))) if len(sys.argv) > 1 else dict()
class tvshows:
    """TV-show search screen for the Kodi addon (TMDB-backed).

    Responsibilities:
      * run a TMDB search for a query and page (``get``),
      * enrich the raw results with full metadata concurrently (``worker`` /
        ``super_meta``),
      * render the results as a Kodi directory with artwork, watched-state
        overlays and context menus (``Directory``),
      * render the search-history screen (``search``).
    """

    def __init__(self):
        self.list = []                  # raw/filtered result entries (meta dicts)
        self.meta = []                  # enriched metadata collected by worker()
        self.total_pages = 0            # total result pages reported by TMDB
        self.next_pages = 0             # page number used for the "next page" item
        self.query = ''                 # current search term
        self.activeSearchDB = 'TMDB'    # display name of the search provider
        #self.setSearchDB() # TODO different search providers
        self.playcount = 0

    def get(self, params):
        """Search TMDB for ``params['query']`` on ``params['page']`` and render the result.

        On a hit the query is persisted to the search history; on an empty
        result a short "nothing found" dialog is shown.  Any exception
        (bad page value, network error, ...) aborts silently — deliberate
        best-effort addon UX.
        """
        try:
            self.next_pages = int(params.get('page')) + 1
            self.query = params.get('query')
            self.list, self.total_pages = cTMDB().search_term('tvshow', params.get('query'), params.get('page'))
            if self.list is None or len(self.list) == 0:  # nothing found
                return control.infoDialog("Nichts gefunden1", time=2000)
            self.getDirectory(params)
            searchDB.save_query(params.get('query'), params.get('action'))
        except:
            return

    def getDirectory(self, params):
        """Enrich ``self.list`` via ``worker()`` and render it as a Kodi directory.

        ``params`` may pre-seed pagination state (``next_pages`` /
        ``total_pages``) and the result list itself.  Returns the filtered
        list on success, ``None`` on failure or empty result.
        """
        try:
            if params.get('next_pages'): self.next_pages = params.get('next_pages')
            if params.get('total_pages'): self.total_pages = params.get('total_pages')
            if params.get('list'): self.list = params.get('list')
            self.worker()
            if self.list is None or len(self.list) == 0:  # nothing found
                return control.infoDialog("Nichts gefunden", time=2000)
            self.Directory(self.list)
            return self.list
        except:
            return

    def search(self):
        """Render the TV-show search screen: a 'new search' entry plus the
        de-duplicated search history, each with a delete context entry."""
        # TODO different search providers
        #navigator.navigator().addDirectoryItem("DB für Suche auswählen", 'tvChangeSearchDB', self.activeSearchDB + '.png', 'DefaultTVShows.png', isFolder=False)
        navigator.navigator().addDirectoryItem("[B]Serien - neue Suche %s[/B]" % self.activeSearchDB, 'searchNew&table=tvshows', self.activeSearchDB + '_search.png', 'DefaultAddonsSearch.png',
                                               isFolder=False, context=('Einstellungen', 'addonSettings'))
        match = searchDB.getSearchTerms('tvshows')
        lst = []
        delete_option = False
        for index, i in enumerate(match):
            term = control.py2_encode(i['query'])
            if term not in lst:  # skip duplicate history entries
                delete_option = True
                navigator.navigator().addDirectoryItem(term, 'tvshows&page=1&query=%s' % control.quote_plus(term), '_search.png',
                                                       'DefaultAddonsSearch.png', isFolder=True,
                                                       context=("Suchanfrage löschen", 'searchDelTerm&table=tvshows&name=%s' % index))
                lst.append(term)
        if delete_option:
            navigator.navigator().addDirectoryItem("[B]Suchverlauf löschen[/B]", 'searchClear&table=tvshows', 'tools.png', 'DefaultAddonProgram.png', isFolder=False)
        navigator.navigator()._endDirectory('', False)  # addons videos files

    # TODO different search providers
    # def setSearchDB(self, new=''):
    #     if control.getSetting('active.SearchDB.tvshow'):
    #         _searchDB = control.getSetting('active.SearchDB.tvshow')
    #         if new != '':
    #             control.setSetting('active.SearchDB.tvshow', new)
    #             _searchDB = new
    #         self.activeSearchDB = _searchDB
    #     else:
    #         control.setSetting('active.SearchDB.tvshow', 'tmdb')
    #         self.activeSearchDB = 'tmdb'
    #
    # def changeSearchDB(self):
    #     active = control.getSetting('active.SearchDB.tvshow')
    #     data = []
    #     for i in ['tmdb', 'trakt']:
    #         if i == active: continue
    #         data.append('wechseln zu ' + i.upper())
    #     index = control.dialog.contextmenu(data)
    #     if index == -1:
    #         return
    #     term = data[index].lower().split()[-1]
    #     self.setSearchDB(term)
    #     url = '%s?action=tvSearch' % sys.argv[0]
    #     control.execute('Container.Update(%s)' % url)

    def Directory(self, items):
        """Emit one Kodi list item per show, including artwork, plot with
        rating header, watched/unwatched context menu, optional 'Trailer
        ansehen' entry (Issue #58), and a trailing 'next page' item."""
        if items is None or len(items) == 0:
            control.idle()
            sys.exit()
        sysaddon = sys.argv[0]
        syshandle = int(sys.argv[1])
        addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
        addonFanart, settingFanart = control.addonFanart(), control.getSetting('fanart')
        watchedMenu = "In %s [I]Gesehen[/I]" % control.addonName
        unwatchedMenu = "In %s [I]Ungesehen[/I]" % control.addonName
        # Only offer the trailer context entry when the YouTube addon is
        # installed AND has a user API key configured in its api_keys.json.
        hasYouTube = False
        if control.condVisibility('System.HasAddon(plugin.video.youtube)'):
            try:
                import xbmcvfs, json as _json
                _f = xbmcvfs.File('special://profile/addon_data/plugin.video.youtube/api_keys.json')
                _data = _json.loads(_f.read())
                _f.close()
                hasYouTube = bool(_data.get('keys', {}).get('user', {}).get('api_key'))
            except Exception:
                pass
        for i in items:
            try:
                meta = dict((k, v) for k, v in iteritems(i))
                title = i['title'] if 'title' in i else i['originaltitle']
                # NOTE(review): this silently drops every non-ASCII title,
                # including German titles with umlauts — confirm intentional.
                # str.isascii() also requires Python 3.7+.
                if not title.isascii(): continue
                try:
                    label = '%s (%s)' % (title, i['year'])  # shown in the list
                except:
                    label = title
                # Red/italic label for shows not yet released, or with an
                # unknown premiere date.
                if 'premiered' in i:
                    if datetime.datetime(*(time.strptime(i['premiered'], "%Y-%m-%d")[0:6])) > datetime.datetime.now():
                        label = '[COLOR=red][I]{}[/I][/COLOR]'.format(label)  # ffcc0000
                else:
                    label = '[COLOR=red][I]{}[/I][/COLOR]'.format(label)
                poster = i['poster'] if 'poster' in i and 'http' in i['poster'] else addonPoster
                fanart = i['fanart'] if 'fanart' in i and 'http' in i['fanart'] else addonFanart
                meta.update({'poster': poster})
                meta.update({'fanart': fanart})
                sysmeta = dict((k, v) for k, v in iteritems(meta))
                systitle = sysname = title
                sysmeta.update({'systitle': systitle})
                sysmeta.update({'sysname': sysname})
                _sysmeta = control.quote_plus(json.dumps(sysmeta))
                item = control.item(label=label, offscreen=True)
                item.setArt({'poster': poster, 'banner': addonBanner})
                if settingFanart == 'true': item.setProperty('Fanart_Image', fanart)
                cm = []
                try:
                    playcount = i['playcount'] if sysmeta['playcount'] == 0 else 1
                    if playcount == 1:
                        cm.append((unwatchedMenu, 'RunPlugin(%s?action=UpdatePlayCount&meta=%s&playCount=0)' % (sysaddon, _sysmeta)))
                        meta.update({'playcount': 1, 'overlay': 7})
                        sysmeta.update({'playcount': 1, 'overlay': 7})
                    else:
                        cm.append((watchedMenu, 'RunPlugin(%s?action=UpdatePlayCount&meta=%s&playCount=1)' % (sysaddon, _sysmeta)))
                        meta.update({'playcount': 0, 'overlay': 6})
                        sysmeta.update({'playcount': 0, 'overlay': 6})
                except:
                    pass
                if hasYouTube:
                    cm.append(('Trailer ansehen', 'RunPlugin(%s?action=playTrailer&tmdb_id=%s&mediatype=tv)' % (sysaddon, sysmeta['tmdb_id'])))
                cm.append(('Einstellungen', 'RunPlugin(%s?action=addonSettings)' % sysaddon))
                item.addContextMenuItems(cm)
                # Re-encode sysmeta AFTER the playcount/overlay updates above.
                sysmeta = control.quote_plus(json.dumps(sysmeta))
                url = '%s?action=seasons&sysmeta=%s' % (sysaddon, sysmeta)
                if 'plot' in i: plot = i['plot']
                else: plot = ''
                votes = ''
                if 'rating' in i and i['rating'] != '':
                    if 'votes' in i: votes = '(%s)' % str(i['votes']).replace(',', '')
                    plot = '[COLOR blue]Bewertung : %.1f %s[/COLOR]%s%s' % (float(i['rating']), votes, "\n\n", plot)
                meta.update({'plot': plot})
                aActors = []
                if 'cast' in i and i['cast']: aActors = i['cast']
                ## supported infolabels: https://codedocs.xyz/AlwinEsch/kodi/group__python__xbmcgui__listitem.html#ga0b71166869bda87ad744942888fb5f14
                # remove unsupported infolabels
                meta.pop('cast', None)  # replaced by item.setCast(i['cast'])
                meta.pop('fanart', None)
                meta.pop('poster', None)
                meta.pop('imdb_id', None)
                meta.pop('tvdb_id', None)
                meta.pop('tmdb_id', None)
                meta.pop('number_of_seasons', None)
                meta.pop('originallanguage', None)
                meta.pop('budget', None)
                meta.pop('revenue', None)
                meta.pop('aliases', None)
                meta.pop('backdrop_url', None)
                meta.pop('cover_url', None)
                # faked video/audio stream info (currently disabled)
                # video_streaminfo = {'codec': 'h264', "width": 1920, "height": 1080}
                # audio_streaminfo = {'codec': 'dts', 'channels': 6, 'language': 'de'}
                video_streaminfo = {}
                audio_streaminfo = {}
                if int(getKodiVersion()) <= 19:
                    if aActors: item.setCast(aActors)
                    item.setInfo(type='Video', infoLabels=meta)
                    item.addStreamInfo('video', video_streaminfo)
                    item.addStreamInfo('audio', audio_streaminfo)
                else:
                    # Kodi 20+: legacy setters are deprecated, use infotagger.
                    info_tag = ListItemInfoTag(item, 'video')
                    info_tag.set_info(meta)
                    stream_details = {
                        'video': [video_streaminfo],
                        'audio': [audio_streaminfo]}
                    info_tag.set_stream_details(stream_details)
                    info_tag.set_cast(aActors)
                control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
            except Exception as e:
                # best-effort per item: log the error and keep building the list
                print(e)
        # next page
        try:
            # NOTE(review): get() already set next_pages to page+1; this adds
            # one more — confirm the pagination offset against the caller.
            self.next_pages = self.next_pages + 1
            if self.next_pages <= self.total_pages:
                if self.query:
                    url = '%s?action=tvshows&url=&page=%s&query=%s' % (sys.argv[0], self.next_pages, self.query)
                else:
                    url = '%s?action=listings' % sys.argv[0]
                    url += '&media_type=%s' % _params.get('media_type')
                    url += '&next_pages=%s' % self.next_pages
                    url += '&url=%s' % control.quote_plus(_params.get('url'))
                item = control.item(label="Nächste Seite")
                icon = control.addonNext()
                item.setArt({'icon': icon, 'thumb': icon, 'poster': icon, 'banner': icon})
                if addonFanart is not None: item.setProperty('Fanart_Image', addonFanart)
                # -> hide watched/unwatched in the context menu and the
                # "Keine Informationen verfügbar" hint (depends on control.content())
                video_streaminfo = {'overlay': 4, 'plot': ' '}  # alt255
                if int(getKodiVersion()) <= 19:
                    item.setInfo('video', video_streaminfo)
                else:
                    stream_details = {'video': [video_streaminfo]}
                    info_tag = ListItemInfoTag(item, 'video')
                    info_tag.set_stream_details(stream_details)
                control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
        except:
            pass
        control.content(syshandle, 'tvshows')  # movies tvshows
        #TODO
        control.plugincategory(syshandle, control.addonVersion)
        control.endofdirectory(syshandle, cacheToDisc=True)  # False -> because of playcount

    def worker(self):
        """Fetch full metadata for every entry in ``self.list`` concurrently,
        then sort by title and drop entries without plot or real poster."""
        self.meta = []
        with ThreadPoolExecutor() as executor:
            # shutdown(wait=True) on exit guarantees all tasks finished
            executor.map(self.super_meta, self.list)
        self.meta = sorted(self.meta, key=lambda k: k['title'])
        self.list = [i for i in self.meta]  # in case a filter function is added later
        self.list = [i for i in self.list if not i['plot'].strip() == '' and not i['poster'] == control.addonPoster()]  # filter

    def super_meta(self, id):
        """Thread-pool task: fetch TMDB metadata for one show id, attach a
        fallback poster (via TVDB art) and the stored playcount/overlay,
        then append the result to ``self.meta``.

        Failures are swallowed so one bad entry never breaks the listing.
        """
        try:
            meta = cTMDB().get_meta('tvshow', '', '', id, advanced='true')
            if not 'poster' in meta or meta['poster'] == '':
                if meta['tvdb_id']:
                    poster = art.getTvShows_art(meta['tmdb_id'], meta['tvdb_id'])
                    meta.update({'poster': poster})
            try:
                playcount = playcountDB.getPlaycount('tvshow', 'title', meta['title'], None, None)
                playcount = playcount if playcount else 0
                overlay = 7 if playcount > 0 else 6  # 7 = watched, 6 = unwatched
                meta.update({'playcount': playcount, 'overlay': overlay})
            except:
                pass
            self.meta.append(meta)  # list.append is atomic under the GIL
            return meta
        except:
            pass
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment