Changeset 42560


Ignore:
Timestamp:
Jul 27, 2018, 9:47:40 PM (14 months ago)
Author:
obi
Message:

tithek hoster fix streammago streamcherry

Location:
titan/mediathek/localhoster
Files:
3 edited

Legend:

Unmodified
Added
Removed
  • titan/mediathek/localhoster/lib/common.py

    r41172 r42560  
    1717"""
    1818import os
     19from net import Net, get_ua  # @UnusedImport
    1920#from lib import log_utils  # @UnusedImport
    2021#from lib.net import Net  # @UnusedImport
     
    3435
    3536
    36 
     37RAND_UA = get_ua()
    3738IE_USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko'
    3839#FF_USER_AGENT = 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.7.3000 Chrome/30.0.1599.101 Safari/537.36'
  • titan/mediathek/localhoster/lib/net.py

    r39400 r42560  
    1616    along with this program.  If not, see <http://www.gnu.org/licenses/>.
    1717'''
    18 
     18import random
    1919import cookielib
    2020import gzip
     
    2424import urllib2
    2525import socket
    26 from urlparse import urlparse
    27 from urlparse import urlunparse
    2826import time
    29 
    30 class HeadRequest(urllib2.Request):
    31     '''A Request class that sends HEAD requests'''
    32     def get_method(self):
    33         return 'HEAD'
     27#import kodi
     28
     29# Set Global timeout - Useful for slow connections and Putlocker.
     30socket.setdefaulttimeout(10)
     31
     32BR_VERS = [
     33    ['%s.0' % i for i in xrange(18, 50)],
     34    ['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111', '39.0.2171.71', '39.0.2171.95', '39.0.2171.99', '40.0.2214.93', '40.0.2214.111',
     35     '40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124', '44.0.2403.155', '44.0.2403.157', '45.0.2454.101', '45.0.2454.85', '46.0.2490.71',
     36     '46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80', '48.0.2564.116', '49.0.2623.112', '50.0.2661.86'],
     37    ['11.0'],
     38    ['8.0', '9.0', '10.0', '10.6']]
     39WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1', 'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
     40FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
     41RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
     42            'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
     43            'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko',
     44            'Mozilla/5.0 (compatible; MSIE {br_ver}; {win_ver}{feature}; Trident/6.0)']
     45def get_ua():
     46#    try: last_gen = int(kodi.get_setting('last_ua_create'))
     47    try: last_gen = 0
     48    except: last_gen = 0
     49#    if not kodi.get_setting('current_ua') or last_gen < (time.time() - (7 * 24 * 60 * 60)):
     50#    if not last_gen < (time.time() - (7 * 24 * 60 * 60)):
     51    index = random.randrange(len(RAND_UAS))
     52    versions = {'win_ver': random.choice(WIN_VERS), 'feature': random.choice(FEATURES), 'br_ver': random.choice(BR_VERS[index])}
     53    user_agent = RAND_UAS[index].format(**versions)
     54        # logger.log('Creating New User Agent: %s' % (user_agent), log_utils.LOGDEBUG)
     55#        kodi.set_setting('current_ua', user_agent)
     56#        kodi.set_setting('last_ua_create', str(int(time.time())))
     57#    else:
     58#        user_agent = kodi.get_setting('current_ua')
     59    return user_agent
    3460
    3561class Net:
    3662    '''
    3763    This class wraps :mod:`urllib2` and provides an easy way to make http
    38     requests while taking care of cookies, proxies, gzip compression and 
     64    requests while taking care of cookies, proxies, gzip compression and
    3965    character encoding.
    40    
     66
    4167    Example::
    42    
     68
    4369        from addon.common.net import Net
    4470        net = Net()
     
    4672        print response.content
    4773    '''
    48     IE_USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko'
    49     FF_USER_AGENT = 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0'
    50     IOS_USER_AGENT = 'Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25'
    51     ANDROID_USER_AGENT = 'Mozilla/5.0 (Linux; Android 4.4.2; Nexus 4 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.114 Mobile Safari/537.36'
    52 
    53     _cj = cookielib.MozillaCookieJar()
    54 
     74
     75    _cj = cookielib.LWPCookieJar()
    5576    _proxy = None
    56     _user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36'
    57     _accept = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
     77    _user_agent = 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0'
    5878    _http_debug = False
    59     _socket_timeout = 60
    60 
    61     def __init__(self, cookie_file='', proxy='', user_agent='',
    62                  http_debug=False, accept=_accept, socket_timeout=_socket_timeout, cloudflare=False):
     79
     80    def __init__(self, cookie_file='', proxy='', user_agent='', http_debug=False):
    6381        '''
    6482        Kwargs:
    6583            cookie_file (str): Full path to a file to be used to load and save
    6684            cookies to.
    67            
    68             proxy (str): Proxy setting (eg. 
     85
     86            proxy (str): Proxy setting (eg.
    6987            ``'http://user:pass@example.com:1234'``)
    70            
    71             user_agent (str): String to use as the User Agent header. If not 
     88
     89            user_agent (str): String to use as the User Agent header. If not
    7290            supplied the class will use a default user agent (chrome)
    73            
     91
    7492            http_debug (bool): Set ``True`` to have HTTP header info written to
    7593            the XBMC log for all requests.
    76            
    77             accept (str) : String to use as HTTP Request Accept header.
    78            
    79             socket_timeout (int): time in seconds for socket connections to wait until time out
    80 
    81             cloudflare (bool): Set ``True`` to check all requests that raise HTTPError 503 for Cloudflare challenge and solve
    82             This can be changed per request as well, see http_GET, http_PUSH
    83         '''
    84    
    85         #Set socket timeout - Useful for slow connections
    86         socket.setdefaulttimeout(socket_timeout)
    87 
    88         # empty jar for each instance rather than scope of the import
    89         self._cloudflare_jar = cookielib.MozillaCookieJar()
    90 
    91         self.cloudflare = cloudflare
     94        '''
    9295        if cookie_file:
    9396            self.set_cookies(cookie_file)
     
    98101        self._http_debug = http_debug
    99102        self._update_opener()
    100        
    101    
     103
    102104    def set_cookies(self, cookie_file):
    103105        '''
    104106        Set the cookie file and try to load cookies from it if it exists.
    105        
     107
    106108        Args:
    107109            cookie_file (str): Full path to a file to be used to load and save
     
    114116        except:
    115117            return False
    116        
    117    
    118     def get_cookies(self):
     118
     119    def get_cookies(self, as_dict=False):
    119120        '''Returns A dictionary containing all cookie information by domain.'''
    120         return self._cj._cookies
    121 
     121        if as_dict:
     122            return dict((cookie.name, cookie.value) for cookie in self._cj)
     123        else:
     124            return self._cj._cookies
    122125
    123126    def save_cookies(self, cookie_file):
    124127        '''
    125128        Saves cookies to a file.
    126        
     129
    127130        Args:
    128131            cookie_file (str): Full path to a file to save cookies to.
    129132        '''
    130         self._cj.save(cookie_file, ignore_discard=True)       
    131 
    132        
     133        self._cj.save(cookie_file, ignore_discard=True)
     134
    133135    def set_proxy(self, proxy):
    134136        '''
    135137        Args:
    136             proxy (str): Proxy setting (eg. 
     138            proxy (str): Proxy setting (eg.
    137139            ``'http://user:pass@example.com:1234'``)
    138140        '''
     
    140142        self._update_opener()
    141143
    142        
    143144    def get_proxy(self):
    144145        '''Returns string containing proxy details.'''
    145146        return self._proxy
    146        
    147        
     147
    148148    def set_user_agent(self, user_agent):
    149149        '''
     
    153153        self._user_agent = user_agent
    154154
    155        
    156155    def get_user_agent(self):
    157156        '''Returns user agent string.'''
    158157        return self._user_agent
    159158
    160 
    161     def _update_opener(self, cloudflare_jar=False):
    162         """
     159    def _update_opener(self):
     160        '''
    163161        Builds and installs a new opener to be used by all future calls to
    164162        :func:`urllib2.urlopen`.
    165         """
     163        '''
    166164        if self._http_debug:
    167165            http = urllib2.HTTPHandler(debuglevel=1)
     
    169167            http = urllib2.HTTPHandler()
    170168
    171         if cloudflare_jar:
    172             self._cloudflare_jar = cookielib.MozillaCookieJar()
    173             jar = self._cloudflare_jar
    174         else:
    175             jar = self._cj
    176 
    177169        if self._proxy:
    178             opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar),
     170            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
    179171                                          urllib2.ProxyHandler({'http':
    180172                                                                self._proxy}),
     
    183175
    184176        else:
    185             opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar),
     177            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj),
    186178                                          urllib2.HTTPBasicAuthHandler(),
    187179                                          http)
    188180        urllib2.install_opener(opener)
    189181
    190 
    191     def _parseJSString(self, s):
    192         """
    193         lambda
    194         plugin.video.genesis\resources\lib\libraries\cloudflare.py
    195         https://offshoregit.com/lambda81/
    196         """
    197         try:
    198             offset=1 if s[0]=='+' else 0
    199             val = int(eval(s.replace('!+[]','1').replace('!![]','1').replace('[]','0').replace('(','str(')[offset:]))
    200             return val
    201         except:
    202             raise Exception
    203 
    204 
    205     def _cloudflare_challenge(self, url, challenge, form_data={}, headers={}, compression=True):
    206         """
    207         Use _set_cloudflare to call this, not intended to be called directly.
    208         Solve challenge and make request with cloudflare cookie jar
    209 
    210         Part from:
    211         lambda
    212         plugin.video.genesis\resources\lib\libraries\cloudflare.py
    213         https://offshoregit.com/lambda81/
    214         """
    215         jschl = re.compile('name="jschl_vc" value="(.+?)"/>').findall(challenge)[0]
    216         init = re.compile('setTimeout\(function\(\){\s*.*?.*:(.*?)};').findall(challenge)[0]
    217         builder = re.compile(r"challenge-form\'\);\s*(.*)a.v").findall(challenge)[0]
    218         decrypt_val = self._parseJSString(init)
    219         lines = builder.split(';')
    220 
    221         for line in lines:
    222             if len(line)>0 and '=' in line:
    223                 sections=line.split('=')
    224                 line_val = self._parseJSString(sections[1])
    225                 decrypt_val = int(eval(str(decrypt_val)+sections[0][-1]+str(line_val)))
    226 
    227         path = urlparse(url).path
    228         netloc = urlparse(url).netloc
    229         if not netloc:
    230             netloc = path
    231 
    232         answer = decrypt_val + len(netloc)
    233 
    234         url = url.rstrip('/')
    235         query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (url, jschl, answer)
    236 
    237         if 'type="hidden" name="pass"' in challenge:
    238             passval = re.compile('name="pass" value="(.*?)"').findall(challenge)[0]
    239             query = '%s/cdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % \
    240                     (url, urllib.quote_plus(passval), jschl, answer)
    241             time.sleep(9)
    242 
    243         self._update_opener(cloudflare_jar=True)
    244         req = urllib2.Request(query)
     182    def http_GET(self, url, headers={}, compression=True):
     183        '''
     184        Perform an HTTP GET request.
     185
     186        Args:
     187            url (str): The URL to GET.
     188
     189        Kwargs:
     190            headers (dict): A dictionary describing any headers you would like
     191            to add to the request. (eg. ``{'X-Test': 'testing'}``)
     192
     193            compression (bool): If ``True`` (default), try to use gzip
     194            compression.
     195
     196        Returns:
     197            An :class:`HttpResponse` object containing headers and other
     198            meta-information about the page and the page content.
     199        '''
     200        return self._fetch(url, headers=headers, compression=compression)
     201
     202    def http_POST(self, url, form_data, headers={}, compression=True):
     203        '''
     204        Perform an HTTP POST request.
     205
     206        Args:
     207            url (str): The URL to POST.
     208
     209            form_data (dict): A dictionary of form data to POST.
     210
     211        Kwargs:
     212            headers (dict): A dictionary describing any headers you would like
     213            to add to the request. (eg. ``{'X-Test': 'testing'}``)
     214
     215            compression (bool): If ``True`` (default), try to use gzip
     216            compression.
     217
     218        Returns:
     219            An :class:`HttpResponse` object containing headers and other
     220            meta-information about the page and the page content.
     221        '''
     222        return self._fetch(url, form_data, headers=headers, compression=compression)
     223
     224    def http_HEAD(self, url, headers={}):
     225        '''
     226        Perform an HTTP HEAD request.
     227
     228        Args:
     229            url (str): The URL to GET.
     230
     231        Kwargs:
     232            headers (dict): A dictionary describing any headers you would like
     233            to add to the request. (eg. ``{'X-Test': 'testing'}``)
     234
     235        Returns:
     236            An :class:`HttpResponse` object containing headers and other
     237            meta-information about the page.
     238        '''
     239        request = urllib2.Request(url)
     240        request.get_method = lambda: 'HEAD'
     241        request.add_header('User-Agent', self._user_agent)
     242        for key in headers:
     243            request.add_header(key, headers[key])
     244        response = urllib2.urlopen(request)
     245        return HttpResponse(response)
     246
     247    def _fetch(self, url, form_data={}, headers={}, compression=True):
     248        '''
     249        Perform an HTTP GET or POST request.
     250
     251        Args:
     252            url (str): The URL to GET or POST.
     253
     254            form_data (dict): A dictionary of form data to POST. If empty, the
     255            request will be a GET, if it contains form data it will be a POST.
     256
     257        Kwargs:
     258            headers (dict): A dictionary describing any headers you would like
     259            to add to the request. (eg. ``{'X-Test': 'testing'}``)
     260
     261            compression (bool): If ``True`` (default), try to use gzip
     262            compression.
     263
     264        Returns:
     265            An :class:`HttpResponse` object containing headers and other
     266            meta-information about the page and the page content.
     267        '''
     268        req = urllib2.Request(url)
    245269        if form_data:
    246             form_data = urllib.urlencode(form_data)
    247             req = urllib2.Request(query, form_data)
     270            if isinstance(form_data, basestring):
     271                form_data = form_data
     272            else:
     273                form_data = urllib.urlencode(form_data, True)
     274            req = urllib2.Request(url, form_data)
    248275        req.add_header('User-Agent', self._user_agent)
    249         for k, v in headers.items():
    250             req.add_header(k, v)
     276        for key in headers:
     277            req.add_header(key, headers[key])
    251278        if compression:
    252279            req.add_header('Accept-Encoding', 'gzip')
    253         try:
    254             response = urllib2.urlopen(req)
    255         except urllib2.HTTPError as e:
    256             pass
    257 
    258 
    259     def _set_cloudflare(self, url, challenge, form_data={}, headers={}, compression=True):
    260         """
    261         Entry Point for _cloudflare_challenge
    262         Calls cloudflare_challenge on netloc, not full url w/ path
    263         Puts any cloudflare cookies in the main cookie jar
    264         Args:
    265             url (str): The URL to site of potential Cloudflare IUA.
    266 
    267             challenge (str): html contents of the page that raised 503, containing potential Cloudflare IUA Challenge
    268         Kwargs:
    269             form_data (dict): A dictionary of form data if pass-through from POST.
    270 
    271             headers (dict): A dictionary describing any headers you would like
    272             to add to the request. (eg. ``{'X-Test': 'testing'}``)
    273 
    274             compression (bool): If ``True`` (default), try to use gzip
    275             compression.
    276         """
    277         netloc = urlparse(url).netloc
    278         if not netloc:
    279             netloc = urlparse(url).path
    280         cloudflare_url = urlunparse((urlparse(url).scheme, netloc, '', '', '', ''))
    281         try:
    282             self._cloudflare_challenge(cloudflare_url, challenge, form_data, headers, compression)
    283             for c in self._cloudflare_jar:
    284                 self._cj.set_cookie(c)
    285             self._update_opener()
    286         except:
    287             # make sure we update to main jar
    288             self._update_opener()
    289             raise Exception
    290 
    291 
    292     def url_with_headers(self, url, referer=None, user_agent=None, cookies=None, proxy=None, connection_timeout=None,
    293                          encoding='', accept_charset='', sslcipherlist='', noshout='false', seekable='1'):
    294         '''
    295         Return url with Referer, User-Agent, Cookies, Proxy, Connection-Timeout, Encoding, Accept-Charset,
    296         SSLCipherList, NoShout and Seekable
    297         Based on: https://github.com/xbmc/xbmc/blob/master/xbmc/filesystem/CurlFile.cpp#L782
    298         Args:
    299             url (str): The URL to append headers to.
    300 
    301         Kwargs:
    302             referer (str): If None (default), urlunparse((urlparse(url).scheme, netloc, path, '', '', '')) is used and append if set
    303 
    304             user_agent (str): If None (default), self._user_agent is used and append if set
    305 
    306             cookies (bool): If ``None`` (default), use self.cloudflare as bool (False as default)
    307             Append cookies to URL as well
    308 
    309             proxy (str): If None (default), self.proxy is used and append if set
    310 
    311             connection_timeout (str): If None (default), self._socket_timeout is used and append if set
    312 
    313             encoding (str): append if set
    314 
    315             accept_charset (str): append if set
    316 
    317             sslcipherlist (str): append if set
    318 
    319             noshout (str): 'true'/'false', skip shout, append if 'true' ('false' is kodi default)
    320 
    321             seekable (str): '0'/'1', append if 0 ('1' is kodi default)
    322         Returns:
    323             http://example.com/myimage.png|Referer=%%%%%&User-Agent=%%%%%...
    324         '''
    325         kodi_schemes = ('special', 'plugin', 'script', 'profile')
    326         if ('://' not in url) or (url.startswith(kodi_schemes)):
    327             # don't waste time and return url
    328             return url
    329 
    330         _tmp = re.search('(.+?)(?:\|.*|$)', url)
    331         if _tmp:
    332             # trim any headers that may already be attached to url
    333             url = _tmp.group(1)
    334 
    335         if referer is not None:
    336             try:
    337                 referer = str(referer)
    338             except:
    339                 referer = None
    340         if referer is None:
    341             path = urlparse(url).path
    342             netloc = urlparse(url).netloc
    343             if not netloc:
    344                 netloc = path
    345                 path = ''
    346             referer = urlunparse((urlparse(url).scheme, netloc, path, '', '', ''))
    347             if referer == url:
    348                 index = path.rfind('/')
    349                 if index >= 0:
    350                     referer = urlunparse((urlparse(url).scheme, netloc, path[:index], '', '', ''))
    351         if user_agent is None:
    352             user_agent = self._user_agent
    353         else:
    354             try:
    355                 user_agent = str(user_agent)
    356             except:
    357                 user_agent = self._user_agent
    358         if cookies is None:
    359             cookies = self.cloudflare
    360         if proxy is None:
    361             proxy = self._proxy
    362         if connection_timeout is None:
    363             connection_timeout = self._socket_timeout
    364         try:
    365             connection_timeout = str(connection_timeout)
    366         except:
    367             connection_timeout = None
    368         try:
    369             if str(seekable) != '0':
    370                 seekable = None
    371         except:
    372             seekable = None
    373         try:
    374             if str(noshout).lower() != 'true':
    375                 noshout = None
    376         except:
    377             noshout = None
    378 
    379         url += '|Referer=' + urllib.quote_plus(referer) + '&User-Agent=' + urllib.quote_plus(user_agent)
    380         if proxy:
    381             try:
    382                 url += '&HTTPProxy=' + urllib.quote_plus(str(proxy))
    383             except:
    384                 pass
    385         if connection_timeout:
    386             url += '&Connection-Timeout=' + urllib.quote_plus(connection_timeout)
    387         if encoding:
    388             try:
    389                 url += '&Encoding=' + urllib.quote_plus(str(encoding))
    390             except:
    391                 pass
    392         if accept_charset:
    393             try:
    394                 url += '&Accept-Charset=' + urllib.quote_plus(str(accept_charset))
    395             except:
    396                 pass
    397         if sslcipherlist:
    398             try:
    399                 url += '&SSLCipherList=' + urllib.quote_plus(str(sslcipherlist))
    400             except:
    401                 pass
    402         if noshout:
    403             url += '&NoShout=' + urllib.quote_plus(str(noshout).lower())
    404         if seekable:
    405             url += '&Seekable=' + urllib.quote_plus(str(seekable))
    406         if cookies:
    407             cookie_string = ''
    408             for c in self._cj:
    409                 if c.domain and (c.domain.lstrip('.') in url):
    410                     cookie_string += '%s=%s;' % (c.name, c.value)
    411             if cookie_string:
    412                 url += '&Cookie=' + urllib.quote_plus(cookie_string)
    413         return url
    414 
    415 
    416     def http_GET(self, url, headers={}, compression=True, cloudflare=None):
    417         '''
    418         Perform an HTTP GET request.
    419        
    420         Args:
    421             url (str): The URL to GET.
    422            
    423         Kwargs:
    424             headers (dict): A dictionary describing any headers you would like
    425             to add to the request. (eg. ``{'X-Test': 'testing'}``)
    426 
    427             compression (bool): If ``True`` (default), try to use gzip
    428             compression.
    429 
    430             cloudflare (bool): If ``None`` (default), use self.cloudflare as bool (False as default)
    431             On HTTPError 503 check for Cloudflare challenge and solve
    432         Returns:
    433             An :class:`HttpResponse` object containing headers and other
    434             meta-information about the page and the page content.
    435         '''
    436         if cloudflare is None:
    437             cloudflare = self.cloudflare
    438         return self._fetch(url, headers=headers, compression=compression, cloudflare=cloudflare)
    439        
    440 
    441     def http_POST(self, url, form_data, headers={}, compression=True, cloudflare=None):
    442         '''
    443         Perform an HTTP POST request.
    444        
    445         Args:
    446             url (str): The URL to POST.
    447            
    448             form_data (dict): A dictionary of form data to POST.
    449            
    450         Kwargs:
    451             headers (dict): A dictionary describing any headers you would like
    452             to add to the request. (eg. ``{'X-Test': 'testing'}``)
    453 
    454             compression (bool): If ``True`` (default), try to use gzip
    455             compression.
    456 
    457             cloudflare (bool): If ``None`` (default), use self.cloudflare as bool (False as default)
    458             On HTTPError 503 check for Cloudflare challenge and solve
    459         Returns:
    460             An :class:`HttpResponse` object containing headers and other
    461             meta-information about the page and the page content.
    462         '''
    463         if cloudflare is None:
    464             cloudflare = self.cloudflare
    465         return self._fetch(url, form_data, headers=headers,
    466                            compression=compression, cloudflare=cloudflare)
    467 
    468    
    469     def http_HEAD(self, url, headers={}):
    470         '''
    471         Perform an HTTP HEAD request.
    472        
    473         Args:
    474             url (str): The URL to GET.
    475        
    476         Kwargs:
    477             headers (dict): A dictionary describing any headers you would like
    478             to add to the request. (eg. ``{'X-Test': 'testing'}``)
    479        
    480         Returns:
    481             An :class:`HttpResponse` object containing headers and other
    482             meta-information about the page.
    483         '''
    484         req = HeadRequest(url)
    485         req.add_header('User-Agent', self._user_agent)
    486         req.add_header('Accept', self._accept)
    487         for k, v in headers.items():
    488             req.add_header(k, v)
     280        req.add_unredirected_header('Host', req.get_host())
    489281        response = urllib2.urlopen(req)
    490282        return HttpResponse(response)
    491283
    492 
    493     def _fetch(self, url, form_data={}, headers={}, compression=True, cloudflare=None):
    494         '''
    495         Perform an HTTP GET or POST request.
    496        
    497         Args:
    498             url (str): The URL to GET or POST.
    499            
    500             form_data (dict): A dictionary of form data to POST. If empty, the
    501             request will be a GET, if it contains form data it will be a POST.
    502            
    503         Kwargs:
    504             headers (dict): A dictionary describing any headers you would like
    505             to add to the request. (eg. ``{'X-Test': 'testing'}``)
    506 
    507             compression (bool): If ``True`` (default), try to use gzip
    508             compression.
    509 
    510             cloudflare (bool): If ``None`` (default), use self.cloudflare as bool (False as default)
    511             On HTTPError 503 check for Cloudflare challenge and solve
    512         Returns:
    513             An :class:`HttpResponse` object containing headers and other
    514             meta-information about the page and the page content.
    515         '''
    516         if cloudflare is None:
    517             cloudflare = self.cloudflare
    518         encoding = ''
    519         req = urllib2.Request(url)
    520         if form_data:
    521             form_data = urllib.urlencode(form_data)
    522             req = urllib2.Request(url, form_data)
    523         req.add_header('User-Agent', self._user_agent)
    524         for k, v in headers.items():
    525             req.add_header(k, v)
    526         if compression:
    527             req.add_header('Accept-Encoding', 'gzip')
    528         if not cloudflare:
    529             response = urllib2.urlopen(req)
    530             return HttpResponse(response)
    531         else:
    532             try:
    533                 response = urllib2.urlopen(req)
    534                 return HttpResponse(response)
    535             except urllib2.HTTPError as e:
    536                 if e.code == 503:
    537                     try:
    538                         self._set_cloudflare(url, e.read(), form_data, headers, compression)
    539                     except:
    540                         raise urllib2.HTTPError, e
    541                     req = urllib2.Request(url)
    542                     if form_data:
    543                         form_data = urllib.urlencode(form_data)
    544                         req = urllib2.Request(url, form_data)
    545                     req.add_header('User-Agent', self._user_agent)
    546                     for k, v in headers.items():
    547                         req.add_header(k, v)
    548                     if compression:
    549                         req.add_header('Accept-Encoding', 'gzip')
    550                     response = urllib2.urlopen(req)
    551                     return HttpResponse(response)
    552                 else:
    553                     raise urllib2.HTTPError, e
    554 
    555 
    556284class HttpResponse:
    557285    '''
    558     This class represents a response from an HTTP request.
    559    
     286    This class represents a response from an HTTP request.
     287
    560288    The content is examined and every attempt is made to properly encode it to
    561289    Unicode.
    562    
     290
    563291    .. seealso::
    564         :meth:`Net.http_GET`, :meth:`Net.http_HEAD` and :meth:`Net.http_POST` 
     292        :meth:`Net.http_GET`, :meth:`Net.http_HEAD` and :meth:`Net.http_POST`
    565293    '''
    566    
     294
    567295    content = ''
    568     '''Unicode encoded string containing the body of the response.'''
    569    
    570    
     296    '''Unicode encoded string containing the body of the response.'''
     297
    571298    def __init__(self, response):
    572299        '''
     
    576303        '''
    577304        self._response = response
    578         html = response.read()
     305
     306    @property
     307    def content(self):
     308        html = self._response.read()
     309        encoding = None
    579310        try:
    580             if response.headers['content-encoding'].lower() == 'gzip':
     311            if self._response.headers['content-encoding'].lower() == 'gzip':
    581312                html = gzip.GzipFile(fileobj=StringIO.StringIO(html)).read()
    582313        except:
    583314            pass
    584        
     315
    585316        try:
    586             content_type = response.headers['content-type']
     317            content_type = self._response.headers['content-type']
    587318            if 'charset=' in content_type:
    588319                encoding = content_type.split('charset=')[-1]
     
    590321            pass
    591322
    592         r = re.search('<meta\s+http-equiv="Content-Type"\s+content="(?:.+?);' +
    593                       '\s+charset=(.+?)"', html, re.IGNORECASE)
     323        r = re.search('<meta\s+http-equiv="Content-Type"\s+content="(?:.+?);\s+charset=(.+?)"', html, re.IGNORECASE)
    594324        if r:
    595             encoding = r.group(1)
    596                    
    597         try:
    598             html = unicode(html, encoding)
    599         except:
    600             pass
     325            encoding = r.group(1)
    601326       
    602         #try:
    603         #    if response.headers['content-encoding'].lower() == 'gzip':
    604         #        r = re.search('<meta\s+http-equiv="Content-Type"\s+content="(?:.+?);' + '\s+charset=(.+?)"', html, re.IGNORECASE)
    605         #        if r:
    606         #               encoding = r.group(1)
    607         #               try:
    608         #                       html = unicode(html, encoding)
    609         #               except:
    610         #                       pass
    611         #except:
    612         #    pass
    613            
    614         self.content = html
    615    
    616    
    617     def get_headers(self):
    618         '''Returns a List of headers returned by the server.'''
    619         return self._response.info().headers
    620    
     327        if encoding is not None:
     328            try: html = html.decode(encoding)
     329            except: pass
     330        return html
    621331       
     332    def get_headers(self, as_dict=False):
     333        '''Returns headers returned by the server.
     334        If as_dict is True, headers are returned as a dictionary otherwise a list'''
     335        if as_dict:
     336            return dict([(item[0].title(), item[1]) for item in self._response.info().items()])
     337        else:
     338            return self._response.info().headers
     339
    622340    def get_url(self):
    623341        '''
    624         Return the URL of the resource retrieved, commonly used to determine if 
     342        Return the URL of the resource retrieved, commonly used to determine if
    625343        a redirect was followed.
    626344        '''
  • titan/mediathek/localhoster/streamango.py

    r41179 r42560  
    4545    def get_media_url(self, host, media_id):
    4646        web_url = self.get_url(host, media_id)
    47 
    48 #        headers = {'User-Agent': common.RAND_UA}
    49         headers = {'User-Agent': common.FF_USER_AGENT}
    50 
     47        headers = {'User-Agent': common.RAND_UA}
     48#        headers = {'User-Agent': common.FF_USER_AGENT}
    5149        html = self.net.http_GET(web_url, headers=headers).content
    52 #        print "html", html.encode('utf8')
    5350
    5451        if html:
    55             if re.search('>Sorry!<', html):
    56                 print "errormsg=Sorry!\n%s" % (str(re.compile('<p class="lead">*(.+?)</p>').findall(html)[0]))
    57 #                print 'errormsg=File was deleted.'
    58             else:
    59                 encoded = re.search('''srces\.push\({type:"video/mp4",src:\w+\('([^']+)',(\d+)''', html)
    60                 if encoded:
    61                     source = self.decode(encoded.group(1), int(encoded.group(2)))
    62                     if source:
    63                         source = "http:%s" % source if source.startswith("//") else source
    64                         source = source.split("/")
    65                         if not source[-1].isdigit():
    66                           source[-1] = re.sub('[^\d]', '', source[-1])
    67                         source = "/".join(source)
    68                         headers.update({'Referer': web_url})
    69 #                        return source + helpers.append_headers(headers)
    70                         print source + helpers.append_headers(headers)
    71        
    72 #        raise ResolverError("Unable to locate video")
    73                 else:
    74                     print 'errormsg=Unable to locate encoded video'
    75         else:
    76             print 'errormsg=Error 404 Website not found !'
     52#           srces.push( {type:"video/mp4",src:d('keDN2p3bx6LdzqrO1JcSxaDTkZoRyuDbzaLN0KzR2qHQza7NyJ/U4N8Lld4IltoMlN3Cn98Sl90JkN=SlpHQ/K3YnqnW3uENltwA',93),height:360,bitrate:576});
     53            encoded = re.search('''srces\.push\({type:"video/mp4",src:\w+\('([^']+)',(\d+)''', html)
     54            if not encoded:
     55                encoded = re.search('''srces\.push\( {type:"video/mp4",src:\w+\('([^']+)',(\d+)''', html)
     56
     57            if encoded:
     58                source = self.decode(encoded.group(1), int(encoded.group(2)))
     59                if source:
     60                    source = "http:%s" % source if source.startswith("//") else source
     61                    source = source.split("/")
     62                    if not source[-1].isdigit():
     63                      source[-1] = re.sub('[^\d]', '', source[-1])
     64                    source = "/".join(source)
     65
     66                    headers.update({'Referer': web_url})
     67                    print source + helpers.append_headers(headers)
    7768
    7869#<h1 style="text-align: center !important;">Sorry!</h1>
     
    114105
    115106    def get_url(self, host, media_id):
     107        if host.lower() == 'streamango.com':
     108            host = 'fruitstreams.com'
    116109        return 'http://%s/embed/%s' % (host, media_id)
    117110
Note: See TracChangeset for help on using the changeset viewer.