source: titan/mediathek/localhoster/lib/net.py @ 42560

Last change on this file since 42560 was 42560, checked in by obi, 17 months ago

tithek hoster fix streammago streamcherry

File size: 12.4 KB
Line 
1'''
2    common XBMC Module
3    Copyright (C) 2011 t0mm0
4
5    This program is free software: you can redistribute it and/or modify
6    it under the terms of the GNU General Public License as published by
7    the Free Software Foundation, either version 3 of the License, or
8    (at your option) any later version.
9
10    This program is distributed in the hope that it will be useful,
11    but WITHOUT ANY WARRANTY; without even the implied warranty of
12    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13    GNU General Public License for more details.
14
15    You should have received a copy of the GNU General Public License
16    along with this program.  If not, see <http://www.gnu.org/licenses/>.
17'''
18import random
19import cookielib
20import gzip
21import re
22import StringIO
23import urllib
24import urllib2
25import socket
26import time
27#import kodi
28
# Set a global default socket timeout (seconds) for all urllib2 requests.
# Useful for slow connections and hosters (e.g. Putlocker) that hang.
socket.setdefaulttimeout(10)
31
# Browser version pools for random User-Agent generation.
# NOTE: BR_VERS is indexed in parallel with RAND_UAS below — entry i holds
# the plausible versions for template i (0: Firefox, 1: Chrome, 2: IE11
# Trident, 3: older MSIE).
BR_VERS = [
    ['%s.0' % i for i in xrange(18, 50)],
    ['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111', '39.0.2171.71', '39.0.2171.95', '39.0.2171.99', '40.0.2214.93', '40.0.2214.111',
     '40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124', '44.0.2403.155', '44.0.2403.157', '45.0.2454.101', '45.0.2454.85', '46.0.2490.71',
     '46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80', '48.0.2564.116', '49.0.2623.112', '50.0.2661.86'],
    ['11.0'],
    ['8.0', '9.0', '10.0', '10.6']]
# Windows OS tokens and architecture features mixed into every template.
WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1', 'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
# User-Agent templates; {win_ver}/{feature}/{br_ver} are filled by get_ua().
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
            'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
            'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko',
            'Mozilla/5.0 (compatible; MSIE {br_ver}; {win_ver}{feature}; Trident/6.0)']
def get_ua():
    '''
    Return a freshly generated random desktop browser User-Agent string.

    Picks one of the RAND_UAS templates at random and fills it with a random
    Windows version, architecture feature string and a browser version drawn
    from the BR_VERS pool matching the chosen template.

    NOTE(review): the original kodi-based 7-day caching of the generated UA
    was already disabled (commented out); a new UA is produced on every call.

    Returns:
        str: A complete User-Agent header value.
    '''
    # Template index also selects the matching browser-version pool.
    index = random.randrange(len(RAND_UAS))
    versions = {'win_ver': random.choice(WIN_VERS),
                'feature': random.choice(FEATURES),
                'br_ver': random.choice(BR_VERS[index])}
    return RAND_UAS[index].format(**versions)
60
class Net:
    '''
    This class wraps :mod:`urllib2` and provides an easy way to make http
    requests while taking care of cookies, proxies, gzip compression and
    character encoding.

    Example::

        from addon.common.net import Net
        net = Net()
        response = net.http_GET('http://xbmc.org')
        print response.content
    '''

    # NOTE: _cj is a class attribute, so the cookie jar is shared by every
    # Net instance in this process — cookies set via one instance are sent
    # by all others. This matches the original behaviour; do not move it
    # into __init__ without checking callers that rely on cookie reuse.
    _cj = cookielib.LWPCookieJar()
    _proxy = None
    _user_agent = 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0'
    _http_debug = False

    def __init__(self, cookie_file='', proxy='', user_agent='', http_debug=False):
        '''
        Kwargs:
            cookie_file (str): Full path to a file to be used to load and save
            cookies to.

            proxy (str): Proxy setting (eg.
            ``'http://user:pass@example.com:1234'``)

            user_agent (str): String to use as the User Agent header. If not
            supplied the class will use a default user agent.

            http_debug (bool): Set ``True`` to have HTTP header info written to
            the XBMC log for all requests.
        '''
        if cookie_file:
            self.set_cookies(cookie_file)
        if proxy:
            self.set_proxy(proxy)
        if user_agent:
            self.set_user_agent(user_agent)
        self._http_debug = http_debug
        self._update_opener()

    def set_cookies(self, cookie_file):
        '''
        Set the cookie file and try to load cookies from it if it exists.

        Args:
            cookie_file (str): Full path to a file to be used to load and save
            cookies to.

        Returns:
            bool: ``True`` if cookies were loaded, ``False`` otherwise.
        '''
        try:
            self._cj.load(cookie_file, ignore_discard=True)
            self._update_opener()
            return True
        # Was a bare ``except:``; narrowed so SystemExit/KeyboardInterrupt
        # propagate while missing/corrupt cookie files stay best-effort.
        except Exception:
            return False

    def get_cookies(self, as_dict=False):
        '''Returns a dictionary containing all cookie information by domain.'''
        if as_dict:
            return dict((cookie.name, cookie.value) for cookie in self._cj)
        else:
            # Internal cookielib structure: {domain: {path: {name: Cookie}}}.
            return self._cj._cookies

    def save_cookies(self, cookie_file):
        '''
        Saves cookies to a file.

        Args:
            cookie_file (str): Full path to a file to save cookies to.
        '''
        self._cj.save(cookie_file, ignore_discard=True)

    def set_proxy(self, proxy):
        '''
        Args:
            proxy (str): Proxy setting (eg.
            ``'http://user:pass@example.com:1234'``)
        '''
        self._proxy = proxy
        self._update_opener()

    def get_proxy(self):
        '''Returns string containing proxy details.'''
        return self._proxy

    def set_user_agent(self, user_agent):
        '''
        Args:
            user_agent (str): String to use as the User Agent header.
        '''
        self._user_agent = user_agent

    def get_user_agent(self):
        '''Returns user agent string.'''
        return self._user_agent

    def _update_opener(self):
        '''
        Builds and installs a new opener to be used by all future calls to
        :func:`urllib2.urlopen`.

        Note: ``install_opener`` is process-global, so the most recently
        configured Net instance wins for plain ``urllib2.urlopen`` calls.
        '''
        if self._http_debug:
            http = urllib2.HTTPHandler(debuglevel=1)
        else:
            http = urllib2.HTTPHandler()

        handlers = [urllib2.HTTPCookieProcessor(self._cj)]
        # Only route through a proxy when one has been configured.
        if self._proxy:
            handlers.append(urllib2.ProxyHandler({'http': self._proxy}))
        handlers.append(urllib2.HTTPBasicAuthHandler())
        handlers.append(http)
        urllib2.install_opener(urllib2.build_opener(*handlers))

    def http_GET(self, url, headers=None, compression=True):
        '''
        Perform an HTTP GET request.

        Args:
            url (str): The URL to GET.

        Kwargs:
            headers (dict): A dictionary describing any headers you would like
            to add to the request. (eg. ``{'X-Test': 'testing'}``)

            compression (bool): If ``True`` (default), try to use gzip
            compression.

        Returns:
            An :class:`HttpResponse` object containing headers and other
            meta-information about the page and the page content.
        '''
        return self._fetch(url, headers=headers, compression=compression)

    def http_POST(self, url, form_data, headers=None, compression=True):
        '''
        Perform an HTTP POST request.

        Args:
            url (str): The URL to POST.

            form_data (dict or str): A dictionary of form data, or an already
            urlencoded string, to POST.

        Kwargs:
            headers (dict): A dictionary describing any headers you would like
            to add to the request. (eg. ``{'X-Test': 'testing'}``)

            compression (bool): If ``True`` (default), try to use gzip
            compression.

        Returns:
            An :class:`HttpResponse` object containing headers and other
            meta-information about the page and the page content.
        '''
        return self._fetch(url, form_data, headers=headers, compression=compression)

    def http_HEAD(self, url, headers=None):
        '''
        Perform an HTTP HEAD request.

        Args:
            url (str): The URL to GET.

        Kwargs:
            headers (dict): A dictionary describing any headers you would like
            to add to the request. (eg. ``{'X-Test': 'testing'}``)

        Returns:
            An :class:`HttpResponse` object containing headers and other
            meta-information about the page.
        '''
        # ``headers={}`` as a default was a mutable default argument; use the
        # None sentinel idiom instead (behaviour unchanged for callers).
        if headers is None:
            headers = {}
        request = urllib2.Request(url)
        request.get_method = lambda: 'HEAD'
        request.add_header('User-Agent', self._user_agent)
        for key in headers:
            request.add_header(key, headers[key])
        response = urllib2.urlopen(request)
        return HttpResponse(response)

    def _fetch(self, url, form_data=None, headers=None, compression=True):
        '''
        Perform an HTTP GET or POST request.

        Args:
            url (str): The URL to GET or POST.

            form_data (dict or str): A dictionary of form data to POST, or a
            pre-encoded body string. If empty/None, the request will be a GET;
            if it contains data it will be a POST.

        Kwargs:
            headers (dict): A dictionary describing any headers you would like
            to add to the request. (eg. ``{'X-Test': 'testing'}``)

            compression (bool): If ``True`` (default), try to use gzip
            compression.

        Returns:
            An :class:`HttpResponse` object containing headers and other
            meta-information about the page and the page content.
        '''
        if headers is None:
            headers = {}
        if form_data:
            # Pre-encoded strings pass through untouched; anything else is
            # urlencoded with doseq=True (sequence values become repeats).
            # (Replaces the original no-op ``form_data = form_data`` branch.)
            if not isinstance(form_data, basestring):
                form_data = urllib.urlencode(form_data, True)
            req = urllib2.Request(url, form_data)
        else:
            req = urllib2.Request(url)
        req.add_header('User-Agent', self._user_agent)
        for key in headers:
            req.add_header(key, headers[key])
        if compression:
            req.add_header('Accept-Encoding', 'gzip')
        # Some hosters check the Host header; set it without letting it leak
        # into redirected requests.
        req.add_unredirected_header('Host', req.get_host())
        response = urllib2.urlopen(req)
        return HttpResponse(response)
283
class HttpResponse:
    '''
    This class represents a response from an HTTP request.

    The content is examined and every attempt is made to properly decode it
    to Unicode.

    .. seealso::
        :meth:`Net.http_GET`, :meth:`Net.http_HEAD` and :meth:`Net.http_POST`
    '''
    # NOTE: the original also had a dead ``content = ''`` class attribute
    # here; it was shadowed by the ``content`` property below and is removed.

    def __init__(self, response):
        '''
        Args:
            response (:class:`mimetools.Message`): The object returned by a call
            to :func:`urllib2.urlopen`.
        '''
        self._response = response
        # Cache for the decoded body. The underlying response can only be
        # read once, so without caching a second ``.content`` access would
        # return an empty string.
        self._content = None

    @property
    def content(self):
        '''Unicode string containing the body of the response (cached).'''
        if self._content is not None:
            return self._content
        html = self._response.read()
        encoding = None
        # Transparently decompress gzip-encoded bodies (we request gzip in
        # Net._fetch); missing/odd headers fall through untouched.
        try:
            if self._response.headers['content-encoding'].lower() == 'gzip':
                html = gzip.GzipFile(fileobj=StringIO.StringIO(html)).read()
        except Exception:
            pass

        # Charset from the Content-Type header, if any.
        try:
            content_type = self._response.headers['content-type']
            if 'charset=' in content_type:
                encoding = content_type.split('charset=')[-1]
        except Exception:
            pass

        # An in-document <meta http-equiv="Content-Type"> charset overrides
        # the transport header. Raw string: original relied on '\s' surviving
        # as a literal backslash escape.
        r = re.search(r'<meta\s+http-equiv="Content-Type"\s+content="(?:.+?);\s+charset=(.+?)"', html, re.IGNORECASE)
        if r:
            encoding = r.group(1)

        if encoding is not None:
            # Best effort: keep the raw bytes if the declared charset lies.
            try:
                html = html.decode(encoding)
            except Exception:
                pass
        self._content = html
        return html

    def get_headers(self, as_dict=False):
        '''Returns headers returned by the server.
        If as_dict is True, headers are returned as a dictionary otherwise a list'''
        if as_dict:
            return dict([(item[0].title(), item[1]) for item in self._response.info().items()])
        else:
            return self._response.info().headers

    def get_url(self):
        '''
        Return the URL of the resource retrieved, commonly used to determine if
        a redirect was followed.
        '''
        return self._response.geturl()
Note: See TracBrowser for help on using the repository browser.