30ffbcd3e83ae6b0f6578db6c5e1e951e3fdfb10
[vuplus_dvbapp-plugin] / rsdownloader / src / Request.py
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3
4
5 """
6 authored by: RaNaN
7 """
8 import base64
9 import cookielib
10 import time
11 import urllib
12 import urllib2
13 from gzip import GzipFile
14
15 from Keepalive import HTTPHandler
16 from cStringIO import StringIO
17
18 """
19     handles all outgoing HTTP-Requests of the Server
20     Usage: create Request Instance
21     use retrieveURL and call it with a url at least
22     additionaly you can firstly pass the get and secondly the post data in form of a dictonary
23     when the last argument is true the handler simulate a http referer with the last called url.
24     retrieveUrl returns response as string
25
26 """
class AbortDownload(Exception):
    """Raised from Request.download() when ``abort`` is set mid-transfer."""
29
class Request:
    """HTTP client wrapper for all outgoing requests of the downloader.

    Keeps two urllib2 openers: ``self.opener`` (keep-alive handler plus a
    cookie processor) for page loads, and ``self.downloader`` (plain) for
    file transfers.  Progress of the single active transfer is tracked in
    the ``dl_*`` attributes so callers can poll speed / ETA.
    """

    def __init__(self):
        # Download progress bookkeeping (one transfer at a time).
        self.dl_time = 0        # time.time() when the transfer started
        self.dl_finished = 0    # time.time() when the transfer completed
        self.dl_size = 0        # expected size in bytes (0 if unknown)
        self.dl_arrived = 0     # bytes received so far
        self.dl = False         # True while a download is running

        self.abort = False      # set externally to cancel the download

        self.cookies = []       # cookies collected from responses
        self.lastURL = None     # previous URL, replayed as the Referer
        self.cj = cookielib.CookieJar()
        handler = HTTPHandler()
        self.opener = urllib2.build_opener(handler, urllib2.HTTPCookieProcessor(self.cj))
        self.downloader = urllib2.build_opener()

        self.opener.addheaders = [
        ("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"),
        ("Accept-Encoding", "deflate"),
        ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
        ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7"),
        ("Connection", "keep-alive"),
        ("Keep-Alive", "300")]

        self.downloader.addheaders = [
        ("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"),
        ("Accept-Encoding", "deflate"),
        ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
        ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7")]

    def load(self, url, get=None, post=None, ref=True, cookies=False):
        """Fetch *url* and return the response body as a string.

        get     -- optional dict of query parameters (urlencoded and
                   appended to *url*)
        post    -- optional dict of POST data; when given the request
                   becomes a POST
        ref     -- when True, send the previously loaded URL as Referer
        cookies -- when True, replay all remembered cookies on the request

        Gzip-compressed responses are decompressed transparently.
        """
        # Defaults changed from {} to None: both are falsy and the
        # parameters are only reassigned, so behaviour is identical while
        # avoiding the mutable-default-argument pitfall.
        if post:
            post = urllib.urlencode(post)
        else:
            post = None

        if get:
            get = urllib.urlencode(get)
        else:
            get = ""

        # NOTE(review): the query string is appended without a '?'
        # separator; callers presumably pass URLs that already end in '?'
        # (or no get data at all).  Kept as-is for compatibility — confirm
        # against callers before changing.
        url = url + get
        req = urllib2.Request(url, data=post)

        if ref and self.lastURL is not None:
            req.add_header("Referer", self.lastURL)

        if cookies:
            self.add_cookies(req)

        rep = self.opener.open(req)

        # Remember every response cookie so later requests can replay them
        # via add_cookies().  (Duplicates are never pruned.)
        for cookie in self.cj.make_cookies(rep, req):
            self.cookies.append(cookie)

        output = rep.read()

        # Transparently inflate gzip'd bodies.  ('in' replaces the
        # py2-only has_key(); note only "deflate" is advertised in
        # Accept-Encoding above, yet gzip is what gets handled here.)
        if "content-encoding" in rep.headers:
            if rep.headers["content-encoding"] == "gzip":
                output = GzipFile('', 'r', 0, StringIO(output)).read()

        self.lastURL = url

        return output

    def add_auth(self, user, pw):
        """Send HTTP Basic authentication on every downloader request.

        encodestring() appends a trailing newline which must not appear in
        a header value, hence the [:-1].
        """
        # Tuple instead of list, for consistency with the other
        # addheaders entries set in __init__.
        self.downloader.addheaders.append(('Authorization', 'Basic ' + base64.encodestring(user + ':' + pw)[:-1]))

    def add_cookies(self, req):
        """Attach all remembered cookies to *req* as a single Cookie header."""
        cookie_head = ""
        for cookie in self.cookies:
            cookie_head += cookie.name + "=" + cookie.value + "; "
        req.add_header("Cookie", cookie_head)

    def clear_cookies(self):
        """Forget every cookie remembered so far."""
        del self.cookies[:]

    def add_proxy(self, protocol, adress):
        """Route both openers through the proxy *adress* for *protocol*."""
        handler = urllib2.ProxyHandler({protocol: adress})
        self.opener.add_handler(handler)
        self.downloader.add_handler(handler)

    def download(self, url, filename, get=None, post=None, ref=True, cookies=False):
        """Download *url* into the local file *filename*.

        Returns True on completion, or None when another download is
        already running on this instance.  Raises AbortDownload when
        ``self.abort`` is set during the transfer.
        """
        if post:
            post = urllib.urlencode(post)
        else:
            post = None

        if get:
            get = urllib.urlencode(get)
        else:
            get = ""

        # NOTE(review): as in load(), no '?' is inserted before get data.
        url = url + get
        req = urllib2.Request(url, data=post)

        if ref and self.lastURL is not None:
            req.add_header("Referer", self.lastURL)

        if cookies:
            self.add_cookies(req)
            # NOTE(review): this preliminary request via self.opener runs
            # only when cookies are enabled — it looks like an indentation
            # slip in the original, but it is preserved to keep behaviour
            # unchanged.
            rep = self.opener.open(req)

            for cookie in self.cj.make_cookies(rep, req):
                self.cookies.append(cookie)

        if not self.dl:
            self.dl = True
            out_file = open(filename, 'wb')  # renamed: 'file' shadowed the builtin
            try:
                conn = self.downloader.open(req, post)
                if "content-length" in conn.headers:
                    self.dl_size = int(conn.headers["content-length"])
                else:
                    self.dl_size = 0
                self.dl_arrived = 0
                self.dl_time = time.time()
                for chunk in conn:
                    if self.abort:
                        raise AbortDownload
                    self.dl_arrived += len(chunk)
                    out_file.write(chunk)
            finally:
                # Fix: previously the file handle leaked and self.dl
                # stayed True when AbortDownload (or any error) fired,
                # blocking every later download on this instance.
                out_file.close()
                self.dl = False
            self.dl_finished = time.time()
            return True

    def get_speed(self):
        """Current (or final) transfer rate in KiB/s; 0 before any data moved."""
        try:
            return (self.dl_arrived / ((time.time() if self.dl else self.dl_finished) - self.dl_time)) / 1024
        except ZeroDivisionError:
            # No elapsed time yet (narrowed from a bare except).
            return 0

    def get_ETA(self):
        """Estimated seconds until completion; 0 when it cannot be computed."""
        try:
            return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
        except ZeroDivisionError:
            # Nothing arrived yet (narrowed from a bare except).
            return 0

    def kB_left(self):
        """Remaining amount of the current transfer in KiB."""
        return (self.dl_size - self.dl_arrived) / 1024
180
181 if __name__ == "__main__":
182     import doctest
183     doctest.testmod()