+++ /dev/null
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the
-# Free Software Foundation, Inc.,
-# 59 Temple Place, Suite 330,
-# Boston, MA 02111-1307 USA
-
-# This file is part of urlgrabber, a high-level cross-protocol url-grabber
-# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
-
-"""An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
-
->>> import urllib2
->>> from keepalive import HTTPHandler
->>> keepalive_handler = HTTPHandler()
->>> opener = urllib2.build_opener(keepalive_handler)
->>> urllib2.install_opener(opener)
->>>
->>> fo = urllib2.urlopen('http://www.python.org')
-
-If a connection to a given host is requested, and all of the existing
-connections are still in use, another connection will be opened. If
-the handler tries to use an existing connection but it fails in some
-way, it will be closed and removed from the pool.
-
-To remove the handler, simply re-run build_opener with no arguments, and
-install that opener.
-
-You can explicitly close connections by using the close_connection()
-method of the returned file-like object (described below) or you can
-use the handler methods:
-
- close_connection(host)
- close_all()
- open_connections()
-
-NOTE: using the close_connection and close_all methods of the handler
-should be done with care when using multiple threads.
- * there is nothing that prevents another thread from creating new
- connections immediately after connections are closed
- * no checks are done to prevent in-use connections from being closed
-
->>> keepalive_handler.close_all()
-
-EXTRA ATTRIBUTES AND METHODS
-
- Upon a status of 200, the object returned has a few additional
- attributes and methods, which should not be used if you want to
- remain consistent with the normal urllib2-returned objects:
-
- close_connection() - close the connection to the host
- readlines() - you know, readlines()
- status - the return status (ie 404)
- reason - english translation of status (ie 'File not found')
-
- If you want the best of both worlds, use this inside an
- AttributeError-catching try:
-
- >>> try: status = fo.status
- >>> except AttributeError: status = None
-
- Unfortunately, these are ONLY there if status == 200, so it's not
- easy to distinguish between non-200 responses. The reason is that
- urllib2 tries to do clever things with error codes 301, 302, 401,
- and 407, and it wraps the object upon return.
-
- For python versions earlier than 2.4, you can avoid this fancy error
- handling by setting the module-level global HANDLE_ERRORS to zero.
- You see, prior to 2.4, it's the HTTP Handler's job to determine what
- to handle specially, and what to just pass up. HANDLE_ERRORS == 0
- means "pass everything up". In python 2.4, however, this job no
- longer belongs to the HTTP Handler and is now done by a NEW handler,
- HTTPErrorProcessor. Here's the bottom line:
-
- python version < 2.4
- HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
- errors
- HANDLE_ERRORS == 0 pass everything up, error processing is
- left to the calling code
- python version >= 2.4
- HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
- HANDLE_ERRORS == 0 (default) pass everything up, let the
- other handlers (specifically,
- HTTPErrorProcessor) decide what to do
-
- In practice, setting the variable either way makes little difference
- in python 2.4, so for the most consistent behavior across versions,
- you probably just want to use the defaults, which will give you
- exceptions on errors.
-
-"""
-
-# $Id$
-
-import urllib2
-import httplib
-import socket
-import thread
-
-DEBUG = None
-
-import sslfactory
-
-import sys
-if sys.version_info < (2, 4): HANDLE_ERRORS = 1
-else: HANDLE_ERRORS = 0
-
-class ConnectionManager:
- """
- The connection manager must be able to:
- * keep track of all existing
- """
- def __init__(self):
- self._lock = thread.allocate_lock()
- self._hostmap = {} # map hosts to a list of connections
- self._connmap = {} # map connections to host
- self._readymap = {} # map connection to ready state
-
- def add(self, host, connection, ready):
- self._lock.acquire()
- try:
- if not self._hostmap.has_key(host): self._hostmap[host] = []
- self._hostmap[host].append(connection)
- self._connmap[connection] = host
- self._readymap[connection] = ready
- finally:
- self._lock.release()
-
- def remove(self, connection):
- self._lock.acquire()
- try:
- try:
- host = self._connmap[connection]
- except KeyError:
- pass
- else:
- del self._connmap[connection]
- del self._readymap[connection]
- self._hostmap[host].remove(connection)
- if not self._hostmap[host]: del self._hostmap[host]
- finally:
- self._lock.release()
-
- def set_ready(self, connection, ready):
- try: self._readymap[connection] = ready
- except KeyError: pass
-
- def get_ready_conn(self, host):
- conn = None
- self._lock.acquire()
- try:
- if self._hostmap.has_key(host):
- for c in self._hostmap[host]:
- if self._readymap[c]:
- self._readymap[c] = 0
- conn = c
- break
- finally:
- self._lock.release()
- return conn
-
- def get_all(self, host=None):
- if host:
- return list(self._hostmap.get(host, []))
- else:
- return dict(self._hostmap)
-
-class KeepAliveHandler:
- def __init__(self):
- self._cm = ConnectionManager()
-
- #### Connection Management
- def open_connections(self):
- """return a list of connected hosts and the number of connections
- to each. [('foo.com:80', 2), ('bar.org', 1)]"""
- return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
-
- def close_connection(self, host):
- """close connection(s) to <host>
- host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
- no error occurs if there is no connection to that host."""
- for h in self._cm.get_all(host):
- self._cm.remove(h)
- h.close()
-
- def close_all(self):
- """close all open connections"""
- for host, conns in self._cm.get_all().items():
- for h in conns:
- self._cm.remove(h)
- h.close()
-
- def _request_closed(self, request, host, connection):
- """tells us that this request is now closed and the the
- connection is ready for another request"""
- self._cm.set_ready(connection, 1)
-
- def _remove_connection(self, host, connection, close=0):
- if close: connection.close()
- self._cm.remove(connection)
-
- #### Transaction Execution
- def do_open(self, req):
- host = req.get_host()
- if not host:
- raise urllib2.URLError('no host given')
-
- try:
- h = self._cm.get_ready_conn(host)
- while h:
- r = self._reuse_connection(h, req, host)
-
- # if this response is non-None, then it worked and we're
- # done. Break out, skipping the else block.
- if r: break
-
- # connection is bad - possibly closed by server
- # discard it and ask for the next free connection
- h.close()
- self._cm.remove(h)
- h = self._cm.get_ready_conn(host)
- else:
- # no (working) free connections were found. Create a new one.
- h = self._get_connection(host)
- if DEBUG: DEBUG.info("creating new connection to %s (%d)",
- host, id(h))
- self._cm.add(host, h, 0)
- self._start_transaction(h, req)
- r = h.getresponse()
- except (socket.error, httplib.HTTPException), err:
- raise urllib2.URLError(err)
-
- # if not a persistent connection, don't try to reuse it
- if r.will_close: self._cm.remove(h)
-
- if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
- r._handler = self
- r._host = host
- r._url = req.get_full_url()
- r._connection = h
- r.code = r.status
- r.headers = r.msg
- r.msg = r.reason
-
- if r.status == 200 or not HANDLE_ERRORS:
- return r
- else:
- return self.parent.error('http', req, r,
- r.status, r.msg, r.headers)
-
- def _reuse_connection(self, h, req, host):
- """start the transaction with a re-used connection
- return a response object (r) upon success or None on failure.
- This DOES not close or remove bad connections in cases where
- it returns. However, if an unexpected exception occurs, it
- will close and remove the connection before re-raising.
- """
- try:
- self._start_transaction(h, req)
- r = h.getresponse()
- # note: just because we got something back doesn't mean it
- # worked. We'll check the version below, too.
- except (socket.error, httplib.HTTPException):
- r = None
- except:
- # adding this block just in case we've missed
- # something we will still raise the exception, but
- # lets try and close the connection and remove it
- # first. We previously got into a nasty loop
- # where an exception was uncaught, and so the
- # connection stayed open. On the next try, the
- # same exception was raised, etc. The tradeoff is
- # that it's now possible this call will raise
- # a DIFFERENT exception
- if DEBUG: DEBUG.error("unexpected exception - closing " + \
- "connection to %s (%d)", host, id(h))
- self._cm.remove(h)
- h.close()
- raise
-
- if r is None or r.version == 9:
- # httplib falls back to assuming HTTP 0.9 if it gets a
- # bad header back. This is most likely to happen if
- # the socket has been closed by the server since we
- # last used the connection.
- if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)",
- host, id(h))
- r = None
- else:
- if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h))
-
- return r
-
- def _start_transaction(self, h, req):
- try:
- if req.has_data():
- data = req.get_data()
- h.putrequest('POST', req.get_selector(), skip_accept_encoding=1)
- if not req.headers.has_key('Content-type'):
- h.putheader('Content-type',
- 'application/x-www-form-urlencoded')
- if not req.headers.has_key('Content-length'):
- h.putheader('Content-length', '%d' % len(data))
- else:
- h.putrequest('GET', req.get_selector(), skip_accept_encoding=1)
- except (socket.error, httplib.HTTPException), err:
- raise urllib2.URLError(err)
-
- for args in self.parent.addheaders:
- h.putheader(*args)
- for k, v in req.headers.items():
- h.putheader(k, v)
- h.endheaders()
- if req.has_data():
- h.send(data)
-
- def _get_connection(self, host):
- return NotImplementedError
-
-class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
- def __init__(self):
- KeepAliveHandler.__init__(self)
-
- def http_open(self, req):
- return self.do_open(req)
-
- def _get_connection(self, host):
- return HTTPConnection(host)
-
-class HTTPSHandler(KeepAliveHandler, urllib2.HTTPSHandler):
- def __init__(self, ssl_factory=None):
- KeepAliveHandler.__init__(self)
- if not ssl_factory:
- ssl_factory = sslfactory.get_factory()
- self._ssl_factory = ssl_factory
-
- def https_open(self, req):
- return self.do_open(req)
-
- def _get_connection(self, host):
- return self._ssl_factory.get_https_connection(host)
-
-class HTTPResponse(httplib.HTTPResponse):
- # we need to subclass HTTPResponse in order to
- # 1) add readline() and readlines() methods
- # 2) add close_connection() methods
- # 3) add info() and geturl() methods
-
- # in order to add readline(), read must be modified to deal with a
- # buffer. example: readline must read a buffer and then spit back
- # one line at a time. The only real alternative is to read one
- # BYTE at a time (ick). Once something has been read, it can't be
- # put back (ok, maybe it can, but that's even uglier than this),
- # so if you THEN do a normal read, you must first take stuff from
- # the buffer.
-
- # the read method wraps the original to accomodate buffering,
- # although read() never adds to the buffer.
- # Both readline and readlines have been stolen with almost no
- # modification from socket.py
-
-
- def __init__(self, sock, debuglevel=0, strict=0, method=None):
- if method: # the httplib in python 2.3 uses the method arg
- httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
- else: # 2.2 doesn't
- httplib.HTTPResponse.__init__(self, sock, debuglevel)
- self.fileno = sock.fileno
- self.code = None
- self._rbuf = ''
- self._rbufsize = 8096
- self._handler = None # inserted by the handler later
- self._host = None # (same)
- self._url = None # (same)
- self._connection = None # (same)
-
- _raw_read = httplib.HTTPResponse.read
-
- def close(self):
- if self.fp:
- self.fp.close()
- self.fp = None
- if self._handler:
- self._handler._request_closed(self, self._host,
- self._connection)
-
- def close_connection(self):
- self._handler._remove_connection(self._host, self._connection, close=1)
- self.close()
-
- def info(self):
- return self.headers
-
- def geturl(self):
- return self._url
-
- def read(self, amt=None):
- # the _rbuf test is only in this first if for speed. It's not
- # logically necessary
- if self._rbuf and not amt is None:
- L = len(self._rbuf)
- if amt > L:
- amt -= L
- else:
- s = self._rbuf[:amt]
- self._rbuf = self._rbuf[amt:]
- return s
-
- s = self._rbuf + self._raw_read(amt)
- self._rbuf = ''
- return s
-
- def readline(self, limit=-1):
- data = ""
- i = self._rbuf.find('\n')
- while i < 0 and not (0 < limit <= len(self._rbuf)):
- new = self._raw_read(self._rbufsize)
- if not new: break
- i = new.find('\n')
- if i >= 0: i = i + len(self._rbuf)
- self._rbuf = self._rbuf + new
- if i < 0: i = len(self._rbuf)
- else: i = i+1
- if 0 <= limit < len(self._rbuf): i = limit
- data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
- return data
-
- def readlines(self, sizehint = 0):
- total = 0
- list = []
- while 1:
- line = self.readline()
- if not line: break
- list.append(line)
- total += len(line)
- if sizehint and total >= sizehint:
- break
- return list
-
-
-class HTTPConnection(httplib.HTTPConnection):
- # use the modified response class
- response_class = HTTPResponse
-
-class HTTPSConnection(httplib.HTTPSConnection):
- response_class = HTTPResponse
-
-#########################################################################
-##### TEST FUNCTIONS
-#########################################################################
-
-def error_handler(url):
- global HANDLE_ERRORS
- orig = HANDLE_ERRORS
- keepalive_handler = HTTPHandler()
- opener = urllib2.build_opener(keepalive_handler)
- urllib2.install_opener(opener)
- pos = {0: 'off', 1: 'on'}
- for i in (0, 1):
- print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
- HANDLE_ERRORS = i
- try:
- fo = urllib2.urlopen(url)
- foo = fo.read()
- fo.close()
- try: status, reason = fo.status, fo.reason
- except AttributeError: status, reason = None, None
- except IOError, e:
- print " EXCEPTION: %s" % e
- raise
- else:
- print " status = %s, reason = %s" % (status, reason)
- HANDLE_ERRORS = orig
- hosts = keepalive_handler.open_connections()
- print "open connections:", hosts
- keepalive_handler.close_all()
-
-def continuity(url):
- import md5
- format = '%25s: %s'
-
- # first fetch the file with the normal http handler
- opener = urllib2.build_opener()
- urllib2.install_opener(opener)
- fo = urllib2.urlopen(url)
- foo = fo.read()
- fo.close()
- m = md5.new(foo)
- print format % ('normal urllib', m.hexdigest())
-
- # now install the keepalive handler and try again
- opener = urllib2.build_opener(HTTPHandler())
- urllib2.install_opener(opener)
-
- fo = urllib2.urlopen(url)
- foo = fo.read()
- fo.close()
- m = md5.new(foo)
- print format % ('keepalive read', m.hexdigest())
-
- fo = urllib2.urlopen(url)
- foo = ''
- while 1:
- f = fo.readline()
- if f: foo = foo + f
- else: break
- fo.close()
- m = md5.new(foo)
- print format % ('keepalive readline', m.hexdigest())
-
-def comp(N, url):
- print ' making %i connections to:\n %s' % (N, url)
-
- sys.stdout.write(' first using the normal urllib handlers')
- # first use normal opener
- opener = urllib2.build_opener()
- urllib2.install_opener(opener)
- t1 = fetch(N, url)
- print ' TIME: %.3f s' % t1
-
- sys.stdout.write(' now using the keepalive handler ')
- # now install the keepalive handler and try again
- opener = urllib2.build_opener(HTTPHandler())
- urllib2.install_opener(opener)
- t2 = fetch(N, url)
- print ' TIME: %.3f s' % t2
- print ' improvement factor: %.2f' % (t1/t2, )
-
-def fetch(N, url, delay=0):
- import time
- lens = []
- starttime = time.time()
- for i in range(N):
- if delay and i > 0: time.sleep(delay)
- fo = urllib2.urlopen(url)
- foo = fo.read()
- fo.close()
- lens.append(len(foo))
- diff = time.time() - starttime
-
- j = 0
- for i in lens[1:]:
- j = j + 1
- if not i == lens[0]:
- print "WARNING: inconsistent length on read %i: %i" % (j, i)
-
- return diff
-
-def test_timeout(url):
- global DEBUG
- dbbackup = DEBUG
- class FakeLogger:
- def debug(self, msg, *args): print msg % args
- info = warning = error = debug
- DEBUG = FakeLogger()
- print " fetching the file to establish a connection"
- fo = urllib2.urlopen(url)
- data1 = fo.read()
- fo.close()
-
- i = 20
- print " waiting %i seconds for the server to close the connection" % i
- while i > 0:
- sys.stdout.write('\r %2i' % i)
- sys.stdout.flush()
- time.sleep(1)
- i -= 1
- sys.stderr.write('\r')
-
- print " fetching the file a second time"
- fo = urllib2.urlopen(url)
- data2 = fo.read()
- fo.close()
-
- if data1 == data2:
- print ' data are identical'
- else:
- print ' ERROR: DATA DIFFER'
-
- DEBUG = dbbackup
-
-
-def test(url, N=10):
- print "checking error hander (do this on a non-200)"
- try: error_handler(url)
- except IOError, e:
- print "exiting - exception will prevent further tests"
- sys.exit()
- print
- print "performing continuity test (making sure stuff isn't corrupted)"
- continuity(url)
- print
- print "performing speed comparison"
- comp(N, url)
- print
- print "performing dropped-connection check"
- test_timeout(url)
-
-if __name__ == '__main__':
- import time
- import sys
- try:
- N = int(sys.argv[1])
- url = sys.argv[2]
- except:
- print "%s <integer> <url>" % sys.argv[0]
- else:
- test(url, N)
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc.,
+# 59 Temple Place, Suite 330,
+# Boston, MA 02111-1307 USA
+
+# This file is part of urlgrabber, a high-level cross-protocol url-grabber
+# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
+
+"""An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
+
+>>> import urllib2
+>>> from keepalive import HTTPHandler
+>>> keepalive_handler = HTTPHandler()
+>>> opener = urllib2.build_opener(keepalive_handler)
+>>> urllib2.install_opener(opener)
+>>>
+>>> fo = urllib2.urlopen('http://www.python.org')
+
+If a connection to a given host is requested, and all of the existing
+connections are still in use, another connection will be opened. If
+the handler tries to use an existing connection but it fails in some
+way, it will be closed and removed from the pool.
+
+To remove the handler, simply re-run build_opener with no arguments, and
+install that opener.
+
+You can explicitly close connections by using the close_connection()
+method of the returned file-like object (described below) or you can
+use the handler methods:
+
+ close_connection(host)
+ close_all()
+ open_connections()
+
+NOTE: using the close_connection and close_all methods of the handler
+should be done with care when using multiple threads.
+ * there is nothing that prevents another thread from creating new
+ connections immediately after connections are closed
+ * no checks are done to prevent in-use connections from being closed
+
+>>> keepalive_handler.close_all()
+
+EXTRA ATTRIBUTES AND METHODS
+
+ Upon a status of 200, the object returned has a few additional
+ attributes and methods, which should not be used if you want to
+ remain consistent with the normal urllib2-returned objects:
+
+ close_connection() - close the connection to the host
+ readlines() - you know, readlines()
+ status - the return status (ie 404)
+  reason - English translation of status (ie 'File not found')
+
+ If you want the best of both worlds, use this inside an
+ AttributeError-catching try:
+
+ >>> try: status = fo.status
+ >>> except AttributeError: status = None
+
+ Unfortunately, these are ONLY there if status == 200, so it's not
+ easy to distinguish between non-200 responses. The reason is that
+ urllib2 tries to do clever things with error codes 301, 302, 401,
+ and 407, and it wraps the object upon return.
+
+ For python versions earlier than 2.4, you can avoid this fancy error
+ handling by setting the module-level global HANDLE_ERRORS to zero.
+ You see, prior to 2.4, it's the HTTP Handler's job to determine what
+ to handle specially, and what to just pass up. HANDLE_ERRORS == 0
+ means "pass everything up". In python 2.4, however, this job no
+ longer belongs to the HTTP Handler and is now done by a NEW handler,
+ HTTPErrorProcessor. Here's the bottom line:
+
+ python version < 2.4
+ HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
+ errors
+ HANDLE_ERRORS == 0 pass everything up, error processing is
+ left to the calling code
+ python version >= 2.4
+ HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
+ HANDLE_ERRORS == 0 (default) pass everything up, let the
+ other handlers (specifically,
+ HTTPErrorProcessor) decide what to do
+
+ In practice, setting the variable either way makes little difference
+ in python 2.4, so for the most consistent behavior across versions,
+ you probably just want to use the defaults, which will give you
+ exceptions on errors.
+
+"""
+
+# $Id$
+
+import urllib2
+import httplib
+import socket
+import thread
+
+DEBUG = None
+
+import sslfactory
+
+import sys
+if sys.version_info < (2, 4): HANDLE_ERRORS = 1
+else: HANDLE_ERRORS = 0
+
+class ConnectionManager:
+ """
+ The connection manager must be able to:
+      * keep track of all existing connections
+ """
+ def __init__(self):
+ self._lock = thread.allocate_lock()
+ self._hostmap = {} # map hosts to a list of connections
+ self._connmap = {} # map connections to host
+ self._readymap = {} # map connection to ready state
+
+ def add(self, host, connection, ready):
+ self._lock.acquire()
+ try:
+ if not self._hostmap.has_key(host): self._hostmap[host] = []
+ self._hostmap[host].append(connection)
+ self._connmap[connection] = host
+ self._readymap[connection] = ready
+ finally:
+ self._lock.release()
+
+ def remove(self, connection):
+ self._lock.acquire()
+ try:
+ try:
+ host = self._connmap[connection]
+ except KeyError:
+ pass
+ else:
+ del self._connmap[connection]
+ del self._readymap[connection]
+ self._hostmap[host].remove(connection)
+ if not self._hostmap[host]: del self._hostmap[host]
+ finally:
+ self._lock.release()
+
+ def set_ready(self, connection, ready):
+ try: self._readymap[connection] = ready
+ except KeyError: pass
+
+ def get_ready_conn(self, host):
+ conn = None
+ self._lock.acquire()
+ try:
+ if self._hostmap.has_key(host):
+ for c in self._hostmap[host]:
+ if self._readymap[c]:
+ self._readymap[c] = 0
+ conn = c
+ break
+ finally:
+ self._lock.release()
+ return conn
+
+ def get_all(self, host=None):
+ if host:
+ return list(self._hostmap.get(host, []))
+ else:
+ return dict(self._hostmap)
+
+class KeepAliveHandler:
+ def __init__(self):
+ self._cm = ConnectionManager()
+
+ #### Connection Management
+ def open_connections(self):
+ """return a list of connected hosts and the number of connections
+ to each. [('foo.com:80', 2), ('bar.org', 1)]"""
+ return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
+
+ def close_connection(self, host):
+ """close connection(s) to <host>
+ host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
+ no error occurs if there is no connection to that host."""
+ for h in self._cm.get_all(host):
+ self._cm.remove(h)
+ h.close()
+
+ def close_all(self):
+ """close all open connections"""
+ for host, conns in self._cm.get_all().items():
+ for h in conns:
+ self._cm.remove(h)
+ h.close()
+
+ def _request_closed(self, request, host, connection):
+        """tells us that this request is now closed and the
+ connection is ready for another request"""
+ self._cm.set_ready(connection, 1)
+
+ def _remove_connection(self, host, connection, close=0):
+ if close: connection.close()
+ self._cm.remove(connection)
+
+ #### Transaction Execution
+ def do_open(self, req):
+ host = req.get_host()
+ if not host:
+ raise urllib2.URLError('no host given')
+
+ try:
+ h = self._cm.get_ready_conn(host)
+ while h:
+ r = self._reuse_connection(h, req, host)
+
+ # if this response is non-None, then it worked and we're
+ # done. Break out, skipping the else block.
+ if r: break
+
+ # connection is bad - possibly closed by server
+ # discard it and ask for the next free connection
+ h.close()
+ self._cm.remove(h)
+ h = self._cm.get_ready_conn(host)
+ else:
+ # no (working) free connections were found. Create a new one.
+ h = self._get_connection(host)
+ if DEBUG: DEBUG.info("creating new connection to %s (%d)",
+ host, id(h))
+ self._cm.add(host, h, 0)
+ self._start_transaction(h, req)
+ r = h.getresponse()
+ except (socket.error, httplib.HTTPException), err:
+ raise urllib2.URLError(err)
+
+ # if not a persistent connection, don't try to reuse it
+ if r.will_close: self._cm.remove(h)
+
+ if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
+ r._handler = self
+ r._host = host
+ r._url = req.get_full_url()
+ r._connection = h
+ r.code = r.status
+ r.headers = r.msg
+ r.msg = r.reason
+
+ if r.status == 200 or not HANDLE_ERRORS:
+ return r
+ else:
+ return self.parent.error('http', req, r,
+ r.status, r.msg, r.headers)
+
+ def _reuse_connection(self, h, req, host):
+ """start the transaction with a re-used connection
+ return a response object (r) upon success or None on failure.
+ This DOES not close or remove bad connections in cases where
+ it returns. However, if an unexpected exception occurs, it
+ will close and remove the connection before re-raising.
+ """
+ try:
+ self._start_transaction(h, req)
+ r = h.getresponse()
+ # note: just because we got something back doesn't mean it
+ # worked. We'll check the version below, too.
+ except (socket.error, httplib.HTTPException):
+ r = None
+ except:
+ # adding this block just in case we've missed
+ # something we will still raise the exception, but
+ # lets try and close the connection and remove it
+ # first. We previously got into a nasty loop
+ # where an exception was uncaught, and so the
+ # connection stayed open. On the next try, the
+ # same exception was raised, etc. The tradeoff is
+ # that it's now possible this call will raise
+ # a DIFFERENT exception
+ if DEBUG: DEBUG.error("unexpected exception - closing " + \
+ "connection to %s (%d)", host, id(h))
+ self._cm.remove(h)
+ h.close()
+ raise
+
+ if r is None or r.version == 9:
+ # httplib falls back to assuming HTTP 0.9 if it gets a
+ # bad header back. This is most likely to happen if
+ # the socket has been closed by the server since we
+ # last used the connection.
+ if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)",
+ host, id(h))
+ r = None
+ else:
+ if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h))
+
+ return r
+
+ def _start_transaction(self, h, req):
+ try:
+ if req.has_data():
+ data = req.get_data()
+ h.putrequest('POST', req.get_selector(), skip_accept_encoding=1)
+ if not req.headers.has_key('Content-type'):
+ h.putheader('Content-type',
+ 'application/x-www-form-urlencoded')
+ if not req.headers.has_key('Content-length'):
+ h.putheader('Content-length', '%d' % len(data))
+ else:
+ h.putrequest('GET', req.get_selector(), skip_accept_encoding=1)
+ except (socket.error, httplib.HTTPException), err:
+ raise urllib2.URLError(err)
+
+ for args in self.parent.addheaders:
+ h.putheader(*args)
+ for k, v in req.headers.items():
+ h.putheader(k, v)
+ h.endheaders()
+ if req.has_data():
+ h.send(data)
+
+ def _get_connection(self, host):
+ return NotImplementedError
+
+class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
+ def __init__(self):
+ KeepAliveHandler.__init__(self)
+
+ def http_open(self, req):
+ return self.do_open(req)
+
+ def _get_connection(self, host):
+ return HTTPConnection(host)
+
+class HTTPSHandler(KeepAliveHandler, urllib2.HTTPSHandler):
+ def __init__(self, ssl_factory=None):
+ KeepAliveHandler.__init__(self)
+ if not ssl_factory:
+ ssl_factory = sslfactory.get_factory()
+ self._ssl_factory = ssl_factory
+
+ def https_open(self, req):
+ return self.do_open(req)
+
+ def _get_connection(self, host):
+ return self._ssl_factory.get_https_connection(host)
+
+class HTTPResponse(httplib.HTTPResponse):
+ # we need to subclass HTTPResponse in order to
+ # 1) add readline() and readlines() methods
+ # 2) add close_connection() methods
+ # 3) add info() and geturl() methods
+
+ # in order to add readline(), read must be modified to deal with a
+ # buffer. example: readline must read a buffer and then spit back
+ # one line at a time. The only real alternative is to read one
+ # BYTE at a time (ick). Once something has been read, it can't be
+ # put back (ok, maybe it can, but that's even uglier than this),
+ # so if you THEN do a normal read, you must first take stuff from
+ # the buffer.
+
+    # the read method wraps the original to accommodate buffering,
+ # although read() never adds to the buffer.
+ # Both readline and readlines have been stolen with almost no
+ # modification from socket.py
+
+
+ def __init__(self, sock, debuglevel=0, strict=0, method=None):
+ if method: # the httplib in python 2.3 uses the method arg
+ httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
+ else: # 2.2 doesn't
+ httplib.HTTPResponse.__init__(self, sock, debuglevel)
+ self.fileno = sock.fileno
+ self.code = None
+ self._rbuf = ''
+ self._rbufsize = 8096
+ self._handler = None # inserted by the handler later
+ self._host = None # (same)
+ self._url = None # (same)
+ self._connection = None # (same)
+
+ _raw_read = httplib.HTTPResponse.read
+
+ def close(self):
+ if self.fp:
+ self.fp.close()
+ self.fp = None
+ if self._handler:
+ self._handler._request_closed(self, self._host,
+ self._connection)
+
+ def close_connection(self):
+ self._handler._remove_connection(self._host, self._connection, close=1)
+ self.close()
+
+ def info(self):
+ return self.headers
+
+ def geturl(self):
+ return self._url
+
+ def read(self, amt=None):
+ # the _rbuf test is only in this first if for speed. It's not
+ # logically necessary
+ if self._rbuf and not amt is None:
+ L = len(self._rbuf)
+ if amt > L:
+ amt -= L
+ else:
+ s = self._rbuf[:amt]
+ self._rbuf = self._rbuf[amt:]
+ return s
+
+ s = self._rbuf + self._raw_read(amt)
+ self._rbuf = ''
+ return s
+
+ def readline(self, limit=-1):
+ data = ""
+ i = self._rbuf.find('\n')
+ while i < 0 and not (0 < limit <= len(self._rbuf)):
+ new = self._raw_read(self._rbufsize)
+ if not new: break
+ i = new.find('\n')
+ if i >= 0: i = i + len(self._rbuf)
+ self._rbuf = self._rbuf + new
+ if i < 0: i = len(self._rbuf)
+ else: i = i+1
+ if 0 <= limit < len(self._rbuf): i = limit
+ data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
+ return data
+
+ def readlines(self, sizehint = 0):
+ total = 0
+ list = []
+ while 1:
+ line = self.readline()
+ if not line: break
+ list.append(line)
+ total += len(line)
+ if sizehint and total >= sizehint:
+ break
+ return list
+
+
+class HTTPConnection(httplib.HTTPConnection):
+ # use the modified response class
+ response_class = HTTPResponse
+
+class HTTPSConnection(httplib.HTTPSConnection):
+ response_class = HTTPResponse
+
+#########################################################################
+##### TEST FUNCTIONS
+#########################################################################
+
+def error_handler(url):
+ global HANDLE_ERRORS
+ orig = HANDLE_ERRORS
+ keepalive_handler = HTTPHandler()
+ opener = urllib2.build_opener(keepalive_handler)
+ urllib2.install_opener(opener)
+ pos = {0: 'off', 1: 'on'}
+ for i in (0, 1):
+ print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
+ HANDLE_ERRORS = i
+ try:
+ fo = urllib2.urlopen(url)
+ foo = fo.read()
+ fo.close()
+ try: status, reason = fo.status, fo.reason
+ except AttributeError: status, reason = None, None
+ except IOError, e:
+ print " EXCEPTION: %s" % e
+ raise
+ else:
+ print " status = %s, reason = %s" % (status, reason)
+ HANDLE_ERRORS = orig
+ hosts = keepalive_handler.open_connections()
+ print "open connections:", hosts
+ keepalive_handler.close_all()
+
+def continuity(url):
+ import md5
+ format = '%25s: %s'
+
+ # first fetch the file with the normal http handler
+ opener = urllib2.build_opener()
+ urllib2.install_opener(opener)
+ fo = urllib2.urlopen(url)
+ foo = fo.read()
+ fo.close()
+ m = md5.new(foo)
+ print format % ('normal urllib', m.hexdigest())
+
+ # now install the keepalive handler and try again
+ opener = urllib2.build_opener(HTTPHandler())
+ urllib2.install_opener(opener)
+
+ fo = urllib2.urlopen(url)
+ foo = fo.read()
+ fo.close()
+ m = md5.new(foo)
+ print format % ('keepalive read', m.hexdigest())
+
+ fo = urllib2.urlopen(url)
+ foo = ''
+ while 1:
+ f = fo.readline()
+ if f: foo = foo + f
+ else: break
+ fo.close()
+ m = md5.new(foo)
+ print format % ('keepalive readline', m.hexdigest())
+
+def comp(N, url):
+ print ' making %i connections to:\n %s' % (N, url)
+
+ sys.stdout.write(' first using the normal urllib handlers')
+ # first use normal opener
+ opener = urllib2.build_opener()
+ urllib2.install_opener(opener)
+ t1 = fetch(N, url)
+ print ' TIME: %.3f s' % t1
+
+ sys.stdout.write(' now using the keepalive handler ')
+ # now install the keepalive handler and try again
+ opener = urllib2.build_opener(HTTPHandler())
+ urllib2.install_opener(opener)
+ t2 = fetch(N, url)
+ print ' TIME: %.3f s' % t2
+ print ' improvement factor: %.2f' % (t1/t2, )
+
+def fetch(N, url, delay=0):
+ import time
+ lens = []
+ starttime = time.time()
+ for i in range(N):
+ if delay and i > 0: time.sleep(delay)
+ fo = urllib2.urlopen(url)
+ foo = fo.read()
+ fo.close()
+ lens.append(len(foo))
+ diff = time.time() - starttime
+
+ j = 0
+ for i in lens[1:]:
+ j = j + 1
+ if not i == lens[0]:
+ print "WARNING: inconsistent length on read %i: %i" % (j, i)
+
+ return diff
+
+def test_timeout(url):
+ global DEBUG
+ dbbackup = DEBUG
+ class FakeLogger:
+ def debug(self, msg, *args): print msg % args
+ info = warning = error = debug
+ DEBUG = FakeLogger()
+ print " fetching the file to establish a connection"
+ fo = urllib2.urlopen(url)
+ data1 = fo.read()
+ fo.close()
+
+ i = 20
+ print " waiting %i seconds for the server to close the connection" % i
+ while i > 0:
+ sys.stdout.write('\r %2i' % i)
+ sys.stdout.flush()
+ time.sleep(1)
+ i -= 1
+ sys.stderr.write('\r')
+
+ print " fetching the file a second time"
+ fo = urllib2.urlopen(url)
+ data2 = fo.read()
+ fo.close()
+
+ if data1 == data2:
+ print ' data are identical'
+ else:
+ print ' ERROR: DATA DIFFER'
+
+ DEBUG = dbbackup
+
+
+def test(url, N=10):
+ print "checking error hander (do this on a non-200)"
+ try: error_handler(url)
+ except IOError, e:
+ print "exiting - exception will prevent further tests"
+ sys.exit()
+ print
+ print "performing continuity test (making sure stuff isn't corrupted)"
+ continuity(url)
+ print
+ print "performing speed comparison"
+ comp(N, url)
+ print
+ print "performing dropped-connection check"
+ test_timeout(url)
+
+if __name__ == '__main__':
+ import time
+ import sys
+ try:
+ N = int(sys.argv[1])
+ url = sys.argv[2]
+ except:
+ print "%s <integer> <url>" % sys.argv[0]
+ else:
+ test(url, N)
\ No newline at end of file
-##
-## RS Downloader
-## by AliAbdul
-##
-##
-from base64 import encodestring
-from Components.ActionMap import ActionMap
-from Components.config import config, ConfigInteger, ConfigText, ConfigYesNo, ConfigClock, ConfigSubsection, getConfigListEntry
-from Components.ConfigList import ConfigListScreen
-from Components.Label import Label
-from Components.Language import language
-from Components.MenuList import MenuList
-from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
-from Components.ScrollLabel import ScrollLabel
-from decrypt import decrypt
-from enigma import eListboxPythonMultiContent, eTimer, gFont, RT_HALIGN_CENTER, RT_HALIGN_RIGHT
-from os import environ, listdir, remove
-from Plugins.Plugin import PluginDescriptor
-from Screens.ChoiceBox import ChoiceBox
-from Screens.MessageBox import MessageBox
-from Screens.Screen import Screen
-from Screens.VirtualKeyBoard import VirtualKeyBoard
-from time import localtime, sleep, strftime, time
-from Tools.Directories import resolveFilename, SCOPE_SKIN_IMAGE, SCOPE_LANGUAGE, SCOPE_PLUGINS
-from Tools.Downloader import HTTPProgressDownloader
-from Tools.LoadPixmap import LoadPixmap
-from twisted.internet import reactor
-from twisted.python import failure
-from twisted.web.client import getPage
-from urlparse import urlparse, urlunparse
-import gettext, re, socket, urllib2
-
-##############################################################################
-
-config.plugins.RSDownloader = ConfigSubsection()
-config.plugins.RSDownloader.onoff = ConfigYesNo(default=True)
-config.plugins.RSDownloader.username = ConfigText(default="", fixed_size=False)
-config.plugins.RSDownloader.password = ConfigText(default="", fixed_size=False)
-config.plugins.RSDownloader.lists_directory = ConfigText(default="/media/hdd/rs/lists/", fixed_size=False)
-config.plugins.RSDownloader.downloads_directory = ConfigText(default="/media/hdd/rs/downloads", fixed_size=False)
-config.plugins.RSDownloader.ignore_time = ConfigYesNo(default=False)
-config.plugins.RSDownloader.start_time = ConfigClock(default=time())
-config.plugins.RSDownloader.end_time = ConfigClock(default=time())
-config.plugins.RSDownloader.download_monday = ConfigYesNo(default=True)
-config.plugins.RSDownloader.download_tuesday = ConfigYesNo(default=True)
-config.plugins.RSDownloader.download_wednesday = ConfigYesNo(default=True)
-config.plugins.RSDownloader.download_thursday = ConfigYesNo(default=True)
-config.plugins.RSDownloader.download_friday = ConfigYesNo(default=True)
-config.plugins.RSDownloader.download_saturday = ConfigYesNo(default=True)
-config.plugins.RSDownloader.download_sunday = ConfigYesNo(default=True)
-config.plugins.RSDownloader.count_downloads = ConfigInteger(default=3, limits=(1, 6))
-config.plugins.RSDownloader.write_log = ConfigYesNo(default=True)
-config.plugins.RSDownloader.reconnect_fritz = ConfigYesNo(default=False)
-config.plugins.RSDownloader.autorestart_failed = ConfigYesNo(default=False)
-
-##############################################################################
-
-def localeInit():
- lang = language.getLanguage()
- environ["LANGUAGE"] = lang[:2]
- gettext.bindtextdomain("enigma2", resolveFilename(SCOPE_LANGUAGE))
- gettext.textdomain("enigma2")
- gettext.bindtextdomain("RSDownloader", "%s%s"%(resolveFilename(SCOPE_PLUGINS), "Extensions/RSDownloader/locale/"))
-
-def _(txt):
- t = gettext.dgettext("RSDownloader", txt)
- if t == txt:
- t = gettext.gettext(txt)
- return t
-
-localeInit()
-language.addCallback(localeInit)
-
-##############################################################################
-
-def writeLog(message):
- if config.plugins.RSDownloader.write_log.value:
- try:
- f = open("/tmp/rapidshare.log", "a")
- f.write(strftime("%c", localtime(time())) + " - " + message + "\n")
- f.close()
- except:
- pass
-
-##############################################################################
-
-def _parse(url):
- url = url.strip()
- parsed = urlparse(url)
- scheme = parsed[0]
- path = urlunparse(('','') + parsed[2:])
- host, port = parsed[1], 80
- if '@' in host:
- username, host = host.split('@')
- if ':' in username:
- username, password = username.split(':')
- else:
- password = ""
- else:
- username = ""
- password = ""
- if ':' in host:
- host, port = host.split(':')
- port = int(port)
- if path == "":
- path = "/"
- return scheme, host, port, path, username, password
-
-class ProgressDownload:
- def __init__(self, url, outputfile, contextFactory=None, *args, **kwargs):
- scheme, host, port, path, username, password = _parse(url)
- if username and password:
- url = scheme + '://' + host + ':' + str(port) + path
- basicAuth = encodestring("%s:%s"%(username, password))
- authHeader = "Basic " + basicAuth.strip()
- AuthHeaders = {"Authorization": authHeader}
- if kwargs.has_key("headers"):
- kwargs["headers"].update(AuthHeaders)
- else:
- kwargs["headers"] = AuthHeaders
- self.factory = HTTPProgressDownloader(url, outputfile, *args, **kwargs)
- self.connection = reactor.connectTCP(host, port, self.factory)
-
- def start(self):
- return self.factory.deferred
-
- def stop(self):
- self.connection.disconnect()
-
- def addProgress(self, progress_callback):
- self.factory.progress_callback = progress_callback
-
-##############################################################################
-
-def get(url):
- try:
- data = urllib2.urlopen(url)
- return data.read()
- except:
- return ""
-
-def post(url, data):
- try:
- return urllib2.urlopen(url, data).read()
- except:
- return ""
-
-def matchGet(rex, string):
- match = re.search(rex, string)
- if match:
- if len(match.groups()) == 0:
- return string[match.span()[0]:match.span()[1]]
- if len(match.groups()) == 1:
- return match.groups()[0]
- else:
- return False
-
-##############################################################################
-
-def reconnect(host='fritz.box', port=49000):
- http_body = '\r\n'.join((
- '<?xml version="1.0" encoding="utf-8"?>',
- '<s:Envelope s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/" xmlns:s="http://schemas.xmlsoap.org/soap/envelope/">',
- ' <s:Body>',
- ' <u:ForceTermination xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1"/>',
- ' </s:Body>',
- '</s:Envelope>'))
- http_data = '\r\n'.join((
- 'POST /upnp/control/WANIPConn1 HTTP/1.1',
- 'Host: %s:%d'%(host, port),
- 'SoapAction: urn:schemas-upnp-org:service:WANIPConnection:1#ForceTermination',
- 'Content-Type: text/xml; charset="utf-8"',
- 'Content-Length: %d'%len(http_body),
- '',
- http_body))
- try:
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- s.connect((host, port))
- s.send(http_data)
- s.close()
- except:
- pass
-
-##############################################################################
-
-class RSDownload:
- def __init__(self, url):
- writeLog("Adding: %s"%url)
- self.url = url
- self.download = None
- self.downloading = False
- self.progress = 0
- self.size = 0
- self.status = _("Waiting")
- self.name = self.url.split("/")[-1]
-
- self.freeDownloadUrl = ""
- self.freeDownloadTimer = eTimer()
- self.freeDownloadTimer.callback.append(self.freeDownloadStart)
- self.checkTimer = eTimer()
- self.checkTimer.callback.append(self.doCheckTimer)
- self.restartFailedTimer = eTimer()
- self.restartFailedTimer.callback.append(self.restartFailedCheck)
-
- self.finishCallbacks = []
-
- def start(self):
- writeLog("Downloading: %s"%self.url)
- self.downloading = True
- self.progress = 0
- self.size = 0
- username = config.plugins.RSDownloader.username.value
- password = config.plugins.RSDownloader.password.value
- if self.url.__contains__("rapidshare.com") and username == "" and password == "":
- writeLog("Free RS-Download: %s"%self.url)
- self.status = _("Checking")
- if config.plugins.RSDownloader.reconnect_fritz.value:
- reconnect()
- sleep(3)
- data = get(self.url)
- url = matchGet('<form[^>]+action="([^"]+)', data)
- if not url:
- writeLog("Failed: %s"%self.url)
- self.httpFailed(True, "Failed to get download page url: %s"%self.url)
- else:
- data = post(url, "dl.start=Free")
- seconds = matchGet('var c=([0-9]+)', data)
- if not seconds:
- self.httpFailed(True, "Failed to get download page url: %s"%self.url)
- else:
- writeLog("Free RS-download... must wait %s seconds: %s"%(seconds, self.url))
- self.status = "%s %s"%(_("Waiting"), seconds)
- url = matchGet('"dlf" action="([^"]+)', data)
- if not url:
- self.httpFailed(True, "Failed to get download page url: %s"%self.url)
- else:
- self.freeDownloadUrl = url
- self.freeDownloadTimer.start((int(seconds) + 2) * 1000, 1)
- elif self.url.__contains__("youtube.com"):
- writeLog("Getting youtube video link: %s"%self.url)
- self.status = _("Checking")
- downloadLink = self.getYoutubeDownloadLink()
- if downloadLink:
- self.status = _("Downloading")
- writeLog("Downloading video: %s"%downloadLink)
- req = urllib2.Request(downloadLink)
- url_handle = urllib2.urlopen(req)
- headers = url_handle.info()
- if headers.getheader("content-type") == "video/mp4":
- ext = "mp4"
- else:
- ext = "flv"
- self.download = ProgressDownload(downloadLink, ("%s/%s.%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name, ext)).replace("//", "/"))
- self.download.addProgress(self.httpProgress)
- self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed)
- else:
- self.httpFailed(True, "Failed to get video url: %s"%self.url)
- else:
- if self.url.__contains__("rapidshare.com"):
- url = self.url.replace("http://", "http://" + username + ":" + password + "@")
- else:
- url = self.url
- self.status = _("Downloading")
- self.download = ProgressDownload(url, ("%s/%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name)).replace("//", "/").replace(".html", ""))
- self.download.addProgress(self.httpProgress)
- self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed)
-
- def freeDownloadStart(self):
- self.status = _("Downloading")
- self.download = ProgressDownload(self.freeDownloadUrl, ("%s/%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name)).replace("//", "/").replace(".html", ""))
- self.download.addProgress(self.httpProgress)
- self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed)
-
- def stop(self):
- self.progress = 0
- self.downloading = False
- self.status = _("Waiting")
- if self.download:
- writeLog("Stopping download: %s"%self.url)
- self.download.stop()
-
- def httpProgress(self, recvbytes, totalbytes):
- if self.size == 0:
- self.size = int((totalbytes / 1024) / 1024)
- self.progress = int(100.0 * float(recvbytes) / float(totalbytes))
- if self.progress == 100:
- writeLog("Finished: %s"%self.url)
- self.status = _("Finished")
- self.execFinishCallbacks()
-
- def httpFinished(self, string=""):
- if string is not None:
- writeLog("Failed: %s"%self.url)
- writeLog("Error: %s"%string)
- self.status = _("Checking")
- self.checkTimer.start(10000, 1)
-
- def doCheckTimer(self):
- if self.size == 0:
- self.status = _("Failed")
- if config.plugins.RSDownloader.autorestart_failed.value:
- self.restartFailedTimer.start(10000*60, 1)
- elif self.progress == 100:
- self.status = _("Finished")
- self.downloading = False
- self.execFinishCallbacks()
-
- def restartFailedCheck(self):
- if self.status == _("Failed"): # check if user didn't restart already
- self.download = None
- self.status = _("Waiting")
-
- def execFinishCallbacks(self):
- for x in self.finishCallbacks:
- x()
-
- def httpFailed(self, failure=None, error=""):
- if failure:
- if error == "":
- error = failure.getErrorMessage()
- if error != "" and not error.startswith("[Errno 2]"):
- writeLog("Failed: %s"%self.url)
- writeLog("Error: %s"%error)
- self.status = _("Checking")
- self.checkTimer.start(10000, 1)
-
- def getYoutubeDownloadLink(self):
- mrl = None
- html = get(self.url)
- if html != "":
- isHDAvailable = False
- video_id = None
- t = None
- reonecat = re.compile(r'<title>(.+?)</title>', re.DOTALL)
- titles = reonecat.findall(html)
- if titles:
- self.name = titles[0]
- if self.name.startswith("YouTube - "):
- self.name = (self.name[10:]).replace("&", "&")
- if html.__contains__("isHDAvailable = true"):
- isHDAvailable = True
- for line in html.split('\n'):
- if 'swfArgs' in line:
- line = line.strip().split()
- x = 0
- for thing in line:
- if 'video_id' in thing:
- video_id = line[x+1][1:-2]
- elif '"t":' == thing:
- t = line[x+1][1:-2]
- x += 1
- if video_id and t:
- if isHDAvailable == True:
- mrl = "http://www.youtube.com/get_video?video_id=%s&t=%s&fmt=22" % (video_id, t)
- else:
- mrl = "http://www.youtube.com/get_video?video_id=%s&t=%s&fmt=18" % (video_id, t)
- return mrl
-
-##############################################################################
-
-class RS:
- def __init__(self):
- self.downloads = []
- self.checkTimer = eTimer()
- self.checkTimer.callback.append(self.startDownloading)
- self.checkTimer.start(5000*60, False)
-
- def mayDownload(self):
- if config.plugins.RSDownloader.onoff.value == False:
- writeLog("RS Downloader is turned off...")
- return False
- elif config.plugins.RSDownloader.ignore_time.value:
- return True
- else:
- start = config.plugins.RSDownloader.start_time.value
- end = config.plugins.RSDownloader.end_time.value
- t = localtime()
- weekday = t[6]
- if weekday == 0 and config.plugins.RSDownloader.download_monday.value == False:
- return False
- elif weekday == 1 and config.plugins.RSDownloader.download_tuesday.value == False:
- return False
- elif weekday == 2 and config.plugins.RSDownloader.download_wednesday.value == False:
- return False
- elif weekday == 3 and config.plugins.RSDownloader.download_thursday.value == False:
- return False
- elif weekday == 4 and config.plugins.RSDownloader.download_friday.value == False:
- return False
- elif weekday == 5 and config.plugins.RSDownloader.download_saturday.value == False:
- return False
- elif weekday == 6 and config.plugins.RSDownloader.download_sunday.value == False:
- return False
- else:
- hour_now = t[3]
- minute_now = t[4]
- hour_start = start[0]
- minute_start = start[1]
- hour_end = end[0]
- minute_end = end[1]
- if start == end: # Same start and end-time
- return True
- elif hour_end < hour_start: # Different days!!!
- if hour_now > hour_start or hour_now < hour_end:
- return True
- elif hour_now == hour_start and minute_now > minute_start:
- return True
- elif hour_now == hour_end and minute_now < minute_end:
- return True
- else:
- return False
- elif hour_now > hour_start and hour_now < hour_end: # Same day...
- return True
- elif hour_now == hour_start and minute_now > minute_start: # Same day, same start-hour...
- return True
- elif hour_now == hour_end and minute_now < minute_end: # Same day, same end-hour...
- return True
- else:
- return False
-
- def allDownloadsFinished(self):
- allDone = True
- for download in self.downloads:
- if (download.status != _("Failed")) and (download.status != _("Finished")):
- allDone = False
- return allDone
-
- def startDownloading(self):
- if self.mayDownload() == True:
- if self.allDownloadsFinished() == True:
- self.readLists()
- downloadCount = 0
- for download in self.downloads:
- if download.downloading == True:
- downloadCount += 1 # Count the downloaded files
- if config.plugins.RSDownloader.username.value == "" and config.plugins.RSDownloader.password.value == "":
- if downloadCount < 1: # Allow one download if without account
- for download in self.downloads:
- if download.downloading == False and download.status.startswith(_("Waiting")):
- download.start() # Start first download in the list
- break
- else:
- mayDownloadCount = config.plugins.RSDownloader.count_downloads.value - downloadCount
- for download in self.downloads:
- if download.downloading == False:
- if mayDownloadCount > 0 and download.status == _("Waiting"):
- download.start()
- mayDownloadCount -= 1
-
- def addDownload(self, url):
- error = False
- for download in self.downloads:
- if download.url == url:
- error = True
- if error:
- return False
- else:
- download = RSDownload(url)
- download.finishCallbacks.append(self.cleanLists)
- self.downloads.append(download)
- return True
-
- def readLists(self):
- writeLog("Reading all lists...")
- path = config.plugins.RSDownloader.lists_directory.value
- if not path.endswith("/"):
- path = path + "/"
- writeLog("Directory: " + path)
- try:
- file_list = listdir(path)
- writeLog("Count of lists: " + str(len(file_list)))
- except:
- file_list = []
- writeLog("Could not find any list!")
- for x in file_list:
- list = path + x
- if list.endswith(".txt"):
- try:
- writeLog("Reading list %s..."%list)
- f = open(list, "r")
- count = 0
- for l in f:
- if l.startswith("http://"):
- if (self.addDownload(l.replace("\n", "").replace("\r", ""))) == True:
- count += 1
- f.close()
- if count == 0:
- writeLog("Empty list or downloads already in download list: %s"%list)
- else:
- writeLog("Added %d files from list %s..."%(count, list))
- except:
- writeLog("Error while reading list %s!"%list)
- else:
- writeLog("No *.txt file: %s!"%list)
-
- def cleanLists(self):
- writeLog("Cleaning lists...")
- path = config.plugins.RSDownloader.lists_directory.value
- if not path.endswith("/"):
- path = path + "/"
- try:
- file_list = listdir(path)
- except:
- file_list = []
- for x in file_list:
- list = path + x
- try:
- f = open(list, "r")
- content = f.read()
- f.close()
- for download in self.downloads:
- if download.status == _("Finished") and content.__contains__(download.url):
- content = content.replace(download.url, "")
- content = content.replace("\n\n", "\n").replace("\r\r", "\r")
- f = open(list, "w")
- f.write(content)
- f.close()
- except:
- writeLog("Error while cleaning list %s!"%list)
- self.startDownloading()
-
- def removeDownload(self, url):
- tmp = []
- for download in self.downloads:
- if download.url == url:
- download.stop()
- else:
- tmp.append(download)
- del self.downloads
- self.downloads = tmp
- self.removeFromLists(url)
-
- def removeFromLists(self, url):
- path = config.plugins.RSDownloader.lists_directory.value
- if not path.endswith("/"):
- path = path + "/"
- try:
- file_list = listdir(path)
- except:
- file_list = []
- for x in file_list:
- list = path + x
- try:
- f = open(list, "r")
- content = f.read()
- f.close()
- if content.__contains__(url):
- content = content.replace(url, "")
- content = content.replace("\n\n", "\n").replace("\r\r", "\r")
- f = open(list, "w")
- f.write(content)
- f.close()
- except:
- pass
-
- def clearFinishedDownload(self, url):
- idx = 0
- for x in self.downloads:
- if x.url == url:
- del self.downloads[idx]
- break
- else:
- idx += 1
-
- def clearFinishedDownloads(self):
- tmp = []
- for download in self.downloads:
- if download.status != _("Finished"):
- tmp.append(download)
- del self.downloads
- self.downloads = tmp
-
- def deleteFailedDownloads(self):
- tmp = []
- for download in self.downloads:
- if download.status == _("Failed"):
- self.removeFromLists(download.url)
- else:
- tmp.append(download)
- del self.downloads
- self.downloads = tmp
-
- def restartFailedDownloads(self):
- tmp = []
- for download in self.downloads:
- if download.status == _("Failed"):
- download.download = None
- download.downloading = False
- download.progress = 0
- download.size = 0
- download.status = _("Waiting")
- tmp.append(download)
- del self.downloads
- self.downloads = tmp
- self.startDownloading()
-
-rapidshare = RS()
-
-##############################################################################
-
-class ChangedScreen(Screen):
- def __init__(self, session, parent=None):
- Screen.__init__(self, session, parent)
- self.onLayoutFinish.append(self.setScreenTitle)
-
- def setScreenTitle(self):
- self.setTitle(_("RS Downloader"))
-
-##############################################################################
-
-class RSConfig(ConfigListScreen, ChangedScreen):
- skin = """
- <screen position="center,center" size="560,450" title="RS Downloader">
- <ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" transparent="1" alphatest="on" />
- <ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" transparent="1" alphatest="on" />
- <ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" transparent="1" alphatest="on" />
- <ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" transparent="1" alphatest="on" />
- <widget name="key_green" position="140,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
- <widget name="config" position="0,45" size="560,400" scrollbarMode="showOnDemand" />
- </screen>"""
-
- def __init__(self, session):
- ChangedScreen.__init__(self, session)
-
- self["key_green"] = Label(_("Save"))
-
- ConfigListScreen.__init__(self, [
- getConfigListEntry(_("Download in the background:"), config.plugins.RSDownloader.onoff),
- getConfigListEntry(_("Username:"), config.plugins.RSDownloader.username),
- getConfigListEntry(_("Password:"), config.plugins.RSDownloader.password),
- getConfigListEntry(_("Lists directory:"), config.plugins.RSDownloader.lists_directory),
- getConfigListEntry(_("Downloads directory:"), config.plugins.RSDownloader.downloads_directory),
- getConfigListEntry(_("Ignore download times:"), config.plugins.RSDownloader.ignore_time),
- getConfigListEntry(_("Allow downloading on monday:"), config.plugins.RSDownloader.download_monday),
- getConfigListEntry(_("Allow downloading on tuesday:"), config.plugins.RSDownloader.download_tuesday),
- getConfigListEntry(_("Allow downloading on wednesday:"), config.plugins.RSDownloader.download_wednesday),
- getConfigListEntry(_("Allow downloading on thursday:"), config.plugins.RSDownloader.download_thursday),
- getConfigListEntry(_("Allow downloading on friday:"), config.plugins.RSDownloader.download_friday),
- getConfigListEntry(_("Allow downloading on saturday:"), config.plugins.RSDownloader.download_saturday),
- getConfigListEntry(_("Allow downloading on sunday:"), config.plugins.RSDownloader.download_sunday),
- getConfigListEntry(_("Don't download before:"), config.plugins.RSDownloader.start_time),
- getConfigListEntry(_("Don't download after:"), config.plugins.RSDownloader.end_time),
- getConfigListEntry(_("Maximal downloads:"), config.plugins.RSDownloader.count_downloads),
- getConfigListEntry(_("Write log:"), config.plugins.RSDownloader.write_log),
- getConfigListEntry(_("Reconnect fritz.Box before downloading:"), config.plugins.RSDownloader.reconnect_fritz),
- getConfigListEntry(_("Restart failed after 10 minutes:"), config.plugins.RSDownloader.autorestart_failed)])
-
- self["actions"] = ActionMap(["OkCancelActions", "ColorActions"], {"green": self.save, "cancel": self.exit}, -1)
-
- def save(self):
- for x in self["config"].list:
- x[1].save()
- self.close()
-
- def exit(self):
- for x in self["config"].list:
- x[1].cancel()
- self.close()
-
-##############################################################################
-
-class RSSearch(Screen):
- skin = """
- <screen position="center,center" size="560,450" title="Searching... please wait!">
- <widget name="list" position="0,0" size="570,450" scrollbarMode="showOnDemand" />
- </screen>"""
-
- def __init__(self, session, searchFor):
- Screen.__init__(self, session)
- self.session = session
-
- self.searchFor = searchFor.replace(" ", "%2B")
- self.maxPage = 1
- self.curPage = 1
- self.files = []
-
- self["list"] = MenuList([])
-
- self["actions"] = ActionMap(["OkCancelActions", "InfobarChannelSelection"],
- {
- "historyBack": self.previousPage,
- "historyNext": self.nextPage,
- "ok": self.okClicked,
- "cancel": self.close
- }, -1)
-
- self.onLayoutFinish.append(self.search)
-
- def okClicked(self):
- if len(self.files) > 0:
- idx = self["list"].getSelectedIndex()
- url = self.files[idx]
- try:
- f = open(("%s/search.txt" % config.plugins.RSDownloader.lists_directory.value).replace("//", "/"), "a")
- f.write("%s\n"%url)
- f.close()
- self.session.open(MessageBox, (_("Added %s to the download-list.") % url), MessageBox.TYPE_INFO)
- except:
- self.session.open(MessageBox, (_("Error while adding %s to the download-list!") % url), MessageBox.TYPE_ERROR)
-
- def search(self):
- getPage("http://rapidshare-search-engine.com/index-s_submit=Search&sformval=1&s_type=0&what=1&s=%s&start=%d.html"%(self.searchFor, self.curPage)).addCallback(self.searchCallback).addErrback(self.searchError)
-
- def searchCallback(self, html=""):
- list = []
- files = []
-
- if html.__contains__("Nothing found, sorry."):
- self.session.open(MessageBox, (_("Error while searching http://rapidshare-search-engine.com!\n\nError: Nothing found, sorry.")), MessageBox.TYPE_ERROR)
- self.instance.setTitle(_("Nothing found, sorry."))
- else:
- tmp = html
- while tmp.__contains__("goPg('"):
- idx = tmp.index("goPg('")
- tmp = tmp[idx+6:]
- idx = tmp.index("'")
- pageNumber = tmp[:idx]
-
- try:
- pageNumber = int(pageNumber)
- if pageNumber > self.maxPage:
- self.maxPage = pageNumber
- except:
- pass
-
- self.instance.setTitle(_("Page %d / %d. Push < > to switch the page...")%(self.curPage, self.maxPage))
-
- while html.__contains__('title="Download"'):
- idx = html.index('title="Download"')
- html = html[idx:]
- idx = html.index('value="')
- html = html[idx+7:]
- idx = html.index('"')
- size = html[:idx]
- idx = html.index('http://rapidshare.com/')
- html = html[idx:]
- idx = html.index('"')
- url = html[:idx]
-
- files.append(url)
- try:
- urllist = url.split("/")
- idx = len(urllist) - 1
- name = urllist[idx]
- list.append("%s - %s"%(size, name))
- except:
- list.append("%s - %s"%(size, url))
-
- self.files = files
- self["list"].setList(list)
-
- def searchError(self, error=""):
- self.session.open(MessageBox, (_("Error while searching http://rapidshare-search-engine.com!\n\nError: %s")%str(error)), MessageBox.TYPE_ERROR)
-
- def previousPage(self):
- if self.curPage > 1:
- self.curPage -= 1
- self.instance.setTitle(_("Loading previous page... please wait!"))
- self.search()
-
- def nextPage(self):
- if self.curPage < self.maxPage:
- self.curPage += 1
- self.instance.setTitle(_("Loading next page... please wait!"))
- self.search()
-
-##############################################################################
-
-class RSLogScreen(ChangedScreen):
- skin = """
- <screen position="center,center" size="560,450" title="RS Downloader">
- <widget name="label" position="0,0" size="560,450" font="Regular;20" />
- </screen>"""
-
- def __init__(self, session):
- ChangedScreen.__init__(self, session)
-
- try:
- f = open("/tmp/rapidshare.log")
- log = f.read()
- f.close()
- except:
- log = ""
- self["label"] = ScrollLabel(log)
-
- self["actions"] = ActionMap(["WizardActions"],
- {
- "ok": self.close,
- "back": self.close,
- "up": self["label"].pageUp,
- "down": self["label"].pageDown,
- "left": self["label"].pageUp,
- "right": self["label"].pageDown
- }, -1)
-
-##############################################################################
-
-class RSContainerSelector(ChangedScreen):
- skin = """
- <screen position="center,center" size="560,450" title="RS Downloader">
- <widget name="list" position="0,0" size="560,450" />
- </screen>"""
-
- def __init__(self, session, list):
- ChangedScreen.__init__(self, session)
- self["list"] = MenuList(list)
- self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)
-
- def okClicked(self):
- cur = self["list"].getCurrent()
- self.close(cur)
-
-##############################################################################
-
-class RSList(MenuList):
- def __init__(self, list):
- MenuList.__init__(self, list, False, eListboxPythonMultiContent)
- self.l.setItemHeight(25)
- self.l.setFont(0, gFont("Regular", 20))
-
-##############################################################################
-
-def RSListEntry(download):
- res = [(download)]
- res.append(MultiContentEntryText(pos=(0, 0), size=(170, 25), font=0, text=download.name))
- res.append(MultiContentEntryText(pos=(175, 0), size=(75, 25), font=0, text="%d%s"%(download.size, "MB"), flags=RT_HALIGN_CENTER))
- res.append(MultiContentEntryPixmapAlphaTest(pos=(260, 9), size=(84, 7), png=LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/progress_bg.png"))))
- res.append(MultiContentEntryPixmapAlphaTest(pos=(260, 10), size=(int(0.84 * download.progress), 5), png=LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/progress_small.png"))))
- res.append(MultiContentEntryText(pos=(360, 0), size=(60, 25), font=0, text="%d%s"%(download.progress, "%"), flags=RT_HALIGN_CENTER))
- res.append(MultiContentEntryText(pos=(420, 0), size=(140, 25), font=0, text=download.status, flags=RT_HALIGN_RIGHT))
- return res
-
-##############################################################################
-
-class RSMain(ChangedScreen):
- skin = """
- <screen position="center,center" size="560,450" title="RS Downloader">
- <ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" transparent="1" alphatest="on" />
- <ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" transparent="1" alphatest="on" />
- <ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" transparent="1" alphatest="on" />
- <ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" transparent="1" alphatest="on" />
- <ePixmap pixmap="skin_default/buttons/key_menu.png" position="10,420" size="35,25" transparent="1" alphatest="on" />
- <widget name="key_red" position="0,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
- <widget name="key_green" position="140,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
- <widget name="key_yellow" position="280,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
- <widget name="key_blue" position="420,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
- <widget name="key_menu" position="50,422" size="300,25" font="Regular;20" transparent="1" />
- <widget name="list" position="0,40" size="560,375" scrollbarMode="showNever" />
- </screen>"""
-
- def __init__(self, session):
- ChangedScreen.__init__(self, session)
- self.session = session
-
- self["key_red"] = Label(_("Delete"))
- self["key_green"] = Label(_("Search"))
- self["key_yellow"] = Label(_("Add"))
- self["key_blue"] = Label(_("Config"))
- self["key_menu"] = Label(_("Menu"))
- self["list"] = RSList([])
-
- self.refreshTimer = eTimer()
- self.refreshTimer.callback.append(self.updateList)
-
- self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "InfobarMenuActions"],
- {
- "mainMenu": self.menu,
- "cancel": self.close,
- "red": self.delete,
- "green": self.search,
- "yellow": self.add,
- "blue": self.config
- }, prio=-1)
-
- self.onLayoutFinish.append(self.updateList)
-
- def menu(self):
- list = []
- #TODO: Add sort list functions
- list.append((_("Delete download"), self.delete))
- list.append((_("Use search engine"), self.search))
- list.append((_("Add downloads from txt files"), self.add))
- list.append((_("Add files from container"), self.addContainer))
- list.append((_("Delete failed downloads"), self.deleteFailed))
- list.append((_("Restart failed downloads"), self.restartFailed))
- list.append((_("Clear finished downloads"), self.clearFinished))
- list.append((_("Show log"), self.showLog))
- list.append((_("Delete log"), self.deleteLog))
- list.append((_("Close plugin"), self.close))
- self.session.openWithCallback(self.menuCallback, ChoiceBox, title=_("Please choose a function..."), list=list)
-
- def menuCallback(self, callback=None):
- if callback is not None:
- callback[1]()
-
- def deleteFailed(self):
- rapidshare.deleteFailedDownloads()
-
- def restartFailed(self):
- rapidshare.restartFailedDownloads()
-
- def clearFinished(self):
- rapidshare.clearFinishedDownloads()
-
- def showLog(self):
- self.session.open(RSLogScreen)
-
- def deleteLog(self):
- try:
- remove("/tmp/rapidshare.log")
- except:
- pass
-
- def updateList(self):
- list = []
- for download in rapidshare.downloads:
- list.append(RSListEntry(download))
- self["list"].setList(list)
- self.refreshTimer.start(2000, 1)
-
- def delete(self):
- cur = self["list"].getCurrent()
- if cur:
- cur = cur[0]
- if cur.status == _("Finished"):
- rapidshare.clearFinishedDownload(cur.url)
- else:
- self.session.openWithCallback(self.deleteCallback, MessageBox, (_("Delete %s?")%cur.name))
-
- def deleteCallback(self, callback):
- if callback:
- rapidshare.removeDownload(self["list"].getCurrent()[0].url)
- self.refreshTimer.stop()
- self.updateList()
-
- def search(self):
- self.session.openWithCallback(self.searchCallback, VirtualKeyBoard, title=_("Search http://rapidshare-search-engine.com for:"))
-
- def searchCallback(self, callback):
- if callback is not None and callback != "":
- self.session.openWithCallback(self.searchScreenCallback, RSSearch, callback)
-
-
- def searchScreenCallback(self):
- self.refreshTimer.stop()
- rapidshare.startDownloading()
- self.updateList()
-
- def add(self):
- self.refreshTimer.stop()
- rapidshare.startDownloading()
- self.updateList()
-
- def config(self):
- self.session.openWithCallback(self.configCallback, RSConfig)
-
- def configCallback(self):
- if config.plugins.RSDownloader.onoff.value:
- rapidshare.startDownloading()
- else:
- for download in rapidshare.downloads:
- if download.downloading:
- download.stop()
- self.updateList()
-
- def addContainer(self):
- try:
- file_list = listdir(config.plugins.RSDownloader.lists_directory.value)
- except:
- file_list = []
- list = []
- for file in file_list:
- if file.lower().endswith(".ccf") or file.lower().endswith(".dlc") or file.lower().endswith(".rsdf"):
- list.append(file)
- list.sort()
- self.session.openWithCallback(self.addContainerCallback, RSContainerSelector, list)
-
- def addContainerCallback(self, callback=None):
- if callback:
- file = "%s/%s"%(config.plugins.RSDownloader.lists_directory.value, callback)
- file = file.replace("//", "/")
- links = decrypt(file)
- try:
- f = open(("%s/%s.txt" % (config.plugins.RSDownloader.lists_directory.value, callback)).replace("//", "/"), "w")
- for link in links:
- if link.endswith(".html"):
- link = link[:-5]
- elif link.endswith(".htm"):
- link = link[:-4]
- f.write("%s\n"%link)
- f.close()
- except:
- pass
- self.refreshTimer.stop()
- rapidshare.startDownloading()
- self.updateList()
-
-##############################################################################
-
-def autostart(reason, **kwargs):
- if reason == 0:
- rapidshare.startDownloading()
-
-##############################################################################
-
-def main(session, **kwargs):
- session.open(RSMain)
-
-##############################################################################
-
-def Plugins(**kwargs):
- return [
- PluginDescriptor(where=PluginDescriptor.WHERE_AUTOSTART, fnc=autostart),
- PluginDescriptor(name=_("RS Downloader"), description=_("Download files from rapidshare"), where=[PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_PLUGINMENU], icon="rs.png", fnc=main)]
-
+##\r
+## RS Downloader\r
+## by AliAbdul\r
+##\r
+##\r
+from base64 import encodestring\r
+from Components.ActionMap import ActionMap\r
+from Components.config import config, ConfigInteger, ConfigText, ConfigYesNo, ConfigClock, ConfigSubsection, getConfigListEntry\r
+from Components.ConfigList import ConfigListScreen\r
+from Components.Label import Label\r
+from Components.Language import language\r
+from Components.MenuList import MenuList\r
+from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest\r
+from Components.ScrollLabel import ScrollLabel\r
+from container.decrypt import decrypt\r
+from enigma import eListboxPythonMultiContent, eTimer, gFont, RT_HALIGN_CENTER, RT_HALIGN_RIGHT\r
+from os import environ, listdir, remove\r
+from Plugins.Plugin import PluginDescriptor\r
+from Screens.ChoiceBox import ChoiceBox\r
+from Screens.MessageBox import MessageBox\r
+from Screens.Screen import Screen\r
+from Screens.VirtualKeyBoard import VirtualKeyBoard\r
+from time import localtime, sleep, strftime, time\r
+from Tools.Directories import resolveFilename, SCOPE_SKIN_IMAGE, SCOPE_LANGUAGE, SCOPE_PLUGINS\r
+from Tools.Downloader import HTTPProgressDownloader\r
+from Tools.LoadPixmap import LoadPixmap\r
+from twisted.internet import reactor\r
+from twisted.python import failure\r
+from twisted.web.client import getPage\r
+from urlparse import urlparse, urlunparse\r
+import gettext, re, socket, urllib2\r
+\r
+##############################################################################\r
+\r
# Persistent plugin configuration (stored by enigma2's config system).
config.plugins.RSDownloader = ConfigSubsection()
# Master switch: when False, RS.mayDownload() refuses to start anything.
config.plugins.RSDownloader.onoff = ConfigYesNo(default=True)
# Rapidshare premium credentials; both empty selects the free-user code path.
config.plugins.RSDownloader.username = ConfigText(default="", fixed_size=False)
config.plugins.RSDownloader.password = ConfigText(default="", fixed_size=False)
# Directory holding *.txt link lists and container files (.ccf/.dlc/.rsdf).
config.plugins.RSDownloader.lists_directory = ConfigText(default="/media/hdd/rs/lists/", fixed_size=False)
# Target directory for downloaded files.
config.plugins.RSDownloader.downloads_directory = ConfigText(default="/media/hdd/rs/downloads", fixed_size=False)
# When True, the time window and weekday switches below are ignored.
config.plugins.RSDownloader.ignore_time = ConfigYesNo(default=False)
# NOTE: default=time() captures the clock at first start — both clocks then
# default to "now", which mayDownload() treats as "always allowed".
config.plugins.RSDownloader.start_time = ConfigClock(default=time())
config.plugins.RSDownloader.end_time = ConfigClock(default=time())
# Per-weekday permission flags, checked by RS.mayDownload().
config.plugins.RSDownloader.download_monday = ConfigYesNo(default=True)
config.plugins.RSDownloader.download_tuesday = ConfigYesNo(default=True)
config.plugins.RSDownloader.download_wednesday = ConfigYesNo(default=True)
config.plugins.RSDownloader.download_thursday = ConfigYesNo(default=True)
config.plugins.RSDownloader.download_friday = ConfigYesNo(default=True)
config.plugins.RSDownloader.download_saturday = ConfigYesNo(default=True)
config.plugins.RSDownloader.download_sunday = ConfigYesNo(default=True)
# Parallel downloads for premium accounts (free mode is capped at one).
config.plugins.RSDownloader.count_downloads = ConfigInteger(default=3, limits=(1, 6))
# Append activity messages to /tmp/rapidshare.log.
config.plugins.RSDownloader.write_log = ConfigYesNo(default=True)
# Force a fritz.box router reconnect before free downloads (fresh IP).
config.plugins.RSDownloader.reconnect_fritz = ConfigYesNo(default=False)
# Automatically re-queue failed downloads after a delay.
config.plugins.RSDownloader.autorestart_failed = ConfigYesNo(default=False)
+\r
+##############################################################################\r
+\r
def localeInit():
	"""Bind the plugin's gettext domains for the currently selected GUI language."""
	environ["LANGUAGE"] = language.getLanguage()[:2]
	gettext.bindtextdomain("enigma2", resolveFilename(SCOPE_LANGUAGE))
	gettext.textdomain("enigma2")
	gettext.bindtextdomain("RSDownloader", resolveFilename(SCOPE_PLUGINS) + "Extensions/RSDownloader/locale/")
+\r
def _(txt):
	"""Translate txt via the RSDownloader domain, falling back to enigma2's."""
	translated = gettext.dgettext("RSDownloader", txt)
	return translated if translated != txt else gettext.gettext(txt)
+\r
# Install translations now, and again whenever the user switches GUI language.
localeInit()
language.addCallback(localeInit)
+\r
+##############################################################################\r
+\r
def writeLog(message):
	"""Append a timestamped message line to /tmp/rapidshare.log.

	Does nothing when logging is disabled in the configuration.  Logging
	failures are deliberately swallowed: a full or read-only /tmp must
	never break a running download.
	"""
	if config.plugins.RSDownloader.write_log.value:
		try:
			# with-statement closes the handle even if the write raises,
			# fixing a file-descriptor leak in the original open/close pair.
			with open("/tmp/rapidshare.log", "a") as f:
				f.write(strftime("%c", localtime(time())) + " - " + message + "\n")
		except:
			pass
+\r
+##############################################################################\r
+\r
def _parse(url):
	"""Split url into (scheme, host, port, path, username, password).

	Credentials embedded as user:password@host are extracted; the port
	defaults to 80 and the path to "/" when absent.
	"""
	parsed = urlparse(url.strip())
	scheme = parsed[0]
	path = urlunparse(('', '') + parsed[2:]) or "/"
	host = parsed[1]
	port = 80
	username = password = ""
	if '@' in host:
		username, host = host.split('@')
		if ':' in username:
			username, password = username.split(':')
	if ':' in host:
		host, port = host.split(':')
		port = int(port)
	return scheme, host, port, path, username, password
+\r
class ProgressDownload:
	"""Twisted-based file download with a progress callback.

	When the url carries user:password credentials, they are stripped from
	the url and passed as an HTTP basic-auth header instead; the body is
	streamed to outputfile via HTTPProgressDownloader.
	"""

	def __init__(self, url, outputfile, contextFactory=None, *args, **kwargs):
		scheme, host, port, path, username, password = _parse(url)
		if username and password:
			# Rebuild the url without the embedded credentials.
			url = scheme + '://' + host + ':' + str(port) + path
			basicAuth = encodestring("%s:%s" % (username, password))
			authHeader = "Basic " + basicAuth.strip()
			AuthHeaders = {"Authorization": authHeader}
			# "in" replaces dict.has_key(), which is non-idiomatic and was
			# removed in Python 3.
			if "headers" in kwargs:
				kwargs["headers"].update(AuthHeaders)
			else:
				kwargs["headers"] = AuthHeaders
		self.factory = HTTPProgressDownloader(url, outputfile, *args, **kwargs)
		self.connection = reactor.connectTCP(host, port, self.factory)

	def start(self):
		"""Start the transfer; returns the factory's deferred."""
		return self.factory.deferred

	def stop(self):
		"""Abort the transfer by dropping the TCP connection."""
		self.connection.disconnect()

	def addProgress(self, progress_callback):
		"""Register callback(recvbytes, totalbytes) for progress updates."""
		self.factory.progress_callback = progress_callback
+\r
+##############################################################################\r
+\r
def get(url):
	"""Fetch url with a GET request and return the body, or "" on any error."""
	try:
		data = urllib2.urlopen(url)
		try:
			return data.read()
		finally:
			data.close()  # don't leak the socket (original never closed it)
	except:
		return ""
+ \r
def post(url, data):
	"""POST data to url and return the response body, or "" on any error."""
	try:
		response = urllib2.urlopen(url, data)
		try:
			return response.read()
		finally:
			response.close()  # don't leak the socket (original never closed it)
	except:
		return ""
+\r
def matchGet(rex, string):
	"""Search string for rex and return the interesting part.

	Returns the whole match when the pattern has no capture group, the
	first capture group otherwise, and False when nothing matches.
	(The original fell off the end and implicitly returned None for
	patterns with two or more groups; all call sites in this file use at
	most one group, so normalizing that path is backward-compatible.)
	"""
	match = re.search(rex, string)
	if not match:
		return False
	if not match.groups():
		return match.group(0)
	return match.groups()[0]
+\r
+##############################################################################\r
+\r
def reconnect(host='fritz.box', port=49000):
	"""Ask a fritz.box router (UPnP/SOAP ForceTermination) to drop the WAN link.

	Rapidshare's free mode is rationed per IP address; forcing the router
	to reconnect usually yields a fresh one.  Errors are ignored on
	purpose -- not everybody owns a fritz.box.
	"""
	http_body = '\r\n'.join((
		'<?xml version="1.0" encoding="utf-8"?>',
		'<s:Envelope s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/" xmlns:s="http://schemas.xmlsoap.org/soap/envelope/">',
		' <s:Body>',
		' <u:ForceTermination xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1"/>',
		' </s:Body>',
		'</s:Envelope>'))
	http_data = '\r\n'.join((
		'POST /upnp/control/WANIPConn1 HTTP/1.1',
		'Host: %s:%d'%(host, port),
		'SoapAction: urn:schemas-upnp-org:service:WANIPConnection:1#ForceTermination',
		'Content-Type: text/xml; charset="utf-8"',
		'Content-Length: %d'%len(http_body),
		'',
		http_body))
	try:
		s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
		s.settimeout(10)  # never hang the box on an unreachable router
		try:
			s.connect((host, port))
			# sendall: plain send() may transmit only part of the request.
			s.sendall(http_data)
		finally:
			s.close()  # close even when connect/send fails (original leaked here)
	except:
		pass
+\r
+##############################################################################\r
+\r
class RSDownload:
	"""State machine for one queued download.

	The translated status string doubles as the state marker:
	_("Waiting") -> _("Checking")/_("Downloading") -> _("Finished") or
	_("Failed").  Three eTimers drive the transitions: the free-download
	wait, the post-transfer check, and the optional failed-auto-restart.
	"""

	def __init__(self, url):
		writeLog("Adding: %s"%url)
		self.url = url
		self.download = None  # active ProgressDownload, if any
		self.downloading = False
		self.progress = 0  # percent, 0..100
		self.size = 0  # size in MB, learned from the first progress event
		self.status = _("Waiting")
		self.name = self.url.split("/")[-1]

		self.freeDownloadUrl = ""
		# Fires after rapidshare's mandatory free-user waiting period.
		self.freeDownloadTimer = eTimer()
		self.freeDownloadTimer.callback.append(self.freeDownloadStart)
		# Fires 10s after a transfer ends to decide Finished vs Failed.
		self.checkTimer = eTimer()
		self.checkTimer.callback.append(self.doCheckTimer)
		# Optionally re-queues a failed download after a delay.
		self.restartFailedTimer = eTimer()
		self.restartFailedTimer.callback.append(self.restartFailedCheck)

		self.finishCallbacks = []  # invoked when the download completes

	def start(self):
		"""Begin downloading; picks free-RS, youtube or direct mode from the url."""
		writeLog("Downloading: %s"%self.url)
		self.downloading = True
		self.progress = 0
		self.size = 0
		username = config.plugins.RSDownloader.username.value
		password = config.plugins.RSDownloader.password.value
		if self.url.__contains__("rapidshare.com") and username == "" and password == "":
			# Free rapidshare mode: scrape the download form, honour the
			# enforced wait, then fetch via freeDownloadTimer.
			writeLog("Free RS-Download: %s"%self.url)
			self.status = _("Checking")
			if config.plugins.RSDownloader.reconnect_fritz.value:
				reconnect()
				sleep(3)
			data = get(self.url)
			url = matchGet('<form[^>]+action="([^"]+)', data)
			if not url:
				writeLog("Failed: %s"%self.url)
				self.httpFailed(True, "Failed to get download page url: %s"%self.url)
			else:
				data = post(url, "dl.start=Free")
				seconds = matchGet('var c=([0-9]+)', data)
				if not seconds:
					self.httpFailed(True, "Failed to get download page url: %s"%self.url)
				else:
					writeLog("Free RS-download... must wait %s seconds: %s"%(seconds, self.url))
					self.status = "%s %s"%(_("Waiting"), seconds)
					url = matchGet('"dlf" action="([^"]+)', data)
					if not url:
						self.httpFailed(True, "Failed to get download page url: %s"%self.url)
					else:
						self.freeDownloadUrl = url
						# +2s safety margin over the advertised wait time.
						self.freeDownloadTimer.start((int(seconds) + 2) * 1000, 1)
		elif self.url.__contains__("youtube.com"):
			writeLog("Getting youtube video link: %s"%self.url)
			self.status = _("Checking")
			downloadLink = self.getYoutubeDownloadLink()
			if downloadLink:
				self.status = _("Downloading")
				writeLog("Downloading video: %s"%downloadLink)
				req = urllib2.Request(downloadLink)
				url_handle = urllib2.urlopen(req)
				headers = url_handle.info()
				# Pick the container extension from the response content-type.
				if headers.getheader("content-type") == "video/mp4":
					ext = "mp4"
				else:
					ext = "flv"
				self.download = ProgressDownload(downloadLink, ("%s/%s.%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name, ext)).replace("//", "/"))
				self.download.addProgress(self.httpProgress)
				self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed)
			else:
				self.httpFailed(True, "Failed to get video url: %s"%self.url)
		else:
			# Premium rapidshare (credentials embedded in the url) or any
			# other plain-http host.
			if self.url.__contains__("rapidshare.com"):
				url = self.url.replace("http://", "http://" + username + ":" + password + "@")
			else:
				url = self.url
			self.status = _("Downloading")
			self.download = ProgressDownload(url, ("%s/%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name)).replace("//", "/").replace(".html", ""))
			self.download.addProgress(self.httpProgress)
			self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed)

	def freeDownloadStart(self):
		"""Timer callback: the free-user wait is over, fetch the real file."""
		self.status = _("Downloading")
		self.download = ProgressDownload(self.freeDownloadUrl, ("%s/%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name)).replace("//", "/").replace(".html", ""))
		self.download.addProgress(self.httpProgress)
		self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed)

	def stop(self):
		"""Abort a running transfer and reset to the waiting state."""
		self.progress = 0
		self.downloading = False
		self.status = _("Waiting")
		if self.download:
			writeLog("Stopping download: %s"%self.url)
			self.download.stop()

	def httpProgress(self, recvbytes, totalbytes):
		"""Progress callback from ProgressDownload (raw byte counts)."""
		if self.size == 0:
			self.size = int((totalbytes / 1024) / 1024)  # bytes -> MB
		self.progress = int(100.0 * float(recvbytes) / float(totalbytes))
		if self.progress == 100:
			writeLog("Finished: %s"%self.url)
			self.status = _("Finished")
			self.execFinishCallbacks()

	def httpFinished(self, string=""):
		"""Deferred callback: transfer ended; verify the result in 10s."""
		# NOTE(review): the default "" is not None, so this logs "Failed"
		# even on the normal success path — confirm intent upstream.
		if string is not None:
			writeLog("Failed: %s"%self.url)
			writeLog("Error: %s"%string)
		self.status = _("Checking")
		self.checkTimer.start(10000, 1)

	def doCheckTimer(self):
		"""Decide Failed vs Finished once the transfer has settled."""
		if self.size == 0:
			# Nothing was ever received -> failed.
			self.status = _("Failed")
			if config.plugins.RSDownloader.autorestart_failed.value:
				# 10000*60 ms = 10 minutes until the automatic restart.
				self.restartFailedTimer.start(10000*60, 1)
		elif self.progress == 100:
			self.status = _("Finished")
			self.downloading = False
			self.execFinishCallbacks()

	def restartFailedCheck(self):
		"""Timer callback: put a still-failed download back into the queue."""
		if self.status == _("Failed"): # check if user didn't restart already
			self.download = None
			self.status = _("Waiting")

	def execFinishCallbacks(self):
		"""Notify all registered finish listeners."""
		for x in self.finishCallbacks:
			x()

	def httpFailed(self, failure=None, error=""):
		"""Deferred errback and manual failure entry point."""
		if failure:
			if error == "":
				error = failure.getErrorMessage()
			# "[Errno 2]" (file/dir missing) is deliberately not logged.
			if error != "" and not error.startswith("[Errno 2]"):
				writeLog("Failed: %s"%self.url)
				writeLog("Error: %s"%error)
		self.status = _("Checking")
		self.checkTimer.start(10000, 1)

	def getYoutubeDownloadLink(self):
		"""Scrape the video page for a get_video url (fmt 22 HD / fmt 18 SD).

		Also updates self.name from the page <title>.  Returns the url,
		or None when the page layout was not understood.
		"""
		mrl = None
		html = get(self.url)
		if html != "":
			isHDAvailable = False
			video_id = None
			t = None
			reonecat = re.compile(r'<title>(.+?)</title>', re.DOTALL)
			titles = reonecat.findall(html)
			if titles:
				self.name = titles[0]
				if self.name.startswith("YouTube - "):
					# NOTE(review): replacing "&" with itself is a no-op —
					# probably meant to decode "&amp;"; confirm upstream.
					self.name = (self.name[10:]).replace("&", "&")
			if html.__contains__("isHDAvailable = true"):
				isHDAvailable = True
			for line in html.split('\n'):
				if 'swfArgs' in line:
					# Tokenize the flash-args line; values follow their keys.
					line = line.strip().split()
					x = 0
					for thing in line:
						if 'video_id' in thing:
							video_id = line[x+1][1:-2]
						elif '"t":' == thing:
							t = line[x+1][1:-2]
						x += 1
			if video_id and t:
				if isHDAvailable == True:
					mrl = "http://www.youtube.com/get_video?video_id=%s&t=%s&fmt=22" % (video_id, t)
				else:
					mrl = "http://www.youtube.com/get_video?video_id=%s&t=%s&fmt=18" % (video_id, t)
		return mrl
+\r
+##############################################################################\r
+\r
class RS:
	"""Global download manager.

	Owns the download queue, re-reads the *.txt list files and starts
	queued downloads whenever the configured schedule allows.  A single
	module-level instance is shared by all screens and the autostart hook.
	"""

	def __init__(self):
		self.downloads = []
		# Re-run startDownloading every 5000*60 ms = 5 minutes
		# (second eTimer argument False -> repeating, presumably).
		self.checkTimer = eTimer()
		self.checkTimer.callback.append(self.startDownloading)
		self.checkTimer.start(5000*60, False)

	def mayDownload(self):
		"""Return True when downloading is allowed right now.

		Honours the global on/off switch, the per-weekday flags and the
		start/end time window, which may span midnight.
		"""
		if config.plugins.RSDownloader.onoff.value == False:
			writeLog("RS Downloader is turned off...")
			return False
		elif config.plugins.RSDownloader.ignore_time.value:
			return True
		else:
			start = config.plugins.RSDownloader.start_time.value
			end = config.plugins.RSDownloader.end_time.value
			t = localtime()
			weekday = t[6]  # 0 = Monday
			if weekday == 0 and config.plugins.RSDownloader.download_monday.value == False:
				return False
			elif weekday == 1 and config.plugins.RSDownloader.download_tuesday.value == False:
				return False
			elif weekday == 2 and config.plugins.RSDownloader.download_wednesday.value == False:
				return False
			elif weekday == 3 and config.plugins.RSDownloader.download_thursday.value == False:
				return False
			elif weekday == 4 and config.plugins.RSDownloader.download_friday.value == False:
				return False
			elif weekday == 5 and config.plugins.RSDownloader.download_saturday.value == False:
				return False
			elif weekday == 6 and config.plugins.RSDownloader.download_sunday.value == False:
				return False
			else:
				hour_now = t[3]
				minute_now = t[4]
				hour_start = start[0]
				minute_start = start[1]
				hour_end = end[0]
				minute_end = end[1]
				if start == end: # Same start and end-time
					return True
				elif hour_end < hour_start: # Different days!!!
					if hour_now > hour_start or hour_now < hour_end:
						return True
					elif hour_now == hour_start and minute_now > minute_start:
						return True
					elif hour_now == hour_end and minute_now < minute_end:
						return True
					else:
						return False
				elif hour_now > hour_start and hour_now < hour_end: # Same day...
					return True
				elif hour_now == hour_start and minute_now > minute_start: # Same day, same start-hour...
					return True
				elif hour_now == hour_end and minute_now < minute_end: # Same day, same end-hour...
					return True
				else:
					return False

	def allDownloadsFinished(self):
		"""True when every queued download is either Failed or Finished."""
		allDone = True
		for download in self.downloads:
			if (download.status != _("Failed")) and (download.status != _("Finished")):
				allDone = False
		return allDone

	def startDownloading(self):
		"""Top up the set of running downloads, re-reading lists when idle."""
		if self.mayDownload() == True:
			if self.allDownloadsFinished() == True:
				self.readLists()
			downloadCount = 0
			for download in self.downloads:
				if download.downloading == True:
					downloadCount += 1 # Count the downloaded files
			if config.plugins.RSDownloader.username.value == "" and config.plugins.RSDownloader.password.value == "":
				if downloadCount < 1: # Allow one download if without account
					for download in self.downloads:
						# startswith: free-mode status may be "Waiting <secs>".
						if download.downloading == False and download.status.startswith(_("Waiting")):
							download.start() # Start first download in the list
							break
			else:
				mayDownloadCount = config.plugins.RSDownloader.count_downloads.value - downloadCount
				for download in self.downloads:
					if download.downloading == False:
						if mayDownloadCount > 0 and download.status == _("Waiting"):
							download.start()
							mayDownloadCount -= 1

	def addDownload(self, url):
		"""Queue url; returns False if it is already queued, True otherwise."""
		error = False
		for download in self.downloads:
			if download.url == url:
				error = True
		if error:
			return False
		else:
			download = RSDownload(url)
			download.finishCallbacks.append(self.cleanLists)
			self.downloads.append(download)
			return True

	def readLists(self):
		"""Queue every http:// line found in the *.txt files of the lists dir."""
		writeLog("Reading all lists...")
		path = config.plugins.RSDownloader.lists_directory.value
		if not path.endswith("/"):
			path = path + "/"
		writeLog("Directory: " + path)
		try:
			file_list = listdir(path)
			writeLog("Count of lists: " + str(len(file_list)))
		except:
			file_list = []
			writeLog("Could not find any list!")
		for x in file_list:
			list = path + x
			if list.endswith(".txt"):
				try:
					writeLog("Reading list %s..."%list)
					f = open(list, "r")
					count = 0
					for l in f:
						if l.startswith("http://"):
							if (self.addDownload(l.replace("\n", "").replace("\r", ""))) == True:
								count += 1
					f.close()
					if count == 0:
						writeLog("Empty list or downloads already in download list: %s"%list)
					else:
						writeLog("Added %d files from list %s..."%(count, list))
				except:
					writeLog("Error while reading list %s!"%list)
			else:
				writeLog("No *.txt file: %s!"%list)

	def cleanLists(self):
		"""Strip finished urls out of the list files, then refill the queue."""
		writeLog("Cleaning lists...")
		path = config.plugins.RSDownloader.lists_directory.value
		if not path.endswith("/"):
			path = path + "/"
		try:
			file_list = listdir(path)
		except:
			file_list = []
		for x in file_list:
			list = path + x
			try:
				f = open(list, "r")
				content = f.read()
				f.close()
				for download in self.downloads:
					if download.status == _("Finished") and content.__contains__(download.url):
						content = content.replace(download.url, "")
						content = content.replace("\n\n", "\n").replace("\r\r", "\r")
				f = open(list, "w")
				f.write(content)
				f.close()
			except:
				writeLog("Error while cleaning list %s!"%list)
		self.startDownloading()

	def removeDownload(self, url):
		"""Stop and drop the download for url, removing it from the list files."""
		tmp = []
		for download in self.downloads:
			if download.url == url:
				download.stop()
			else:
				tmp.append(download)
		del self.downloads
		self.downloads = tmp
		self.removeFromLists(url)

	def removeFromLists(self, url):
		"""Erase every occurrence of url from the *.txt list files."""
		path = config.plugins.RSDownloader.lists_directory.value
		if not path.endswith("/"):
			path = path + "/"
		try:
			file_list = listdir(path)
		except:
			file_list = []
		for x in file_list:
			list = path + x
			try:
				f = open(list, "r")
				content = f.read()
				f.close()
				if content.__contains__(url):
					content = content.replace(url, "")
					content = content.replace("\n\n", "\n").replace("\r\r", "\r")
					f = open(list, "w")
					f.write(content)
					f.close()
			except:
				pass

	def clearFinishedDownload(self, url):
		"""Drop the first queue entry matching url (no list-file cleanup)."""
		idx = 0
		for x in self.downloads:
			if x.url == url:
				del self.downloads[idx]
				break
			else:
				idx += 1

	def clearFinishedDownloads(self):
		"""Drop every entry whose status is Finished."""
		tmp = []
		for download in self.downloads:
			if download.status != _("Finished"):
				tmp.append(download)
		del self.downloads
		self.downloads = tmp

	def deleteFailedDownloads(self):
		"""Drop every Failed entry and scrub its url from the list files."""
		tmp = []
		for download in self.downloads:
			if download.status == _("Failed"):
				self.removeFromLists(download.url)
			else:
				tmp.append(download)
		del self.downloads
		self.downloads = tmp

	def restartFailedDownloads(self):
		"""Reset every Failed entry to Waiting and kick the scheduler."""
		tmp = []
		for download in self.downloads:
			if download.status == _("Failed"):
				download.download = None
				download.downloading = False
				download.progress = 0
				download.size = 0
				download.status = _("Waiting")
			tmp.append(download)
		del self.downloads
		self.downloads = tmp
		self.startDownloading()
+\r
# Singleton download manager shared by all screens and the autostart hook.
rapidshare = RS()
+\r
+##############################################################################\r
+\r
class ChangedScreen(Screen):
	"""Screen subclass that applies the plugin title once the layout is ready."""

	def __init__(self, session, parent=None):
		Screen.__init__(self, session, parent)
		self.onLayoutFinish.append(self.setScreenTitle)

	def setScreenTitle(self):
		# Layout-finish callback: the title can only be set after layouting.
		title = _("RS Downloader")
		self.setTitle(title)
+\r
+##############################################################################\r
+\r
class RSConfig(ConfigListScreen, ChangedScreen):
	"""Setup screen: edits all plugin options; green saves, cancel discards."""

	skin = """
		<screen position="center,center" size="560,450" title="RS Downloader">
			<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" transparent="1" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" transparent="1" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" transparent="1" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" transparent="1" alphatest="on" />
			<widget name="key_green" position="140,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
			<widget name="config" position="0,45" size="560,400" scrollbarMode="showOnDemand" />
		</screen>"""

	def __init__(self, session):
		ChangedScreen.__init__(self, session)

		self["key_green"] = Label(_("Save"))

		# One entry per option in config.plugins.RSDownloader.
		ConfigListScreen.__init__(self, [
			getConfigListEntry(_("Download in the background:"), config.plugins.RSDownloader.onoff),
			getConfigListEntry(_("Username:"), config.plugins.RSDownloader.username),
			getConfigListEntry(_("Password:"), config.plugins.RSDownloader.password),
			getConfigListEntry(_("Lists directory:"), config.plugins.RSDownloader.lists_directory),
			getConfigListEntry(_("Downloads directory:"), config.plugins.RSDownloader.downloads_directory),
			getConfigListEntry(_("Ignore download times:"), config.plugins.RSDownloader.ignore_time),
			getConfigListEntry(_("Allow downloading on monday:"), config.plugins.RSDownloader.download_monday),
			getConfigListEntry(_("Allow downloading on tuesday:"), config.plugins.RSDownloader.download_tuesday),
			getConfigListEntry(_("Allow downloading on wednesday:"), config.plugins.RSDownloader.download_wednesday),
			getConfigListEntry(_("Allow downloading on thursday:"), config.plugins.RSDownloader.download_thursday),
			getConfigListEntry(_("Allow downloading on friday:"), config.plugins.RSDownloader.download_friday),
			getConfigListEntry(_("Allow downloading on saturday:"), config.plugins.RSDownloader.download_saturday),
			getConfigListEntry(_("Allow downloading on sunday:"), config.plugins.RSDownloader.download_sunday),
			getConfigListEntry(_("Don't download before:"), config.plugins.RSDownloader.start_time),
			getConfigListEntry(_("Don't download after:"), config.plugins.RSDownloader.end_time),
			getConfigListEntry(_("Maximal downloads:"), config.plugins.RSDownloader.count_downloads),
			getConfigListEntry(_("Write log:"), config.plugins.RSDownloader.write_log),
			getConfigListEntry(_("Reconnect fritz.Box before downloading:"), config.plugins.RSDownloader.reconnect_fritz),
			getConfigListEntry(_("Restart failed after 10 minutes:"), config.plugins.RSDownloader.autorestart_failed)])

		self["actions"] = ActionMap(["OkCancelActions", "ColorActions"], {"green": self.save, "cancel": self.exit}, -1)

	def save(self):
		"""Persist every edited entry, then close the screen."""
		for x in self["config"].list:
			x[1].save()
		self.close()

	def exit(self):
		"""Discard changes (cancel each entry), then close the screen."""
		for x in self["config"].list:
			x[1].cancel()
		self.close()
+\r
+##############################################################################\r
+\r
class RSSearch(Screen):
	"""Search screen for rapidshare-search-engine.com.

	Queries the search engine, lists the hits, and appends the selected
	URL to the "search.txt" download list in the configured lists
	directory. Page switching is bound to the < > (history) keys.
	"""

	skin = """
		<screen position="center,center" size="560,450" title="Searching... please wait!">
			<widget name="list" position="0,0" size="570,450" scrollbarMode="showOnDemand" />
		</screen>"""

	def __init__(self, session, searchFor):
		Screen.__init__(self, session)
		self.session = session

		# The search engine expects spaces encoded as "%2B" in the query URL.
		self.searchFor = searchFor.replace(" ", "%2B")
		self.maxPage = 1
		self.curPage = 1
		self.files = []  # result URLs, parallel to the visible list entries

		self["list"] = MenuList([])

		self["actions"] = ActionMap(["OkCancelActions", "InfobarChannelSelection"],
			{
				"historyBack": self.previousPage,
				"historyNext": self.nextPage,
				"ok": self.okClicked,
				"cancel": self.close
			}, -1)

		self.onLayoutFinish.append(self.search)

	def okClicked(self):
		"""Append the currently selected hit to the search.txt download list."""
		if len(self.files) > 0:
			idx = self["list"].getSelectedIndex()
			url = self.files[idx]
			try:
				# "with" guarantees the file is closed even if the write fails.
				with open(("%s/search.txt" % config.plugins.RSDownloader.lists_directory.value).replace("//", "/"), "a") as f:
					f.write("%s\n" % url)
				self.session.open(MessageBox, (_("Added %s to the download-list.") % url), MessageBox.TYPE_INFO)
			except Exception:
				self.session.open(MessageBox, (_("Error while adding %s to the download-list!") % url), MessageBox.TYPE_ERROR)

	def search(self):
		"""Asynchronously fetch the result page for the current page number."""
		getPage("http://rapidshare-search-engine.com/index-s_submit=Search&sformval=1&s_type=0&what=1&s=%s&start=%d.html"%(self.searchFor, self.curPage)).addCallback(self.searchCallback).addErrback(self.searchError)

	def searchCallback(self, html=""):
		"""Scrape the result URLs and the page count out of the returned HTML."""
		entries = []
		files = []

		if "Nothing found, sorry." in html:
			self.session.open(MessageBox, (_("Error while searching http://rapidshare-search-engine.com!\n\nError: Nothing found, sorry.")), MessageBox.TYPE_ERROR)
			self.instance.setTitle(_("Nothing found, sorry."))
		else:
			# The highest page number referenced by a goPg('N') link tells
			# us how many result pages exist.
			tmp = html
			while "goPg('" in tmp:
				idx = tmp.index("goPg('")
				tmp = tmp[idx + 6:]
				idx = tmp.index("'")
				pageNumber = tmp[:idx]

				try:
					pageNumber = int(pageNumber)
					if pageNumber > self.maxPage:
						self.maxPage = pageNumber
				except ValueError:
					# Not a numeric page reference; ignore it.
					pass

			self.instance.setTitle(_("Page %d / %d. Push < > to switch the page...") % (self.curPage, self.maxPage))

			# Each hit is announced by a title="Download" element, followed
			# by a value="<size>" attribute and the rapidshare URL itself.
			while 'title="Download"' in html:
				idx = html.index('title="Download"')
				html = html[idx:]
				idx = html.index('value="')
				html = html[idx + 7:]
				idx = html.index('"')
				size = html[:idx]
				idx = html.index('http://rapidshare.com/')
				html = html[idx:]
				idx = html.index('"')
				url = html[:idx]

				files.append(url)
				try:
					# Show only the file name (last URL component) in the list.
					name = url.split("/")[-1]
					entries.append("%s - %s" % (size, name))
				except Exception:
					entries.append("%s - %s" % (size, url))

		self.files = files
		self["list"].setList(entries)

	def searchError(self, error=""):
		self.session.open(MessageBox, (_("Error while searching http://rapidshare-search-engine.com!\n\nError: %s") % str(error)), MessageBox.TYPE_ERROR)

	def previousPage(self):
		if self.curPage > 1:
			self.curPage -= 1
			self.instance.setTitle(_("Loading previous page... please wait!"))
			self.search()

	def nextPage(self):
		if self.curPage < self.maxPage:
			self.curPage += 1
			self.instance.setTitle(_("Loading next page... please wait!"))
			self.search()
+\r
+##############################################################################\r
+\r
class RSLogScreen(ChangedScreen):
	"""Displays the contents of /tmp/rapidshare.log in a scrollable label."""

	skin = """
		<screen position="center,center" size="560,450" title="RS Downloader">
			<widget name="label" position="0,0" size="560,450" font="Regular;20" />
		</screen>"""

	def __init__(self, session):
		ChangedScreen.__init__(self, session)

		# A missing or unreadable log simply shows an empty screen.
		try:
			with open("/tmp/rapidshare.log") as f:
				log = f.read()
		except IOError:
			log = ""
		self["label"] = ScrollLabel(log)

		self["actions"] = ActionMap(["WizardActions"],
			{
				"ok": self.close,
				"back": self.close,
				"up": self["label"].pageUp,
				"down": self["label"].pageDown,
				"left": self["label"].pageUp,
				"right": self["label"].pageDown
			}, -1)
+\r
+##############################################################################\r
+\r
class RSContainerSelector(ChangedScreen):
	"""Simple chooser: shows the given entries, closes with the selection."""

	skin = """
		<screen position="center,center" size="560,450" title="RS Downloader">
			<widget name="list" position="0,0" size="560,450" />
		</screen>"""

	def __init__(self, session, list):
		ChangedScreen.__init__(self, session)
		self["list"] = MenuList(list)
		self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)

	def okClicked(self):
		# Hand the highlighted entry back to the caller via the close callback.
		self.close(self["list"].getCurrent())
+\r
+##############################################################################\r
+\r
class RSList(MenuList):
	"""MenuList configured for the multi-content rows built by RSListEntry."""

	def __init__(self, list):
		MenuList.__init__(self, list, False, eListboxPythonMultiContent)
		self.l.setFont(0, gFont("Regular", 20))
		self.l.setItemHeight(25)
+\r
+##############################################################################\r
+\r
def RSListEntry(download):
	"""Build one multi-content listbox row describing *download*.

	The row shows name, size (MB), a progress bar, the percentage and the
	status text; element 0 is the download object itself so callers can
	retrieve it via getCurrent().
	"""
	progressBg = LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/progress_bg.png"))
	progressFg = LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/progress_small.png"))
	return [
		download,
		MultiContentEntryText(pos=(0, 0), size=(170, 25), font=0, text=download.name),
		MultiContentEntryText(pos=(175, 0), size=(75, 25), font=0, text="%d%s" % (download.size, "MB"), flags=RT_HALIGN_CENTER),
		MultiContentEntryPixmapAlphaTest(pos=(260, 9), size=(84, 7), png=progressBg),
		MultiContentEntryPixmapAlphaTest(pos=(260, 10), size=(int(0.84 * download.progress), 5), png=progressFg),
		MultiContentEntryText(pos=(360, 0), size=(60, 25), font=0, text="%d%s" % (download.progress, "%"), flags=RT_HALIGN_CENTER),
		MultiContentEntryText(pos=(420, 0), size=(140, 25), font=0, text=download.status, flags=RT_HALIGN_RIGHT)]
+\r
+##############################################################################\r
+\r
class RSMain(ChangedScreen):
	"""Main plugin screen: lists all downloads and offers management actions
	(delete, search, add, config) on the color keys plus a function menu."""

	skin = """
		<screen position="center,center" size="560,450" title="RS Downloader">
			<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" transparent="1" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" transparent="1" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" transparent="1" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" transparent="1" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/key_menu.png" position="10,420" size="35,25" transparent="1" alphatest="on" />
			<widget name="key_red" position="0,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
			<widget name="key_green" position="140,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
			<widget name="key_yellow" position="280,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
			<widget name="key_blue" position="420,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
			<widget name="key_menu" position="50,422" size="300,25" font="Regular;20" transparent="1" />
			<widget name="list" position="0,40" size="560,375" scrollbarMode="showNever" />
		</screen>"""

	def __init__(self, session):
		ChangedScreen.__init__(self, session)
		self.session = session

		self["key_red"] = Label(_("Delete"))
		self["key_green"] = Label(_("Search"))
		self["key_yellow"] = Label(_("Add"))
		self["key_blue"] = Label(_("Config"))
		self["key_menu"] = Label(_("Menu"))
		self["list"] = RSList([])

		# Periodic refresh so the progress columns stay current.
		self.refreshTimer = eTimer()
		self.refreshTimer.callback.append(self.updateList)

		self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "InfobarMenuActions"],
			{
				"mainMenu": self.menu,
				"cancel": self.close,
				"red": self.delete,
				"green": self.search,
				"yellow": self.add,
				"blue": self.config
			}, prio=-1)

		self.onLayoutFinish.append(self.updateList)

	def menu(self):
		"""Open the function menu as a ChoiceBox of (label, callable) pairs."""
		#TODO: Add sort list functions
		choices = [
			(_("Delete download"), self.delete),
			(_("Use search engine"), self.search),
			(_("Add downloads from txt files"), self.add),
			(_("Add files from container"), self.addContainer),
			(_("Delete failed downloads"), self.deleteFailed),
			(_("Restart failed downloads"), self.restartFailed),
			(_("Clear finished downloads"), self.clearFinished),
			(_("Show log"), self.showLog),
			(_("Delete log"), self.deleteLog),
			(_("Close plugin"), self.close)]
		self.session.openWithCallback(self.menuCallback, ChoiceBox, title=_("Please choose a function..."), list=choices)

	def menuCallback(self, callback=None):
		# ChoiceBox returns (label, callable) or None on cancel.
		if callback is not None:
			callback[1]()

	def deleteFailed(self):
		rapidshare.deleteFailedDownloads()

	def restartFailed(self):
		rapidshare.restartFailedDownloads()

	def clearFinished(self):
		rapidshare.clearFinishedDownloads()

	def showLog(self):
		self.session.open(RSLogScreen)

	def deleteLog(self):
		# Best effort: a missing log file is not an error.
		try:
			remove("/tmp/rapidshare.log")
		except OSError:
			pass

	def updateList(self):
		"""Rebuild the download rows and re-arm the 2-second refresh timer."""
		self["list"].setList([RSListEntry(download) for download in rapidshare.downloads])
		self.refreshTimer.start(2000, 1)

	def delete(self):
		"""Remove the selected download; finished ones are cleared silently."""
		cur = self["list"].getCurrent()
		if cur:
			cur = cur[0]
			if cur.status == _("Finished"):
				rapidshare.clearFinishedDownload(cur.url)
			else:
				self.session.openWithCallback(self.deleteCallback, MessageBox, (_("Delete %s?") % cur.name))

	def deleteCallback(self, callback):
		if callback:
			rapidshare.removeDownload(self["list"].getCurrent()[0].url)
			self.refreshTimer.stop()
			self.updateList()

	def search(self):
		self.session.openWithCallback(self.searchCallback, VirtualKeyBoard, title=_("Search http://rapidshare-search-engine.com for:"))

	def searchCallback(self, callback):
		if callback is not None and callback != "":
			self.session.openWithCallback(self.searchScreenCallback, RSSearch, callback)

	def searchScreenCallback(self):
		# Picked-up links live in search.txt; restart downloading to load them.
		self.refreshTimer.stop()
		rapidshare.startDownloading()
		self.updateList()

	def add(self):
		# NOTE(review): despite the name this only restarts downloading —
		# presumably new links are picked up from the txt files in the lists
		# directory on restart; confirm against rapidshare.startDownloading().
		self.refreshTimer.stop()
		rapidshare.startDownloading()
		self.updateList()

	def config(self):
		self.session.openWithCallback(self.configCallback, RSConfig)

	def configCallback(self):
		"""Apply the new on/off setting after the config screen closed."""
		if config.plugins.RSDownloader.onoff.value:
			rapidshare.startDownloading()
		else:
			for download in rapidshare.downloads:
				if download.downloading:
					download.stop()
		self.updateList()

	def addContainer(self):
		"""Let the user pick a .ccf/.dlc/.rsdf container from the lists directory."""
		try:
			file_list = listdir(config.plugins.RSDownloader.lists_directory.value)
		except OSError:
			file_list = []
		# endswith accepts a tuple of suffixes; one call covers all formats.
		containers = [name for name in file_list if name.lower().endswith((".ccf", ".dlc", ".rsdf"))]
		containers.sort()
		self.session.openWithCallback(self.addContainerCallback, RSContainerSelector, containers)

	def addContainerCallback(self, callback=None):
		"""Decrypt the chosen container and queue its links as a txt list."""
		if callback:
			path = "%s/%s" % (config.plugins.RSDownloader.lists_directory.value, callback)
			path = path.replace("//", "/")
			links = decrypt(path)
			try:
				with open(("%s/%s.txt" % (config.plugins.RSDownloader.lists_directory.value, callback)).replace("//", "/"), "w") as f:
					for link in links:
						# Strip a trailing .html/.htm so the raw file URL remains.
						if link.endswith(".html"):
							link = link[:-5]
						elif link.endswith(".htm"):
							link = link[:-4]
						f.write("%s\n" % link)
			except Exception:
				# Best effort: an unwritable list simply queues nothing.
				pass
			self.refreshTimer.stop()
			rapidshare.startDownloading()
			self.updateList()
+\r
+##############################################################################\r
+\r
def autostart(reason, **kwargs):
	"""Enigma2 autostart hook: start downloading at box start (reason 0)."""
	if reason != 0:
		return
	rapidshare.startDownloading()
+\r
+##############################################################################\r
+\r
def main(session, **kwargs):
	"""Plugin menu entry point: open the main downloader screen."""
	session.open(RSMain)
+\r
+##############################################################################\r
+\r
def Plugins(**kwargs):
	"""Register the autostart hook plus the extensions/plugin menu entries."""
	background = PluginDescriptor(where=PluginDescriptor.WHERE_AUTOSTART, fnc=autostart)
	menu = PluginDescriptor(
		name=_("RS Downloader"),
		description=_("Download files from rapidshare"),
		where=[PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_PLUGINMENU],
		icon="rs.png",
		fnc=main)
	return [background, menu]
+\r