From: Nabil Hanna
Date: Fri, 21 Aug 2009 07:31:04 +0000 (+0000)
Subject: -Bring back the Plugin.py... needed for DLC container
X-Git-Url: http://code.vuplus.com/gitweb/?a=commitdiff_plain;h=546a1b292456230d2caf62445f936b646f86ef8c;p=vuplus_dvbapp-plugin

-Bring back the Plugin.py... needed for DLC container
-Bring back linux line endings
-Move container stuff into its own directory. This should fix the CVS problem (plugin.py and Plugin.py) on Windows
---
diff --git a/configure.ac b/configure.ac
index 2107839..5100b7e 100644
--- a/configure.ac
+++ b/configure.ac
@@ -167,6 +167,7 @@ googlemaps/src/Makefile
 rsdownloader/Makefile
 rsdownloader/po/Makefile
 rsdownloader/src/Makefile
+rsdownloader/src/container/Makefile
 
 permanentclock/Makefile
 permanentclock/po/Makefile
diff --git a/rsdownloader/src/DLC.pyc b/rsdownloader/src/DLC.pyc
deleted file mode 100644
index 9b1e58d..0000000
Binary files a/rsdownloader/src/DLC.pyc and /dev/null differ
diff --git a/rsdownloader/src/Keepalive.py b/rsdownloader/src/Keepalive.py
deleted file mode 100644
index b86d488..0000000
--- a/rsdownloader/src/Keepalive.py
+++ /dev/null
@@ -1,620 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the
-#      Free Software Foundation, Inc.,
-#      59 Temple Place, Suite 330,
-#      Boston, MA 02111-1307 USA
-
-# This file is part of urlgrabber, a high-level cross-protocol url-grabber
-# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
-
-"""An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
-
->>> import urllib2
->>> from keepalive import HTTPHandler
->>> keepalive_handler = HTTPHandler()
->>> opener = urllib2.build_opener(keepalive_handler)
->>> urllib2.install_opener(opener)
->>>
->>> fo = urllib2.urlopen('http://www.python.org')
-
-If a connection to a given host is requested, and all of the existing
-connections are still in use, another connection will be opened. If
-the handler tries to use an existing connection but it fails in some
-way, it will be closed and removed from the pool.
-
-To remove the handler, simply re-run build_opener with no arguments, and
-install that opener.
-
-You can explicitly close connections by using the close_connection()
-method of the returned file-like object (described below) or you can
-use the handler methods:
-
-  close_connection(host)
-  close_all()
-  open_connections()
-
-NOTE: using the close_connection and close_all methods of the handler
-should be done with care when using multiple threads.
- * there is nothing that prevents another thread from creating new
-   connections immediately after connections are closed
- * no checks are done to prevent in-use connections from being closed
-
->>> keepalive_handler.close_all()
-
-EXTRA ATTRIBUTES AND METHODS
-
-  Upon a status of 200, the object returned has a few additional
-  attributes and methods, which should not be used if you want to
-  remain consistent with the normal urllib2-returned objects:
-
-    close_connection()  -  close the connection to the host
-    readlines()         -  you know, readlines()
-    status              -  the return status (ie 404)
-    reason              -  english translation of status (ie 'File not found')
-
-  If you want the best of both worlds, use this inside an
-  AttributeError-catching try:
-
-  >>> try: status = fo.status
-  >>> except AttributeError: status = None
-
-  Unfortunately, these are ONLY there if status == 200, so it's not
-  easy to distinguish between non-200 responses. The reason is that
-  urllib2 tries to do clever things with error codes 301, 302, 401,
-  and 407, and it wraps the object upon return.
-
-  For python versions earlier than 2.4, you can avoid this fancy error
-  handling by setting the module-level global HANDLE_ERRORS to zero.
-  You see, prior to 2.4, it's the HTTP Handler's job to determine what
-  to handle specially, and what to just pass up. HANDLE_ERRORS == 0
-  means "pass everything up". In python 2.4, however, this job no
-  longer belongs to the HTTP Handler and is now done by a NEW handler,
-  HTTPErrorProcessor. Here's the bottom line:
-
-    python version < 2.4
-        HANDLE_ERRORS == 1  (default) pass up 200, treat the rest as
-                            errors
-        HANDLE_ERRORS == 0  pass everything up, error processing is
-                            left to the calling code
-    python version >= 2.4
-        HANDLE_ERRORS == 1  pass up 200, treat the rest as errors
-        HANDLE_ERRORS == 0  (default) pass everything up, let the
-                            other handlers (specifically,
-                            HTTPErrorProcessor) decide what to do
-
-  In practice, setting the variable either way makes little difference
-  in python 2.4, so for the most consistent behavior across versions,
-  you probably just want to use the defaults, which will give you
-  exceptions on errors.
- -""" - -# $Id$ - -import urllib2 -import httplib -import socket -import thread - -DEBUG = None - -import sslfactory - -import sys -if sys.version_info < (2, 4): HANDLE_ERRORS = 1 -else: HANDLE_ERRORS = 0 - -class ConnectionManager: - """ - The connection manager must be able to: - * keep track of all existing - """ - def __init__(self): - self._lock = thread.allocate_lock() - self._hostmap = {} # map hosts to a list of connections - self._connmap = {} # map connections to host - self._readymap = {} # map connection to ready state - - def add(self, host, connection, ready): - self._lock.acquire() - try: - if not self._hostmap.has_key(host): self._hostmap[host] = [] - self._hostmap[host].append(connection) - self._connmap[connection] = host - self._readymap[connection] = ready - finally: - self._lock.release() - - def remove(self, connection): - self._lock.acquire() - try: - try: - host = self._connmap[connection] - except KeyError: - pass - else: - del self._connmap[connection] - del self._readymap[connection] - self._hostmap[host].remove(connection) - if not self._hostmap[host]: del self._hostmap[host] - finally: - self._lock.release() - - def set_ready(self, connection, ready): - try: self._readymap[connection] = ready - except KeyError: pass - - def get_ready_conn(self, host): - conn = None - self._lock.acquire() - try: - if self._hostmap.has_key(host): - for c in self._hostmap[host]: - if self._readymap[c]: - self._readymap[c] = 0 - conn = c - break - finally: - self._lock.release() - return conn - - def get_all(self, host=None): - if host: - return list(self._hostmap.get(host, [])) - else: - return dict(self._hostmap) - -class KeepAliveHandler: - def __init__(self): - self._cm = ConnectionManager() - - #### Connection Management - def open_connections(self): - """return a list of connected hosts and the number of connections - to each. [('foo.com:80', 2), ('bar.org', 1)]""" - return [(host, len(li)) for (host, li) in self._cm.get_all().items()] - - def close_connection(self, host): - """close connection(s) to - host is the host:port spec, as in 'www.cnn.com:8080' as passed in. - no error occurs if there is no connection to that host.""" - for h in self._cm.get_all(host): - self._cm.remove(h) - h.close() - - def close_all(self): - """close all open connections""" - for host, conns in self._cm.get_all().items(): - for h in conns: - self._cm.remove(h) - h.close() - - def _request_closed(self, request, host, connection): - """tells us that this request is now closed and the the - connection is ready for another request""" - self._cm.set_ready(connection, 1) - - def _remove_connection(self, host, connection, close=0): - if close: connection.close() - self._cm.remove(connection) - - #### Transaction Execution - def do_open(self, req): - host = req.get_host() - if not host: - raise urllib2.URLError('no host given') - - try: - h = self._cm.get_ready_conn(host) - while h: - r = self._reuse_connection(h, req, host) - - # if this response is non-None, then it worked and we're - # done. Break out, skipping the else block. - if r: break - - # connection is bad - possibly closed by server - # discard it and ask for the next free connection - h.close() - self._cm.remove(h) - h = self._cm.get_ready_conn(host) - else: - # no (working) free connections were found. Create a new one. 
- h = self._get_connection(host) - if DEBUG: DEBUG.info("creating new connection to %s (%d)", - host, id(h)) - self._cm.add(host, h, 0) - self._start_transaction(h, req) - r = h.getresponse() - except (socket.error, httplib.HTTPException), err: - raise urllib2.URLError(err) - - # if not a persistent connection, don't try to reuse it - if r.will_close: self._cm.remove(h) - - if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason) - r._handler = self - r._host = host - r._url = req.get_full_url() - r._connection = h - r.code = r.status - r.headers = r.msg - r.msg = r.reason - - if r.status == 200 or not HANDLE_ERRORS: - return r - else: - return self.parent.error('http', req, r, - r.status, r.msg, r.headers) - - def _reuse_connection(self, h, req, host): - """start the transaction with a re-used connection - return a response object (r) upon success or None on failure. - This DOES not close or remove bad connections in cases where - it returns. However, if an unexpected exception occurs, it - will close and remove the connection before re-raising. - """ - try: - self._start_transaction(h, req) - r = h.getresponse() - # note: just because we got something back doesn't mean it - # worked. We'll check the version below, too. - except (socket.error, httplib.HTTPException): - r = None - except: - # adding this block just in case we've missed - # something we will still raise the exception, but - # lets try and close the connection and remove it - # first. We previously got into a nasty loop - # where an exception was uncaught, and so the - # connection stayed open. On the next try, the - # same exception was raised, etc. The tradeoff is - # that it's now possible this call will raise - # a DIFFERENT exception - if DEBUG: DEBUG.error("unexpected exception - closing " + \ - "connection to %s (%d)", host, id(h)) - self._cm.remove(h) - h.close() - raise - - if r is None or r.version == 9: - # httplib falls back to assuming HTTP 0.9 if it gets a - # bad header back. This is most likely to happen if - # the socket has been closed by the server since we - # last used the connection. 
- if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)", - host, id(h)) - r = None - else: - if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h)) - - return r - - def _start_transaction(self, h, req): - try: - if req.has_data(): - data = req.get_data() - h.putrequest('POST', req.get_selector(), skip_accept_encoding=1) - if not req.headers.has_key('Content-type'): - h.putheader('Content-type', - 'application/x-www-form-urlencoded') - if not req.headers.has_key('Content-length'): - h.putheader('Content-length', '%d' % len(data)) - else: - h.putrequest('GET', req.get_selector(), skip_accept_encoding=1) - except (socket.error, httplib.HTTPException), err: - raise urllib2.URLError(err) - - for args in self.parent.addheaders: - h.putheader(*args) - for k, v in req.headers.items(): - h.putheader(k, v) - h.endheaders() - if req.has_data(): - h.send(data) - - def _get_connection(self, host): - return NotImplementedError - -class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler): - def __init__(self): - KeepAliveHandler.__init__(self) - - def http_open(self, req): - return self.do_open(req) - - def _get_connection(self, host): - return HTTPConnection(host) - -class HTTPSHandler(KeepAliveHandler, urllib2.HTTPSHandler): - def __init__(self, ssl_factory=None): - KeepAliveHandler.__init__(self) - if not ssl_factory: - ssl_factory = sslfactory.get_factory() - self._ssl_factory = ssl_factory - - def https_open(self, req): - return self.do_open(req) - - def _get_connection(self, host): - return self._ssl_factory.get_https_connection(host) - -class HTTPResponse(httplib.HTTPResponse): - # we need to subclass HTTPResponse in order to - # 1) add readline() and readlines() methods - # 2) add close_connection() methods - # 3) add info() and geturl() methods - - # in order to add readline(), read must be modified to deal with a - # buffer. example: readline must read a buffer and then spit back - # one line at a time. The only real alternative is to read one - # BYTE at a time (ick). Once something has been read, it can't be - # put back (ok, maybe it can, but that's even uglier than this), - # so if you THEN do a normal read, you must first take stuff from - # the buffer. - - # the read method wraps the original to accomodate buffering, - # although read() never adds to the buffer. - # Both readline and readlines have been stolen with almost no - # modification from socket.py - - - def __init__(self, sock, debuglevel=0, strict=0, method=None): - if method: # the httplib in python 2.3 uses the method arg - httplib.HTTPResponse.__init__(self, sock, debuglevel, method) - else: # 2.2 doesn't - httplib.HTTPResponse.__init__(self, sock, debuglevel) - self.fileno = sock.fileno - self.code = None - self._rbuf = '' - self._rbufsize = 8096 - self._handler = None # inserted by the handler later - self._host = None # (same) - self._url = None # (same) - self._connection = None # (same) - - _raw_read = httplib.HTTPResponse.read - - def close(self): - if self.fp: - self.fp.close() - self.fp = None - if self._handler: - self._handler._request_closed(self, self._host, - self._connection) - - def close_connection(self): - self._handler._remove_connection(self._host, self._connection, close=1) - self.close() - - def info(self): - return self.headers - - def geturl(self): - return self._url - - def read(self, amt=None): - # the _rbuf test is only in this first if for speed. 
It's not - # logically necessary - if self._rbuf and not amt is None: - L = len(self._rbuf) - if amt > L: - amt -= L - else: - s = self._rbuf[:amt] - self._rbuf = self._rbuf[amt:] - return s - - s = self._rbuf + self._raw_read(amt) - self._rbuf = '' - return s - - def readline(self, limit=-1): - data = "" - i = self._rbuf.find('\n') - while i < 0 and not (0 < limit <= len(self._rbuf)): - new = self._raw_read(self._rbufsize) - if not new: break - i = new.find('\n') - if i >= 0: i = i + len(self._rbuf) - self._rbuf = self._rbuf + new - if i < 0: i = len(self._rbuf) - else: i = i+1 - if 0 <= limit < len(self._rbuf): i = limit - data, self._rbuf = self._rbuf[:i], self._rbuf[i:] - return data - - def readlines(self, sizehint = 0): - total = 0 - list = [] - while 1: - line = self.readline() - if not line: break - list.append(line) - total += len(line) - if sizehint and total >= sizehint: - break - return list - - -class HTTPConnection(httplib.HTTPConnection): - # use the modified response class - response_class = HTTPResponse - -class HTTPSConnection(httplib.HTTPSConnection): - response_class = HTTPResponse - -######################################################################### -##### TEST FUNCTIONS -######################################################################### - -def error_handler(url): - global HANDLE_ERRORS - orig = HANDLE_ERRORS - keepalive_handler = HTTPHandler() - opener = urllib2.build_opener(keepalive_handler) - urllib2.install_opener(opener) - pos = {0: 'off', 1: 'on'} - for i in (0, 1): - print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i) - HANDLE_ERRORS = i - try: - fo = urllib2.urlopen(url) - foo = fo.read() - fo.close() - try: status, reason = fo.status, fo.reason - except AttributeError: status, reason = None, None - except IOError, e: - print " EXCEPTION: %s" % e - raise - else: - print " status = %s, reason = %s" % (status, reason) - HANDLE_ERRORS = orig - hosts = keepalive_handler.open_connections() - print "open connections:", hosts - keepalive_handler.close_all() - -def continuity(url): - import md5 - format = '%25s: %s' - - # first fetch the file with the normal http handler - opener = urllib2.build_opener() - urllib2.install_opener(opener) - fo = urllib2.urlopen(url) - foo = fo.read() - fo.close() - m = md5.new(foo) - print format % ('normal urllib', m.hexdigest()) - - # now install the keepalive handler and try again - opener = urllib2.build_opener(HTTPHandler()) - urllib2.install_opener(opener) - - fo = urllib2.urlopen(url) - foo = fo.read() - fo.close() - m = md5.new(foo) - print format % ('keepalive read', m.hexdigest()) - - fo = urllib2.urlopen(url) - foo = '' - while 1: - f = fo.readline() - if f: foo = foo + f - else: break - fo.close() - m = md5.new(foo) - print format % ('keepalive readline', m.hexdigest()) - -def comp(N, url): - print ' making %i connections to:\n %s' % (N, url) - - sys.stdout.write(' first using the normal urllib handlers') - # first use normal opener - opener = urllib2.build_opener() - urllib2.install_opener(opener) - t1 = fetch(N, url) - print ' TIME: %.3f s' % t1 - - sys.stdout.write(' now using the keepalive handler ') - # now install the keepalive handler and try again - opener = urllib2.build_opener(HTTPHandler()) - urllib2.install_opener(opener) - t2 = fetch(N, url) - print ' TIME: %.3f s' % t2 - print ' improvement factor: %.2f' % (t1/t2, ) - -def fetch(N, url, delay=0): - import time - lens = [] - starttime = time.time() - for i in range(N): - if delay and i > 0: time.sleep(delay) - fo = 
urllib2.urlopen(url) - foo = fo.read() - fo.close() - lens.append(len(foo)) - diff = time.time() - starttime - - j = 0 - for i in lens[1:]: - j = j + 1 - if not i == lens[0]: - print "WARNING: inconsistent length on read %i: %i" % (j, i) - - return diff - -def test_timeout(url): - global DEBUG - dbbackup = DEBUG - class FakeLogger: - def debug(self, msg, *args): print msg % args - info = warning = error = debug - DEBUG = FakeLogger() - print " fetching the file to establish a connection" - fo = urllib2.urlopen(url) - data1 = fo.read() - fo.close() - - i = 20 - print " waiting %i seconds for the server to close the connection" % i - while i > 0: - sys.stdout.write('\r %2i' % i) - sys.stdout.flush() - time.sleep(1) - i -= 1 - sys.stderr.write('\r') - - print " fetching the file a second time" - fo = urllib2.urlopen(url) - data2 = fo.read() - fo.close() - - if data1 == data2: - print ' data are identical' - else: - print ' ERROR: DATA DIFFER' - - DEBUG = dbbackup - - -def test(url, N=10): - print "checking error hander (do this on a non-200)" - try: error_handler(url) - except IOError, e: - print "exiting - exception will prevent further tests" - sys.exit() - print - print "performing continuity test (making sure stuff isn't corrupted)" - continuity(url) - print - print "performing speed comparison" - comp(N, url) - print - print "performing dropped-connection check" - test_timeout(url) - -if __name__ == '__main__': - import time - import sys - try: - N = int(sys.argv[1]) - url = sys.argv[2] - except: - print "%s " % sys.argv[0] - else: - test(url, N) \ No newline at end of file diff --git a/rsdownloader/src/Makefile.am b/rsdownloader/src/Makefile.am index 3e3a6e9..ca7c2c7 100644 --- a/rsdownloader/src/Makefile.am +++ b/rsdownloader/src/Makefile.am @@ -1,4 +1,4 @@ +SUBDIRS = container installdir = /usr/lib/enigma2/python/Plugins/Extensions/RSDownloader - install_PYTHON = *.py -install_DATA = *.png *.info *.txt DLC.pyc +install_DATA = rs.png maintainer.info LICENSE.txt diff --git a/rsdownloader/src/MultipartPostHandler.py b/rsdownloader/src/MultipartPostHandler.py deleted file mode 100644 index 6804bcc..0000000 --- a/rsdownloader/src/MultipartPostHandler.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -#### -# 02/2006 Will Holcomb -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# 7/26/07 Slightly modified by Brian Schneider -# in order to support unicode files ( multipart_encode function ) -""" -Usage: - Enables the use of multipart/form-data for posting forms - -Inspirations: - Upload files in python: - http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306 - urllib2_file: - Fabien Seisen: - -Example: - import MultipartPostHandler, urllib2, cookielib - - cookies = cookielib.CookieJar() - opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies), - MultipartPostHandler.MultipartPostHandler) - params = { "username" : "bob", "password" : "riviera", - "file" : open("filename", "rb") } - opener.open("http://wwww.bobsite.com/upload/", params) - -Further Example: - The main function of this file is a sample which downloads a page and - then uploads it to the W3C validator. -""" - -import urllib -import urllib2 -import mimetools, mimetypes -import os, stat -from cStringIO import StringIO - -class Callable: - def __init__(self, anycallable): - self.__call__ = anycallable - -# Controls how sequences are uncoded. If true, elements may be given multiple values by -# assigning a sequence. -doseq = 1 - -class MultipartPostHandler(urllib2.BaseHandler): - handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first - - def http_request(self, request): - data = request.get_data() - if data is not None and type(data) != str: - v_files = [] - v_vars = [] - try: - for(key, value) in data.items(): - if type(value) == file: - v_files.append((key, value)) - else: - v_vars.append((key, value)) - except TypeError: - systype, value, traceback = sys.exc_info() - raise TypeError, "not a valid non-string sequence or mapping object", traceback - - if len(v_files) == 0: - data = urllib.urlencode(v_vars, doseq) - else: - boundary, data = self.multipart_encode(v_vars, v_files) - - contenttype = 'multipart/form-data; boundary=%s' % boundary - if(request.has_header('Content-Type') - and request.get_header('Content-Type').find('multipart/form-data') != 0): - print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data') - request.add_unredirected_header('Content-Type', contenttype) - - request.add_data(data) - - return request - - def multipart_encode(vars, files, boundary = None, buf = None): - if boundary is None: - boundary = mimetools.choose_boundary() - if buf is None: - buf = StringIO() - for(key, value) in vars: - buf.write('--%s\r\n' % boundary) - buf.write('Content-Disposition: form-data; name="%s"' % key) - buf.write('\r\n\r\n' + value + '\r\n') - for(key, fd) in files: - file_size = os.fstat(fd.fileno())[stat.ST_SIZE] - filename = fd.name.split('/')[-1] - contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream' - buf.write('--%s\r\n' % boundary) - buf.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename)) - buf.write('Content-Type: %s\r\n' % contenttype) - # buffer += 'Content-Length: %s\r\n' % file_size - fd.seek(0) - buf.write('\r\n' + fd.read() + '\r\n') - buf.write('--' + boundary + '--\r\n\r\n') - buf = buf.getvalue() - return boundary, buf - multipart_encode = Callable(multipart_encode) - - https_request = http_request - -def main(): - import tempfile, sys - - validatorURL = "http://validator.w3.org/check" - opener = urllib2.build_opener(MultipartPostHandler) - - def validateFile(url): - temp = tempfile.mkstemp(suffix=".html") - os.write(temp[0], opener.open(url).read()) - params = { "ss" : "0", # show source - "doctype" : "Inline", - "uploaded_file" : 
open(temp[1], "rb") } - print opener.open(validatorURL, params).read() - os.remove(temp[1]) - - if len(sys.argv[1:]) > 0: - for arg in sys.argv[1:]: - validateFile(arg) - else: - validateFile("http://www.google.com") - -if __name__=="__main__": - main() \ No newline at end of file diff --git a/rsdownloader/src/Request.py b/rsdownloader/src/Request.py deleted file mode 100644 index 30ffbcd..0000000 --- a/rsdownloader/src/Request.py +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - - -""" -authored by: RaNaN -""" -import base64 -import cookielib -import time -import urllib -import urllib2 -from gzip import GzipFile - -from Keepalive import HTTPHandler -from cStringIO import StringIO - -""" - handles all outgoing HTTP-Requests of the Server - Usage: create Request Instance - use retrieveURL and call it with a url at least - additionaly you can firstly pass the get and secondly the post data in form of a dictonary - when the last argument is true the handler simulate a http referer with the last called url. - retrieveUrl returns response as string - -""" -class AbortDownload(Exception): - pass - -class Request: - def __init__(self): - - self.dl_time = 0 - self.dl_finished = 0 - self.dl_size = 0 - self.dl_arrived = 0 - self.dl = False - - self.abort = False - - self.cookies = [] - self.lastURL = None - self.cj = cookielib.CookieJar() - handler = HTTPHandler() - self.opener = urllib2.build_opener(handler, urllib2.HTTPCookieProcessor(self.cj)) - self.downloader = urllib2.build_opener() - #self.opener.add_handler() - - self.opener.addheaders = [ - ("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"), - ("Accept-Encoding", "deflate"), - ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), - ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7"), - ("Connection", "keep-alive"), - ("Keep-Alive", "300")] - - self.downloader.addheaders = [ - ("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"), - ("Accept-Encoding", "deflate"), - ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), - ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7")] - - - def load(self, url, get={}, post={}, ref=True, cookies=False): - - if post: - post = urllib.urlencode(post) - else: - post = None - - if get: - get = urllib.urlencode(get) - else: - get = "" - - url = url + get - req = urllib2.Request(url, data=post) - - if ref and self.lastURL is not None: - req.add_header("Referer", self.lastURL) - - if cookies: - self.add_cookies(req) - #add cookies - - rep = self.opener.open(req) - - for cookie in self.cj.make_cookies(rep, req): - self.cookies.append(cookie) - - output = rep.read() - - if rep.headers.has_key("content-encoding"): - if rep.headers["content-encoding"] == "gzip": - output = GzipFile('', 'r', 0, StringIO(output)).read() - - self.lastURL = url - - return output - - def add_auth(self, user, pw): - self.downloader.addheaders.append(['Authorization', 'Basic ' + base64.encodestring(user + ':' + pw)[:-1]]) - - def add_cookies(self, req): - cookie_head = "" - for cookie in self.cookies: - cookie_head += cookie.name + "=" + cookie.value + "; " - req.add_header("Cookie", cookie_head) - #def download(url, filename, reporthook = None, data = None): #default von urlretrieve auch None? 
-    #    return self.downloader.urlretrieve(url, filename, reporthook, data)
-
-    def clear_cookies(self):
-        del self.cookies[:]
-
-    def add_proxy(self, protocol, adress):
-        handler = urllib2.ProxyHandler({protocol: adress})
-        self.opener.add_handler(handler)
-        self.downloader.add_handler(handler)
-
-    def download(self, url, filename, get={}, post={}, ref=True, cookies=False):
-
-        if post:
-            post = urllib.urlencode(post)
-        else:
-            post = None
-
-        if get:
-            get = urllib.urlencode(get)
-        else:
-            get = ""
-
-        url = url + get
-        req = urllib2.Request(url, data=post)
-
-        if ref and self.lastURL is not None:
-            req.add_header("Referer", self.lastURL)
-
-        if cookies:
-            self.add_cookies(req)
-            #add cookies
-        rep = self.opener.open(req)
-
-        for cookie in self.cj.make_cookies(rep, req):
-            self.cookies.append(cookie)
-
-        if not self.dl:
-            self.dl = True
-            file = open(filename, 'wb')
-            conn = self.downloader.open(req, post)
-            if conn.headers.has_key("content-length"):
-                self.dl_size = int(conn.headers["content-length"])
-            else:
-                self.dl_size = 0
-            self.dl_arrived = 0
-            self.dl_time = time.time()
-            for chunk in conn:
-                if self.abort: raise AbortDownload
-                self.dl_arrived += len(chunk)
-                file.write(chunk)
-
-            file.close()
-            self.dl = False
-            self.dl_finished = time.time()
-            return True
-
-    def get_speed(self):
-        try:
-            return (self.dl_arrived / ((time.time() if self.dl else self.dl_finished) - self.dl_time)) / 1024
-        except:
-            return 0
-
-    def get_ETA(self):
-        try:
-            return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
-        except:
-            return 0
-
-    def kB_left(self):
-        return (self.dl_size - self.dl_arrived) / 1024
-
-if __name__ == "__main__":
-    import doctest
-    doctest.testmod()
diff --git a/rsdownloader/src/container/DLC.pyc b/rsdownloader/src/container/DLC.pyc
new file mode 100644
index 0000000..9b1e58d
Binary files /dev/null and b/rsdownloader/src/container/DLC.pyc differ
diff --git a/rsdownloader/src/container/Keepalive.py b/rsdownloader/src/container/Keepalive.py
new file mode 100644
index 0000000..b86d488
--- /dev/null
+++ b/rsdownloader/src/container/Keepalive.py
@@ -0,0 +1,620 @@
[620 added lines omitted — identical to the rsdownloader/src/Keepalive.py deleted above (same blob, b86d488)]
diff --git a/rsdownloader/src/container/Makefile.am b/rsdownloader/src/container/Makefile.am
new file mode 100644
index 0000000..d16302e
--- /dev/null
+++ b/rsdownloader/src/container/Makefile.am
@@ -0,0 +1,4 @@
+installdir = /usr/lib/enigma2/python/Plugins/Extensions/RSDownloader/container
+
+install_PYTHON = *.py
+install_DATA = DLC.pyc
diff --git a/rsdownloader/src/container/MultipartPostHandler.py b/rsdownloader/src/container/MultipartPostHandler.py
new file mode 100644
index 0000000..6804bcc
--- /dev/null
+++ b/rsdownloader/src/container/MultipartPostHandler.py
@@ -0,0 +1,139 @@
[139 added lines omitted — identical to the rsdownloader/src/MultipartPostHandler.py deleted above (same blob, 6804bcc)]
diff --git a/rsdownloader/src/container/Plugin.py b/rsdownloader/src/container/Plugin.py
new file mode 100644
index 0000000..c3b002c
--- /dev/null
+++ b/rsdownloader/src/container/Plugin.py
@@ -0,0 +1,6 @@
+from Request import Request
+
+class Plugin():
+    def __init__(self, parent):
+        self.req = Request()
+
diff --git a/rsdownloader/src/container/Request.py b/rsdownloader/src/container/Request.py
new file mode 100644
index 0000000..30ffbcd
--- /dev/null
+++ b/rsdownloader/src/container/Request.py
@@ -0,0 +1,183 @@
[183 added lines omitted — identical to the rsdownloader/src/Request.py deleted above (same blob, 30ffbcd)]
diff --git a/rsdownloader/src/container/__init__.py b/rsdownloader/src/container/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/rsdownloader/src/container/sslfactory.py b/rsdownloader/src/container/sslfactory.py
new file mode 100644
index 0000000..14903cd
--- /dev/null
+++ b/rsdownloader/src/container/sslfactory.py
@@ -0,0 +1,89 @@
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
diff --git a/rsdownloader/src/container/__init__.py b/rsdownloader/src/container/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/rsdownloader/src/container/sslfactory.py b/rsdownloader/src/container/sslfactory.py new file mode 100644 index 0000000..14903cd --- /dev/null +++ b/rsdownloader/src/container/sslfactory.py @@ -0,0 +1,89 @@ +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the +# Free Software Foundation, Inc., +# 59 Temple Place, Suite 330, +# Boston, MA 02111-1307 USA + +# This file is part of urlgrabber, a high-level cross-protocol url-grabber + +import httplib +import urllib2 + +try: + from M2Crypto import SSL + from M2Crypto import httpslib + from M2Crypto import m2urllib2 + + have_m2crypto = True +except ImportError: + have_m2crypto = False + +DEBUG = None + +if have_m2crypto: + + class M2SSLFactory: + + def __init__(self, ssl_ca_cert, ssl_context): + self.ssl_context = self._get_ssl_context(ssl_ca_cert, ssl_context) + + def _get_ssl_context(self, ssl_ca_cert, ssl_context): + """ + Create an ssl context using the CA cert file or ssl context. + + The CA cert is used first if it was passed as an option. If not, + then the supplied ssl context is used. If no ssl context was supplied, + None is returned. + """ + if ssl_ca_cert: + context = SSL.Context() + context.load_verify_locations(ssl_ca_cert) + context.set_verify(SSL.verify_peer, -1) + return context + else: + return ssl_context + + def create_https_connection(self, host, response_class = None): + connection = httplib.HTTPSConnection(host, self.ssl_context) + if response_class: + connection.response_class = response_class + return connection + + def create_opener(self, *handlers): + return m2urllib2.build_opener(self.ssl_context, *handlers) + + +class SSLFactory: + + def create_https_connection(self, host, response_class = None): + connection = httplib.HTTPSConnection(host) + if response_class: + connection.response_class = response_class + return connection + + def create_opener(self, *handlers): + return urllib2.build_opener(*handlers) + + + +def get_factory(ssl_ca_cert = None, ssl_context = None): + """ Return an SSLFactory, based on whether M2Crypto is available. """ + if have_m2crypto: + return M2SSLFactory(ssl_ca_cert, ssl_context) + else: + # Log here if someone provides the args but we don't use them. + if ssl_ca_cert or ssl_context: + if DEBUG: + DEBUG.warning("SSL arguments supplied, but M2Crypto is not available. " + "Using Python SSL.") + return SSLFactory() \ No newline at end of file
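get_factory() above returns the M2Crypto-backed factory when that package imports cleanly and the plain httplib/urllib2 factory otherwise, so calling code never needs to know which SSL stack is present. A short sketch of the intended use (illustrative only; the CA path is a placeholder):

# Hypothetical use of the factory module added above (not in the patch).
from sslfactory import get_factory

factory = get_factory(ssl_ca_cert="/etc/ssl/ca.pem")   # both arguments are optional
opener = factory.create_opener()                       # urllib2- or m2urllib2-based
conn = factory.create_https_connection("www.example.com")
conn.request("GET", "/")
print conn.getresponse().status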
" + "Using Python SSL.") + return SSLFactory() \ No newline at end of file diff --git a/rsdownloader/src/decrypt.py b/rsdownloader/src/decrypt.py deleted file mode 100644 index 296cf24..0000000 --- a/rsdownloader/src/decrypt.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/python -# based on code from pyLoad: http://pyload.org/ -from Crypto.Cipher import AES -from DLC import DLC -from MultipartPostHandler import MultipartPostHandler -import base64, binascii, os, random, re, urllib2 - -def decryptDlc(infile): - dlc = DLC(None) - dlc.proceed(infile, "/tmp") - return dlc.links - -def decryptCcf(infile): - opener = urllib2.build_opener(MultipartPostHandler) - tempdlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php', {"src": "ccf", "filename": "test.ccf", "upload": open(infile, "rb")}).read() - random.seed() - tempdlc_name = '/tmp/' + str(random.randint(0, 100)) + '-tmp.dlc' - while os.path.exists(tempdlc_name): - tempdlc_name = '/tmp/' + str(random.randint(0, 100)) + '-tmp.dlc' - tempdlc = open(tempdlc_name, "w") - tempdlc.write(re.search(r'(.*)', tempdlc_content, re.DOTALL).group(1)) - tempdlc.close - return tempdlc_name - -def decryptRsdf(infile): - links = [] - Key = binascii.unhexlify('8C35192D964DC3182C6F84F3252239EB4A320D2500000000') - IV = binascii.unhexlify('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF') - IV_Cipher = AES.new(Key, AES.MODE_ECB) - IV = IV_Cipher.encrypt(IV) - obj = AES.new(Key, AES.MODE_CFB, IV) - rsdf = open(infile, 'r') - data = rsdf.read() - data = binascii.unhexlify(''.join(data.split())) - data = data.splitlines() - for link in data: - link = base64.b64decode(link) - link = obj.decrypt(link) - decryptedUrl = link.replace('CCF: ', '') - links.append(decryptedUrl) - rsdf.close() - return links - -def decrypt(infile): - if infile.lower().endswith(".rsdf"): - return decryptRsdf(infile) - elif infile.lower().endswith(".ccf"): - infile = decryptCcf(infile) - return decryptDlc(infile) - elif infile.lower().endswith(".dlc"): - return decryptDlc(infile) - -##TESTS -#infile = "/tmp/container/test.dlc" -#print "Decrypting %s..."%infile -#links = decrypt(infile) -#print "Links:" -#print links diff --git a/rsdownloader/src/plugin.py b/rsdownloader/src/plugin.py index 83692bf..b09d08f 100644 --- a/rsdownloader/src/plugin.py +++ b/rsdownloader/src/plugin.py @@ -1,1014 +1,1014 @@ -## -## RS Downloader -## by AliAbdul -## -## -from base64 import encodestring -from Components.ActionMap import ActionMap -from Components.config import config, ConfigInteger, ConfigText, ConfigYesNo, ConfigClock, ConfigSubsection, getConfigListEntry -from Components.ConfigList import ConfigListScreen -from Components.Label import Label -from Components.Language import language -from Components.MenuList import MenuList -from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest -from Components.ScrollLabel import ScrollLabel -from decrypt import decrypt -from enigma import eListboxPythonMultiContent, eTimer, gFont, RT_HALIGN_CENTER, RT_HALIGN_RIGHT -from os import environ, listdir, remove -from Plugins.Plugin import PluginDescriptor -from Screens.ChoiceBox import ChoiceBox -from Screens.MessageBox import MessageBox -from Screens.Screen import Screen -from Screens.VirtualKeyBoard import VirtualKeyBoard -from time import localtime, sleep, strftime, time -from Tools.Directories import resolveFilename, SCOPE_SKIN_IMAGE, SCOPE_LANGUAGE, SCOPE_PLUGINS -from Tools.Downloader import HTTPProgressDownloader -from Tools.LoadPixmap import LoadPixmap -from twisted.internet 
diff --git a/rsdownloader/src/plugin.py b/rsdownloader/src/plugin.py index 83692bf..b09d08f 100644 --- a/rsdownloader/src/plugin.py +++ b/rsdownloader/src/plugin.py @@ -1,1014 +1,1014 @@ -## -## RS Downloader -## by AliAbdul -## -## -from base64 import encodestring -from Components.ActionMap import ActionMap -from Components.config import config, ConfigInteger, ConfigText, ConfigYesNo, ConfigClock, ConfigSubsection, getConfigListEntry -from Components.ConfigList import ConfigListScreen -from Components.Label import Label -from Components.Language import language -from Components.MenuList import MenuList -from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest -from Components.ScrollLabel import ScrollLabel -from decrypt import decrypt -from enigma import eListboxPythonMultiContent, eTimer, gFont, RT_HALIGN_CENTER, RT_HALIGN_RIGHT -from os import environ, listdir, remove -from Plugins.Plugin import PluginDescriptor -from Screens.ChoiceBox import ChoiceBox -from Screens.MessageBox import MessageBox -from Screens.Screen import Screen -from Screens.VirtualKeyBoard import VirtualKeyBoard -from time import localtime, sleep, strftime, time -from Tools.Directories import resolveFilename, SCOPE_SKIN_IMAGE, SCOPE_LANGUAGE, SCOPE_PLUGINS -from Tools.Downloader import HTTPProgressDownloader -from Tools.LoadPixmap import LoadPixmap -from twisted.internet import reactor -from twisted.python import failure -from twisted.web.client import getPage -from urlparse import urlparse, urlunparse -import gettext, re, socket, urllib2 - -############################################################################## - -config.plugins.RSDownloader = ConfigSubsection() -config.plugins.RSDownloader.onoff = ConfigYesNo(default=True) -config.plugins.RSDownloader.username = ConfigText(default="", fixed_size=False) -config.plugins.RSDownloader.password = ConfigText(default="", fixed_size=False) -config.plugins.RSDownloader.lists_directory = ConfigText(default="/media/hdd/rs/lists/", fixed_size=False) -config.plugins.RSDownloader.downloads_directory = ConfigText(default="/media/hdd/rs/downloads", fixed_size=False) -config.plugins.RSDownloader.ignore_time = ConfigYesNo(default=False) -config.plugins.RSDownloader.start_time = ConfigClock(default=time()) -config.plugins.RSDownloader.end_time = ConfigClock(default=time()) -config.plugins.RSDownloader.download_monday = ConfigYesNo(default=True) -config.plugins.RSDownloader.download_tuesday = ConfigYesNo(default=True) -config.plugins.RSDownloader.download_wednesday = ConfigYesNo(default=True) -config.plugins.RSDownloader.download_thursday = ConfigYesNo(default=True) -config.plugins.RSDownloader.download_friday = ConfigYesNo(default=True) -config.plugins.RSDownloader.download_saturday = ConfigYesNo(default=True) -config.plugins.RSDownloader.download_sunday = ConfigYesNo(default=True) -config.plugins.RSDownloader.count_downloads = ConfigInteger(default=3, limits=(1, 6)) -config.plugins.RSDownloader.write_log = ConfigYesNo(default=True) -config.plugins.RSDownloader.reconnect_fritz = ConfigYesNo(default=False) -config.plugins.RSDownloader.autorestart_failed = ConfigYesNo(default=False) - -############################################################################## - -def localeInit(): - lang = language.getLanguage() - environ["LANGUAGE"] = lang[:2] - gettext.bindtextdomain("enigma2", resolveFilename(SCOPE_LANGUAGE)) - gettext.textdomain("enigma2") - gettext.bindtextdomain("RSDownloader", "%s%s"%(resolveFilename(SCOPE_PLUGINS), "Extensions/RSDownloader/locale/")) - -def _(txt): - t = gettext.dgettext("RSDownloader", txt) - if t == txt: - t = gettext.gettext(txt) - return t - -localeInit() -language.addCallback(localeInit) - -############################################################################## - -def writeLog(message): - if config.plugins.RSDownloader.write_log.value: - try: - f = open("/tmp/rapidshare.log", "a") - f.write(strftime("%c", localtime(time())) + " - " + message + "\n") - f.close() - except: - pass - -############################################################################## - -def _parse(url): - url = url.strip() - parsed = urlparse(url) - scheme = parsed[0] - path = urlunparse(('','') + parsed[2:]) - host, port = parsed[1], 80 - if '@' in host: - username, host = host.split('@') - if ':' in username: - username, password = username.split(':') - else: - password = "" - else: - username = "" - password = "" - if ':' in host: - host, port = host.split(':') - port = int(port) - if path == "": - path = "/" - return scheme, host, port, path, username, password - -class ProgressDownload: - def __init__(self, url, outputfile, contextFactory=None, *args, **kwargs): - scheme, host, port, path, username, password = _parse(url) - if username and password: - url = scheme + '://' + host + ':' + str(port) + path - basicAuth = encodestring("%s:%s"%(username, password)) - authHeader = "Basic " + 
basicAuth.strip() - AuthHeaders = {"Authorization": authHeader} - if kwargs.has_key("headers"): - kwargs["headers"].update(AuthHeaders) - else: - kwargs["headers"] = AuthHeaders - self.factory = HTTPProgressDownloader(url, outputfile, *args, **kwargs) - self.connection = reactor.connectTCP(host, port, self.factory) - - def start(self): - return self.factory.deferred - - def stop(self): - self.connection.disconnect() - - def addProgress(self, progress_callback): - self.factory.progress_callback = progress_callback - -############################################################################## - -def get(url): - try: - data = urllib2.urlopen(url) - return data.read() - except: - return "" - -def post(url, data): - try: - return urllib2.urlopen(url, data).read() - except: - return "" - -def matchGet(rex, string): - match = re.search(rex, string) - if match: - if len(match.groups()) == 0: - return string[match.span()[0]:match.span()[1]] - if len(match.groups()) == 1: - return match.groups()[0] - else: - return False - -############################################################################## - -def reconnect(host='fritz.box', port=49000): - http_body = '\r\n'.join(( - '<?xml version="1.0" encoding="utf-8"?>', - '<s:Envelope s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/" xmlns:s="http://schemas.xmlsoap.org/soap/envelope/">', - ' <s:Body>', - ' <u:ForceTermination xmlns:u="urn:schemas-upnp-org:service:WANIPConnection:1" />', - ' </s:Body>', - '</s:Envelope>')) - http_data = '\r\n'.join(( - 'POST /upnp/control/WANIPConn1 HTTP/1.1', - 'Host: %s:%d'%(host, port), - 'SoapAction: urn:schemas-upnp-org:service:WANIPConnection:1#ForceTermination', - 'Content-Type: text/xml; charset="utf-8"', - 'Content-Length: %d'%len(http_body), - '', - http_body)) - try: - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect((host, port)) - s.send(http_data) - s.close() - except: - pass - -############################################################################## - -class RSDownload: - def __init__(self, url): - writeLog("Adding: %s"%url) - self.url = url - self.download = None - self.downloading = False - self.progress = 0 - self.size = 0 - self.status = _("Waiting") - self.name = self.url.split("/")[-1] - - self.freeDownloadUrl = "" - self.freeDownloadTimer = eTimer() - self.freeDownloadTimer.callback.append(self.freeDownloadStart) - self.checkTimer = eTimer() - self.checkTimer.callback.append(self.doCheckTimer) - self.restartFailedTimer = eTimer() - self.restartFailedTimer.callback.append(self.restartFailedCheck) - - self.finishCallbacks = [] - - def start(self): - writeLog("Downloading: %s"%self.url) - self.downloading = True - self.progress = 0 - self.size = 0 - username = config.plugins.RSDownloader.username.value - password = config.plugins.RSDownloader.password.value - if self.url.__contains__("rapidshare.com") and username == "" and password == "": - writeLog("Free RS-Download: %s"%self.url) - self.status = _("Checking") - if config.plugins.RSDownloader.reconnect_fritz.value: - reconnect() - sleep(3) - data = get(self.url) - url = matchGet('<form[^>]+action="([^"]+)', data) - if not url: - writeLog("Failed: %s"%self.url) - self.httpFailed(True, "Failed to get download page url: %s"%self.url) - else: - data = post(url, "dl.start=Free") - seconds = matchGet('var c=([0-9]+)', data) - if not seconds: - self.httpFailed(True, "Failed to get download page url: %s"%self.url) - else: - writeLog("Free RS-download... 
must wait %s seconds: %s"%(seconds, self.url)) - self.status = "%s %s"%(_("Waiting"), seconds) - url = matchGet('<form name="dlf" action="([^"]+)', data) - if not url: - self.httpFailed(True, "Failed to get download page url: %s"%self.url) - else: - self.freeDownloadUrl = url - self.freeDownloadTimer.start((int(seconds) + 2) * 1000, 1) - elif self.url.__contains__("youtube.com"): - writeLog("Getting youtube video link: %s"%self.url) - self.status = _("Checking") - downloadLink = self.getYoutubeDownloadLink() - if downloadLink: - self.status = _("Downloading") - writeLog("Downloading video: %s"%downloadLink) - req = urllib2.Request(downloadLink) - url_handle = urllib2.urlopen(req) - headers = url_handle.info() - if headers.getheader("content-type") == "video/mp4": - ext = "mp4" - else: - ext = "flv" - self.download = ProgressDownload(downloadLink, ("%s/%s.%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name, ext)).replace("//", "/")) - self.download.addProgress(self.httpProgress) - self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed) - else: - self.httpFailed(True, "Failed to get video url: %s"%self.url) - else: - if self.url.__contains__("rapidshare.com"): - url = self.url.replace("http://", "http://" + username + ":" + password + "@") - else: - url = self.url - self.status = _("Downloading") - self.download = ProgressDownload(url, ("%s/%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name)).replace("//", "/").replace(".html", "")) - self.download.addProgress(self.httpProgress) - self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed) - - def freeDownloadStart(self): - self.status = _("Downloading") - self.download = ProgressDownload(self.freeDownloadUrl, ("%s/%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name)).replace("//", "/").replace(".html", "")) - self.download.addProgress(self.httpProgress) - self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed) - - def stop(self): - self.progress = 0 - self.downloading = False - self.status = _("Waiting") - if self.download: - writeLog("Stopping download: %s"%self.url) - self.download.stop() - - def httpProgress(self, recvbytes, totalbytes): - if self.size == 0: - self.size = int((totalbytes / 1024) / 1024) - self.progress = int(100.0 * float(recvbytes) / float(totalbytes)) - if self.progress == 100: - writeLog("Finished: %s"%self.url) - self.status = _("Finished") - self.execFinishCallbacks() - - def httpFinished(self, string=""): - if string is not None: - writeLog("Failed: %s"%self.url) - writeLog("Error: %s"%string) - self.status = _("Checking") - self.checkTimer.start(10000, 1) - - def doCheckTimer(self): - if self.size == 0: - self.status = _("Failed") - if config.plugins.RSDownloader.autorestart_failed.value: - self.restartFailedTimer.start(10000*60, 1) - elif self.progress == 100: - self.status = _("Finished") - self.downloading = False - self.execFinishCallbacks() - - def restartFailedCheck(self): - if self.status == _("Failed"): # check if user didn't restart already - self.download = None - self.status = _("Waiting") - - def execFinishCallbacks(self): - for x in self.finishCallbacks: - x() - - def httpFailed(self, failure=None, error=""): - if failure: - if error == "": - error = failure.getErrorMessage() - if error != "" and not error.startswith("[Errno 2]"): - writeLog("Failed: %s"%self.url) - writeLog("Error: %s"%error) - self.status = _("Checking") - self.checkTimer.start(10000, 1) - - def 
getYoutubeDownloadLink(self): - mrl = None - html = get(self.url) - if html != "": - isHDAvailable = False - video_id = None - t = None - reonecat = re.compile(r'<title>(.+?)</title>', re.DOTALL) - titles = reonecat.findall(html) - if titles: - self.name = titles[0] - if self.name.startswith("YouTube - "): - self.name = (self.name[10:]).replace("&amp;", "&") - if html.__contains__("isHDAvailable = true"): - isHDAvailable = True - for line in html.split('\n'): - if 'swfArgs' in line: - line = line.strip().split() - x = 0 - for thing in line: - if 'video_id' in thing: - video_id = line[x+1][1:-2] - elif '"t":' == thing: - t = line[x+1][1:-2] - x += 1 - if video_id and t: - if isHDAvailable == True: - mrl = "http://www.youtube.com/get_video?video_id=%s&t=%s&fmt=22" % (video_id, t) - else: - mrl = "http://www.youtube.com/get_video?video_id=%s&t=%s&fmt=18" % (video_id, t) - return mrl - -############################################################################## - -class RS: - def __init__(self): - self.downloads = [] - self.checkTimer = eTimer() - self.checkTimer.callback.append(self.startDownloading) - self.checkTimer.start(5000*60, False) - - def mayDownload(self): - if config.plugins.RSDownloader.onoff.value == False: - writeLog("RS Downloader is turned off...") - return False - elif config.plugins.RSDownloader.ignore_time.value: - return True - else: - start = config.plugins.RSDownloader.start_time.value - end = config.plugins.RSDownloader.end_time.value - t = localtime() - weekday = t[6] - if weekday == 0 and config.plugins.RSDownloader.download_monday.value == False: - return False - elif weekday == 1 and config.plugins.RSDownloader.download_tuesday.value == False: - return False - elif weekday == 2 and config.plugins.RSDownloader.download_wednesday.value == False: - return False - elif weekday == 3 and config.plugins.RSDownloader.download_thursday.value == False: - return False - elif weekday == 4 and config.plugins.RSDownloader.download_friday.value == False: - return False - elif weekday == 5 and config.plugins.RSDownloader.download_saturday.value == False: - return False - elif weekday == 6 and config.plugins.RSDownloader.download_sunday.value == False: - return False - else: - hour_now = t[3] - minute_now = t[4] - hour_start = start[0] - minute_start = start[1] - hour_end = end[0] - minute_end = end[1] - if start == end: # Same start and end-time - return True - elif hour_end < hour_start: # Different days!!! - if hour_now > hour_start or hour_now < hour_end: - return True - elif hour_now == hour_start and minute_now > minute_start: - return True - elif hour_now == hour_end and minute_now < minute_end: - return True - else: - return False - elif hour_now > hour_start and hour_now < hour_end: # Same day... - return True - elif hour_now == hour_start and minute_now > minute_start: # Same day, same start-hour... - return True - elif hour_now == hour_end and minute_now < minute_end: # Same day, same end-hour... 
- return True - else: - return False - - def allDownloadsFinished(self): - allDone = True - for download in self.downloads: - if (download.status != _("Failed")) and (download.status != _("Finished")): - allDone = False - return allDone - - def startDownloading(self): - if self.mayDownload() == True: - if self.allDownloadsFinished() == True: - self.readLists() - downloadCount = 0 - for download in self.downloads: - if download.downloading == True: - downloadCount += 1 # Count the downloaded files - if config.plugins.RSDownloader.username.value == "" and config.plugins.RSDownloader.password.value == "": - if downloadCount < 1: # Allow one download if without account - for download in self.downloads: - if download.downloading == False and download.status.startswith(_("Waiting")): - download.start() # Start first download in the list - break - else: - mayDownloadCount = config.plugins.RSDownloader.count_downloads.value - downloadCount - for download in self.downloads: - if download.downloading == False: - if mayDownloadCount > 0 and download.status == _("Waiting"): - download.start() - mayDownloadCount -= 1 - - def addDownload(self, url): - error = False - for download in self.downloads: - if download.url == url: - error = True - if error: - return False - else: - download = RSDownload(url) - download.finishCallbacks.append(self.cleanLists) - self.downloads.append(download) - return True - - def readLists(self): - writeLog("Reading all lists...") - path = config.plugins.RSDownloader.lists_directory.value - if not path.endswith("/"): - path = path + "/" - writeLog("Directory: " + path) - try: - file_list = listdir(path) - writeLog("Count of lists: " + str(len(file_list))) - except: - file_list = [] - writeLog("Could not find any list!") - for x in file_list: - list = path + x - if list.endswith(".txt"): - try: - writeLog("Reading list %s..."%list) - f = open(list, "r") - count = 0 - for l in f: - if l.startswith("http://"): - if (self.addDownload(l.replace("\n", "").replace("\r", ""))) == True: - count += 1 - f.close() - if count == 0: - writeLog("Empty list or downloads already in download list: %s"%list) - else: - writeLog("Added %d files from list %s..."%(count, list)) - except: - writeLog("Error while reading list %s!"%list) - else: - writeLog("No *.txt file: %s!"%list) - - def cleanLists(self): - writeLog("Cleaning lists...") - path = config.plugins.RSDownloader.lists_directory.value - if not path.endswith("/"): - path = path + "/" - try: - file_list = listdir(path) - except: - file_list = [] - for x in file_list: - list = path + x - try: - f = open(list, "r") - content = f.read() - f.close() - for download in self.downloads: - if download.status == _("Finished") and content.__contains__(download.url): - content = content.replace(download.url, "") - content = content.replace("\n\n", "\n").replace("\r\r", "\r") - f = open(list, "w") - f.write(content) - f.close() - except: - writeLog("Error while cleaning list %s!"%list) - self.startDownloading() - - def removeDownload(self, url): - tmp = [] - for download in self.downloads: - if download.url == url: - download.stop() - else: - tmp.append(download) - del self.downloads - self.downloads = tmp - self.removeFromLists(url) - - def removeFromLists(self, url): - path = config.plugins.RSDownloader.lists_directory.value - if not path.endswith("/"): - path = path + "/" - try: - file_list = listdir(path) - except: - file_list = [] - for x in file_list: - list = path + x - try: - f = open(list, "r") - content = f.read() - f.close() - if 
content.__contains__(url): - content = content.replace(url, "") - content = content.replace("\n\n", "\n").replace("\r\r", "\r") - f = open(list, "w") - f.write(content) - f.close() - except: - pass - - def clearFinishedDownload(self, url): - idx = 0 - for x in self.downloads: - if x.url == url: - del self.downloads[idx] - break - else: - idx += 1 - - def clearFinishedDownloads(self): - tmp = [] - for download in self.downloads: - if download.status != _("Finished"): - tmp.append(download) - del self.downloads - self.downloads = tmp - - def deleteFailedDownloads(self): - tmp = [] - for download in self.downloads: - if download.status == _("Failed"): - self.removeFromLists(download.url) - else: - tmp.append(download) - del self.downloads - self.downloads = tmp - - def restartFailedDownloads(self): - tmp = [] - for download in self.downloads: - if download.status == _("Failed"): - download.download = None - download.downloading = False - download.progress = 0 - download.size = 0 - download.status = _("Waiting") - tmp.append(download) - del self.downloads - self.downloads = tmp - self.startDownloading() - -rapidshare = RS() - -############################################################################## - -class ChangedScreen(Screen): - def __init__(self, session, parent=None): - Screen.__init__(self, session, parent) - self.onLayoutFinish.append(self.setScreenTitle) - - def setScreenTitle(self): - self.setTitle(_("RS Downloader")) - -############################################################################## - -class RSConfig(ConfigListScreen, ChangedScreen): - skin = """ - - - - - - - - """ - - def __init__(self, session): - ChangedScreen.__init__(self, session) - - self["key_green"] = Label(_("Save")) - - ConfigListScreen.__init__(self, [ - getConfigListEntry(_("Download in the background:"), config.plugins.RSDownloader.onoff), - getConfigListEntry(_("Username:"), config.plugins.RSDownloader.username), - getConfigListEntry(_("Password:"), config.plugins.RSDownloader.password), - getConfigListEntry(_("Lists directory:"), config.plugins.RSDownloader.lists_directory), - getConfigListEntry(_("Downloads directory:"), config.plugins.RSDownloader.downloads_directory), - getConfigListEntry(_("Ignore download times:"), config.plugins.RSDownloader.ignore_time), - getConfigListEntry(_("Allow downloading on monday:"), config.plugins.RSDownloader.download_monday), - getConfigListEntry(_("Allow downloading on tuesday:"), config.plugins.RSDownloader.download_tuesday), - getConfigListEntry(_("Allow downloading on wednesday:"), config.plugins.RSDownloader.download_wednesday), - getConfigListEntry(_("Allow downloading on thursday:"), config.plugins.RSDownloader.download_thursday), - getConfigListEntry(_("Allow downloading on friday:"), config.plugins.RSDownloader.download_friday), - getConfigListEntry(_("Allow downloading on saturday:"), config.plugins.RSDownloader.download_saturday), - getConfigListEntry(_("Allow downloading on sunday:"), config.plugins.RSDownloader.download_sunday), - getConfigListEntry(_("Don't download before:"), config.plugins.RSDownloader.start_time), - getConfigListEntry(_("Don't download after:"), config.plugins.RSDownloader.end_time), - getConfigListEntry(_("Maximal downloads:"), config.plugins.RSDownloader.count_downloads), - getConfigListEntry(_("Write log:"), config.plugins.RSDownloader.write_log), - getConfigListEntry(_("Reconnect fritz.Box before downloading:"), config.plugins.RSDownloader.reconnect_fritz), - getConfigListEntry(_("Restart failed after 10 minutes:"), 
config.plugins.RSDownloader.autorestart_failed)]) - - self["actions"] = ActionMap(["OkCancelActions", "ColorActions"], {"green": self.save, "cancel": self.exit}, -1) - - def save(self): - for x in self["config"].list: - x[1].save() - self.close() - - def exit(self): - for x in self["config"].list: - x[1].cancel() - self.close() - -############################################################################## - -class RSSearch(Screen): - skin = """ - - - """ - - def __init__(self, session, searchFor): - Screen.__init__(self, session) - self.session = session - - self.searchFor = searchFor.replace(" ", "%2B") - self.maxPage = 1 - self.curPage = 1 - self.files = [] - - self["list"] = MenuList([]) - - self["actions"] = ActionMap(["OkCancelActions", "InfobarChannelSelection"], - { - "historyBack": self.previousPage, - "historyNext": self.nextPage, - "ok": self.okClicked, - "cancel": self.close - }, -1) - - self.onLayoutFinish.append(self.search) - - def okClicked(self): - if len(self.files) > 0: - idx = self["list"].getSelectedIndex() - url = self.files[idx] - try: - f = open(("%s/search.txt" % config.plugins.RSDownloader.lists_directory.value).replace("//", "/"), "a") - f.write("%s\n"%url) - f.close() - self.session.open(MessageBox, (_("Added %s to the download-list.") % url), MessageBox.TYPE_INFO) - except: - self.session.open(MessageBox, (_("Error while adding %s to the download-list!") % url), MessageBox.TYPE_ERROR) - - def search(self): - getPage("http://rapidshare-search-engine.com/index-s_submit=Search&sformval=1&s_type=0&what=1&s=%s&start=%d.html"%(self.searchFor, self.curPage)).addCallback(self.searchCallback).addErrback(self.searchError) - - def searchCallback(self, html=""): - list = [] - files = [] - - if html.__contains__("Nothing found, sorry."): - self.session.open(MessageBox, (_("Error while searching http://rapidshare-search-engine.com!\n\nError: Nothing found, sorry.")), MessageBox.TYPE_ERROR) - self.instance.setTitle(_("Nothing found, sorry.")) - else: - tmp = html - while tmp.__contains__("goPg('"): - idx = tmp.index("goPg('") - tmp = tmp[idx+6:] - idx = tmp.index("'") - pageNumber = tmp[:idx] - - try: - pageNumber = int(pageNumber) - if pageNumber > self.maxPage: - self.maxPage = pageNumber - except: - pass - - self.instance.setTitle(_("Page %d / %d. Push < > to switch the page...")%(self.curPage, self.maxPage)) - - while html.__contains__('title="Download"'): - idx = html.index('title="Download"') - html = html[idx:] - idx = html.index('value="') - html = html[idx+7:] - idx = html.index('"') - size = html[:idx] - idx = html.index('http://rapidshare.com/') - html = html[idx:] - idx = html.index('"') - url = html[:idx] - - files.append(url) - try: - urllist = url.split("/") - idx = len(urllist) - 1 - name = urllist[idx] - list.append("%s - %s"%(size, name)) - except: - list.append("%s - %s"%(size, url)) - - self.files = files - self["list"].setList(list) - - def searchError(self, error=""): - self.session.open(MessageBox, (_("Error while searching http://rapidshare-search-engine.com!\n\nError: %s")%str(error)), MessageBox.TYPE_ERROR) - - def previousPage(self): - if self.curPage > 1: - self.curPage -= 1 - self.instance.setTitle(_("Loading previous page... please wait!")) - self.search() - - def nextPage(self): - if self.curPage < self.maxPage: - self.curPage += 1 - self.instance.setTitle(_("Loading next page... 
please wait!")) - self.search() - -############################################################################## - -class RSLogScreen(ChangedScreen): - skin = """ - - - """ - - def __init__(self, session): - ChangedScreen.__init__(self, session) - - try: - f = open("/tmp/rapidshare.log") - log = f.read() - f.close() - except: - log = "" - self["label"] = ScrollLabel(log) - - self["actions"] = ActionMap(["WizardActions"], - { - "ok": self.close, - "back": self.close, - "up": self["label"].pageUp, - "down": self["label"].pageDown, - "left": self["label"].pageUp, - "right": self["label"].pageDown - }, -1) - -############################################################################## - -class RSContainerSelector(ChangedScreen): - skin = """ - - - """ - - def __init__(self, session, list): - ChangedScreen.__init__(self, session) - self["list"] = MenuList(list) - self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1) - - def okClicked(self): - cur = self["list"].getCurrent() - self.close(cur) - -############################################################################## - -class RSList(MenuList): - def __init__(self, list): - MenuList.__init__(self, list, False, eListboxPythonMultiContent) - self.l.setItemHeight(25) - self.l.setFont(0, gFont("Regular", 20)) - -############################################################################## - -def RSListEntry(download): - res = [(download)] - res.append(MultiContentEntryText(pos=(0, 0), size=(170, 25), font=0, text=download.name)) - res.append(MultiContentEntryText(pos=(175, 0), size=(75, 25), font=0, text="%d%s"%(download.size, "MB"), flags=RT_HALIGN_CENTER)) - res.append(MultiContentEntryPixmapAlphaTest(pos=(260, 9), size=(84, 7), png=LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/progress_bg.png")))) - res.append(MultiContentEntryPixmapAlphaTest(pos=(260, 10), size=(int(0.84 * download.progress), 5), png=LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/progress_small.png")))) - res.append(MultiContentEntryText(pos=(360, 0), size=(60, 25), font=0, text="%d%s"%(download.progress, "%"), flags=RT_HALIGN_CENTER)) - res.append(MultiContentEntryText(pos=(420, 0), size=(140, 25), font=0, text=download.status, flags=RT_HALIGN_RIGHT)) - return res - -############################################################################## - -class RSMain(ChangedScreen): - skin = """ - - - - - - - - - - - - - """ - - def __init__(self, session): - ChangedScreen.__init__(self, session) - self.session = session - - self["key_red"] = Label(_("Delete")) - self["key_green"] = Label(_("Search")) - self["key_yellow"] = Label(_("Add")) - self["key_blue"] = Label(_("Config")) - self["key_menu"] = Label(_("Menu")) - self["list"] = RSList([]) - - self.refreshTimer = eTimer() - self.refreshTimer.callback.append(self.updateList) - - self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "InfobarMenuActions"], - { - "mainMenu": self.menu, - "cancel": self.close, - "red": self.delete, - "green": self.search, - "yellow": self.add, - "blue": self.config - }, prio=-1) - - self.onLayoutFinish.append(self.updateList) - - def menu(self): - list = [] - #TODO: Add sort list functions - list.append((_("Delete download"), self.delete)) - list.append((_("Use search engine"), self.search)) - list.append((_("Add downloads from txt files"), self.add)) - list.append((_("Add files from container"), self.addContainer)) - list.append((_("Delete failed downloads"), self.deleteFailed)) 
- list.append((_("Restart failed downloads"), self.restartFailed)) - list.append((_("Clear finished downloads"), self.clearFinished)) - list.append((_("Show log"), self.showLog)) - list.append((_("Delete log"), self.deleteLog)) - list.append((_("Close plugin"), self.close)) - self.session.openWithCallback(self.menuCallback, ChoiceBox, title=_("Please choose a function..."), list=list) - - def menuCallback(self, callback=None): - if callback is not None: - callback[1]() - - def deleteFailed(self): - rapidshare.deleteFailedDownloads() - - def restartFailed(self): - rapidshare.restartFailedDownloads() - - def clearFinished(self): - rapidshare.clearFinishedDownloads() - - def showLog(self): - self.session.open(RSLogScreen) - - def deleteLog(self): - try: - remove("/tmp/rapidshare.log") - except: - pass - - def updateList(self): - list = [] - for download in rapidshare.downloads: - list.append(RSListEntry(download)) - self["list"].setList(list) - self.refreshTimer.start(2000, 1) - - def delete(self): - cur = self["list"].getCurrent() - if cur: - cur = cur[0] - if cur.status == _("Finished"): - rapidshare.clearFinishedDownload(cur.url) - else: - self.session.openWithCallback(self.deleteCallback, MessageBox, (_("Delete %s?")%cur.name)) - - def deleteCallback(self, callback): - if callback: - rapidshare.removeDownload(self["list"].getCurrent()[0].url) - self.refreshTimer.stop() - self.updateList() - - def search(self): - self.session.openWithCallback(self.searchCallback, VirtualKeyBoard, title=_("Search http://rapidshare-search-engine.com for:")) - - def searchCallback(self, callback): - if callback is not None and callback != "": - self.session.openWithCallback(self.searchScreenCallback, RSSearch, callback) - - - def searchScreenCallback(self): - self.refreshTimer.stop() - rapidshare.startDownloading() - self.updateList() - - def add(self): - self.refreshTimer.stop() - rapidshare.startDownloading() - self.updateList() - - def config(self): - self.session.openWithCallback(self.configCallback, RSConfig) - - def configCallback(self): - if config.plugins.RSDownloader.onoff.value: - rapidshare.startDownloading() - else: - for download in rapidshare.downloads: - if download.downloading: - download.stop() - self.updateList() - - def addContainer(self): - try: - file_list = listdir(config.plugins.RSDownloader.lists_directory.value) - except: - file_list = [] - list = [] - for file in file_list: - if file.lower().endswith(".ccf") or file.lower().endswith(".dlc") or file.lower().endswith(".rsdf"): - list.append(file) - list.sort() - self.session.openWithCallback(self.addContainerCallback, RSContainerSelector, list) - - def addContainerCallback(self, callback=None): - if callback: - file = "%s/%s"%(config.plugins.RSDownloader.lists_directory.value, callback) - file = file.replace("//", "/") - links = decrypt(file) - try: - f = open(("%s/%s.txt" % (config.plugins.RSDownloader.lists_directory.value, callback)).replace("//", "/"), "w") - for link in links: - if link.endswith(".html"): - link = link[:-5] - elif link.endswith(".htm"): - link = link[:-4] - f.write("%s\n"%link) - f.close() - except: - pass - self.refreshTimer.stop() - rapidshare.startDownloading() - self.updateList() - -############################################################################## - -def autostart(reason, **kwargs): - if reason == 0: - rapidshare.startDownloading() - -############################################################################## - -def main(session, **kwargs): - session.open(RSMain) - 
-############################################################################## - -def Plugins(**kwargs): - return [ - PluginDescriptor(where=PluginDescriptor.WHERE_AUTOSTART, fnc=autostart), - PluginDescriptor(name=_("RS Downloader"), description=_("Download files from rapidshare"), where=[PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_PLUGINMENU], icon="rs.png", fnc=main)] - +## +## RS Downloader +## by AliAbdul +## +## +from base64 import encodestring +from Components.ActionMap import ActionMap +from Components.config import config, ConfigInteger, ConfigText, ConfigYesNo, ConfigClock, ConfigSubsection, getConfigListEntry +from Components.ConfigList import ConfigListScreen +from Components.Label import Label +from Components.Language import language +from Components.MenuList import MenuList +from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest +from Components.ScrollLabel import ScrollLabel +from container.decrypt import decrypt +from enigma import eListboxPythonMultiContent, eTimer, gFont, RT_HALIGN_CENTER, RT_HALIGN_RIGHT +from os import environ, listdir, remove +from Plugins.Plugin import PluginDescriptor +from Screens.ChoiceBox import ChoiceBox +from Screens.MessageBox import MessageBox +from Screens.Screen import Screen +from Screens.VirtualKeyBoard import VirtualKeyBoard +from time import localtime, sleep, strftime, time +from Tools.Directories import resolveFilename, SCOPE_SKIN_IMAGE, SCOPE_LANGUAGE, SCOPE_PLUGINS +from Tools.Downloader import HTTPProgressDownloader +from Tools.LoadPixmap import LoadPixmap +from twisted.internet import reactor +from twisted.python import failure +from twisted.web.client import getPage +from urlparse import urlparse, urlunparse +import gettext, re, socket, urllib2 + +############################################################################## + +config.plugins.RSDownloader = ConfigSubsection() +config.plugins.RSDownloader.onoff = ConfigYesNo(default=True) +config.plugins.RSDownloader.username = ConfigText(default="", fixed_size=False) +config.plugins.RSDownloader.password = ConfigText(default="", fixed_size=False) +config.plugins.RSDownloader.lists_directory = ConfigText(default="/media/hdd/rs/lists/", fixed_size=False) +config.plugins.RSDownloader.downloads_directory = ConfigText(default="/media/hdd/rs/downloads", fixed_size=False) +config.plugins.RSDownloader.ignore_time = ConfigYesNo(default=False) +config.plugins.RSDownloader.start_time = ConfigClock(default=time()) +config.plugins.RSDownloader.end_time = ConfigClock(default=time()) +config.plugins.RSDownloader.download_monday = ConfigYesNo(default=True) +config.plugins.RSDownloader.download_tuesday = ConfigYesNo(default=True) +config.plugins.RSDownloader.download_wednesday = ConfigYesNo(default=True) +config.plugins.RSDownloader.download_thursday = ConfigYesNo(default=True) +config.plugins.RSDownloader.download_friday = ConfigYesNo(default=True) +config.plugins.RSDownloader.download_saturday = ConfigYesNo(default=True) +config.plugins.RSDownloader.download_sunday = ConfigYesNo(default=True) +config.plugins.RSDownloader.count_downloads = ConfigInteger(default=3, limits=(1, 6)) +config.plugins.RSDownloader.write_log = ConfigYesNo(default=True) +config.plugins.RSDownloader.reconnect_fritz = ConfigYesNo(default=False) +config.plugins.RSDownloader.autorestart_failed = ConfigYesNo(default=False) + +############################################################################## + +def localeInit(): + lang = language.getLanguage() + 
environ["LANGUAGE"] = lang[:2] + gettext.bindtextdomain("enigma2", resolveFilename(SCOPE_LANGUAGE)) + gettext.textdomain("enigma2") + gettext.bindtextdomain("RSDownloader", "%s%s"%(resolveFilename(SCOPE_PLUGINS), "Extensions/RSDownloader/locale/")) + +def _(txt): + t = gettext.dgettext("RSDownloader", txt) + if t == txt: + t = gettext.gettext(txt) + return t + +localeInit() +language.addCallback(localeInit) + +############################################################################## + +def writeLog(message): + if config.plugins.RSDownloader.write_log.value: + try: + f = open("/tmp/rapidshare.log", "a") + f.write(strftime("%c", localtime(time())) + " - " + message + "\n") + f.close() + except: + pass + +############################################################################## + +def _parse(url): + url = url.strip() + parsed = urlparse(url) + scheme = parsed[0] + path = urlunparse(('','') + parsed[2:]) + host, port = parsed[1], 80 + if '@' in host: + username, host = host.split('@') + if ':' in username: + username, password = username.split(':') + else: + password = "" + else: + username = "" + password = "" + if ':' in host: + host, port = host.split(':') + port = int(port) + if path == "": + path = "/" + return scheme, host, port, path, username, password + +class ProgressDownload: + def __init__(self, url, outputfile, contextFactory=None, *args, **kwargs): + scheme, host, port, path, username, password = _parse(url) + if username and password: + url = scheme + '://' + host + ':' + str(port) + path + basicAuth = encodestring("%s:%s"%(username, password)) + authHeader = "Basic " + basicAuth.strip() + AuthHeaders = {"Authorization": authHeader} + if kwargs.has_key("headers"): + kwargs["headers"].update(AuthHeaders) + else: + kwargs["headers"] = AuthHeaders + self.factory = HTTPProgressDownloader(url, outputfile, *args, **kwargs) + self.connection = reactor.connectTCP(host, port, self.factory) + + def start(self): + return self.factory.deferred + + def stop(self): + self.connection.disconnect() + + def addProgress(self, progress_callback): + self.factory.progress_callback = progress_callback + +############################################################################## + +def get(url): + try: + data = urllib2.urlopen(url) + return data.read() + except: + return "" + +def post(url, data): + try: + return urllib2.urlopen(url, data).read() + except: + return "" + +def matchGet(rex, string): + match = re.search(rex, string) + if match: + if len(match.groups()) == 0: + return string[match.span()[0]:match.span()[1]] + if len(match.groups()) == 1: + return match.groups()[0] + else: + return False + +############################################################################## + +def reconnect(host='fritz.box', port=49000): + http_body = '\r\n'.join(( + '', + '', + ' ', + ' ', + ' ', + '')) + http_data = '\r\n'.join(( + 'POST /upnp/control/WANIPConn1 HTTP/1.1', + 'Host: %s:%d'%(host, port), + 'SoapAction: urn:schemas-upnp-org:service:WANIPConnection:1#ForceTermination', + 'Content-Type: text/xml; charset="utf-8"', + 'Content-Length: %d'%len(http_body), + '', + http_body)) + try: + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.connect((host, port)) + s.send(http_data) + s.close() + except: + pass + +############################################################################## + +class RSDownload: + def __init__(self, url): + writeLog("Adding: %s"%url) + self.url = url + self.download = None + self.downloading = False + self.progress = 0 + self.size = 0 + self.status = 
_("Waiting") + self.name = self.url.split("/")[-1] + + self.freeDownloadUrl = "" + self.freeDownloadTimer = eTimer() + self.freeDownloadTimer.callback.append(self.freeDownloadStart) + self.checkTimer = eTimer() + self.checkTimer.callback.append(self.doCheckTimer) + self.restartFailedTimer = eTimer() + self.restartFailedTimer.callback.append(self.restartFailedCheck) + + self.finishCallbacks = [] + + def start(self): + writeLog("Downloading: %s"%self.url) + self.downloading = True + self.progress = 0 + self.size = 0 + username = config.plugins.RSDownloader.username.value + password = config.plugins.RSDownloader.password.value + if self.url.__contains__("rapidshare.com") and username == "" and password == "": + writeLog("Free RS-Download: %s"%self.url) + self.status = _("Checking") + if config.plugins.RSDownloader.reconnect_fritz.value: + reconnect() + sleep(3) + data = get(self.url) + url = matchGet(']+action="([^"]+)', data) + if not url: + writeLog("Failed: %s"%self.url) + self.httpFailed(True, "Failed to get download page url: %s"%self.url) + else: + data = post(url, "dl.start=Free") + seconds = matchGet('var c=([0-9]+)', data) + if not seconds: + self.httpFailed(True, "Failed to get download page url: %s"%self.url) + else: + writeLog("Free RS-download... must wait %s seconds: %s"%(seconds, self.url)) + self.status = "%s %s"%(_("Waiting"), seconds) + url = matchGet('"dlf" action="([^"]+)', data) + if not url: + self.httpFailed(True, "Failed to get download page url: %s"%self.url) + else: + self.freeDownloadUrl = url + self.freeDownloadTimer.start((int(seconds) + 2) * 1000, 1) + elif self.url.__contains__("youtube.com"): + writeLog("Getting youtube video link: %s"%self.url) + self.status = _("Checking") + downloadLink = self.getYoutubeDownloadLink() + if downloadLink: + self.status = _("Downloading") + writeLog("Downloading video: %s"%downloadLink) + req = urllib2.Request(downloadLink) + url_handle = urllib2.urlopen(req) + headers = url_handle.info() + if headers.getheader("content-type") == "video/mp4": + ext = "mp4" + else: + ext = "flv" + self.download = ProgressDownload(downloadLink, ("%s/%s.%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name, ext)).replace("//", "/")) + self.download.addProgress(self.httpProgress) + self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed) + else: + self.httpFailed(True, "Failed to get video url: %s"%self.url) + else: + if self.url.__contains__("rapidshare.com"): + url = self.url.replace("http://", "http://" + username + ":" + password + "@") + else: + url = self.url + self.status = _("Downloading") + self.download = ProgressDownload(url, ("%s/%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name)).replace("//", "/").replace(".html", "")) + self.download.addProgress(self.httpProgress) + self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed) + + def freeDownloadStart(self): + self.status = _("Downloading") + self.download = ProgressDownload(self.freeDownloadUrl, ("%s/%s"%(config.plugins.RSDownloader.downloads_directory.value, self.name)).replace("//", "/").replace(".html", "")) + self.download.addProgress(self.httpProgress) + self.download.start().addCallback(self.httpFinished).addErrback(self.httpFailed) + + def stop(self): + self.progress = 0 + self.downloading = False + self.status = _("Waiting") + if self.download: + writeLog("Stopping download: %s"%self.url) + self.download.stop() + + def httpProgress(self, recvbytes, totalbytes): + if self.size == 0: + self.size = 
int((totalbytes / 1024) / 1024) + self.progress = int(100.0 * float(recvbytes) / float(totalbytes)) + if self.progress == 100: + writeLog("Finished: %s"%self.url) + self.status = _("Finished") + self.execFinishCallbacks() + + def httpFinished(self, string=""): + if string is not None: + writeLog("Failed: %s"%self.url) + writeLog("Error: %s"%string) + self.status = _("Checking") + self.checkTimer.start(10000, 1) + + def doCheckTimer(self): + if self.size == 0: + self.status = _("Failed") + if config.plugins.RSDownloader.autorestart_failed.value: + self.restartFailedTimer.start(10000*60, 1) + elif self.progress == 100: + self.status = _("Finished") + self.downloading = False + self.execFinishCallbacks() + + def restartFailedCheck(self): + if self.status == _("Failed"): # check if user didn't restart already + self.download = None + self.status = _("Waiting") + + def execFinishCallbacks(self): + for x in self.finishCallbacks: + x() + + def httpFailed(self, failure=None, error=""): + if failure: + if error == "": + error = failure.getErrorMessage() + if error != "" and not error.startswith("[Errno 2]"): + writeLog("Failed: %s"%self.url) + writeLog("Error: %s"%error) + self.status = _("Checking") + self.checkTimer.start(10000, 1) + + def getYoutubeDownloadLink(self): + mrl = None + html = get(self.url) + if html != "": + isHDAvailable = False + video_id = None + t = None + reonecat = re.compile(r'<title>(.+?)</title>', re.DOTALL) + titles = reonecat.findall(html) + if titles: + self.name = titles[0] + if self.name.startswith("YouTube - "): + self.name = (self.name[10:]).replace("&amp;", "&") + if html.__contains__("isHDAvailable = true"): + isHDAvailable = True + for line in html.split('\n'): + if 'swfArgs' in line: + line = line.strip().split() + x = 0 + for thing in line: + if 'video_id' in thing: + video_id = line[x+1][1:-2] + elif '"t":' == thing: + t = line[x+1][1:-2] + x += 1 + if video_id and t: + if isHDAvailable == True: + mrl = "http://www.youtube.com/get_video?video_id=%s&t=%s&fmt=22" % (video_id, t) + else: + mrl = "http://www.youtube.com/get_video?video_id=%s&t=%s&fmt=18" % (video_id, t) + return mrl + +############################################################################## + +class RS: + def __init__(self): + self.downloads = [] + self.checkTimer = eTimer() + self.checkTimer.callback.append(self.startDownloading) + self.checkTimer.start(5000*60, False) + + def mayDownload(self): + if config.plugins.RSDownloader.onoff.value == False: + writeLog("RS Downloader is turned off...") + return False + elif config.plugins.RSDownloader.ignore_time.value: + return True + else: + start = config.plugins.RSDownloader.start_time.value + end = config.plugins.RSDownloader.end_time.value + t = localtime() + weekday = t[6] + if weekday == 0 and config.plugins.RSDownloader.download_monday.value == False: + return False + elif weekday == 1 and config.plugins.RSDownloader.download_tuesday.value == False: + return False + elif weekday == 2 and config.plugins.RSDownloader.download_wednesday.value == False: + return False + elif weekday == 3 and config.plugins.RSDownloader.download_thursday.value == False: + return False + elif weekday == 4 and config.plugins.RSDownloader.download_friday.value == False: + return False + elif weekday == 5 and config.plugins.RSDownloader.download_saturday.value == False: + return False + elif weekday == 6 and config.plugins.RSDownloader.download_sunday.value == False: + return False + else: + hour_now = t[3] + minute_now = t[4] + hour_start = start[0] + minute_start = start[1] + 
hour_end = end[0] + minute_end = end[1] + if start == end: # Same start and end-time + return True + elif hour_end < hour_start: # Different days!!! + if hour_now > hour_start or hour_now < hour_end: + return True + elif hour_now == hour_start and minute_now > minute_start: + return True + elif hour_now == hour_end and minute_now < minute_end: + return True + else: + return False + elif hour_now > hour_start and hour_now < hour_end: # Same day... + return True + elif hour_now == hour_start and minute_now > minute_start: # Same day, same start-hour... + return True + elif hour_now == hour_end and minute_now < minute_end: # Same day, same end-hour... + return True + else: + return False + + def allDownloadsFinished(self): + allDone = True + for download in self.downloads: + if (download.status != _("Failed")) and (download.status != _("Finished")): + allDone = False + return allDone + + def startDownloading(self): + if self.mayDownload() == True: + if self.allDownloadsFinished() == True: + self.readLists() + downloadCount = 0 + for download in self.downloads: + if download.downloading == True: + downloadCount += 1 # Count the downloaded files + if config.plugins.RSDownloader.username.value == "" and config.plugins.RSDownloader.password.value == "": + if downloadCount < 1: # Allow one download if without account + for download in self.downloads: + if download.downloading == False and download.status.startswith(_("Waiting")): + download.start() # Start first download in the list + break + else: + mayDownloadCount = config.plugins.RSDownloader.count_downloads.value - downloadCount + for download in self.downloads: + if download.downloading == False: + if mayDownloadCount > 0 and download.status == _("Waiting"): + download.start() + mayDownloadCount -= 1 + + def addDownload(self, url): + error = False + for download in self.downloads: + if download.url == url: + error = True + if error: + return False + else: + download = RSDownload(url) + download.finishCallbacks.append(self.cleanLists) + self.downloads.append(download) + return True + + def readLists(self): + writeLog("Reading all lists...") + path = config.plugins.RSDownloader.lists_directory.value + if not path.endswith("/"): + path = path + "/" + writeLog("Directory: " + path) + try: + file_list = listdir(path) + writeLog("Count of lists: " + str(len(file_list))) + except: + file_list = [] + writeLog("Could not find any list!") + for x in file_list: + list = path + x + if list.endswith(".txt"): + try: + writeLog("Reading list %s..."%list) + f = open(list, "r") + count = 0 + for l in f: + if l.startswith("http://"): + if (self.addDownload(l.replace("\n", "").replace("\r", ""))) == True: + count += 1 + f.close() + if count == 0: + writeLog("Empty list or downloads already in download list: %s"%list) + else: + writeLog("Added %d files from list %s..."%(count, list)) + except: + writeLog("Error while reading list %s!"%list) + else: + writeLog("No *.txt file: %s!"%list) + + def cleanLists(self): + writeLog("Cleaning lists...") + path = config.plugins.RSDownloader.lists_directory.value + if not path.endswith("/"): + path = path + "/" + try: + file_list = listdir(path) + except: + file_list = [] + for x in file_list: + list = path + x + try: + f = open(list, "r") + content = f.read() + f.close() + for download in self.downloads: + if download.status == _("Finished") and content.__contains__(download.url): + content = content.replace(download.url, "") + content = content.replace("\n\n", "\n").replace("\r\r", "\r") + f = open(list, "w") + 
f.write(content) + f.close() + except: + writeLog("Error while cleaning list %s!"%list) + self.startDownloading() + + def removeDownload(self, url): + tmp = [] + for download in self.downloads: + if download.url == url: + download.stop() + else: + tmp.append(download) + del self.downloads + self.downloads = tmp + self.removeFromLists(url) + + def removeFromLists(self, url): + path = config.plugins.RSDownloader.lists_directory.value + if not path.endswith("/"): + path = path + "/" + try: + file_list = listdir(path) + except: + file_list = [] + for x in file_list: + list = path + x + try: + f = open(list, "r") + content = f.read() + f.close() + if content.__contains__(url): + content = content.replace(url, "") + content = content.replace("\n\n", "\n").replace("\r\r", "\r") + f = open(list, "w") + f.write(content) + f.close() + except: + pass + + def clearFinishedDownload(self, url): + idx = 0 + for x in self.downloads: + if x.url == url: + del self.downloads[idx] + break + else: + idx += 1 + + def clearFinishedDownloads(self): + tmp = [] + for download in self.downloads: + if download.status != _("Finished"): + tmp.append(download) + del self.downloads + self.downloads = tmp + + def deleteFailedDownloads(self): + tmp = [] + for download in self.downloads: + if download.status == _("Failed"): + self.removeFromLists(download.url) + else: + tmp.append(download) + del self.downloads + self.downloads = tmp + + def restartFailedDownloads(self): + tmp = [] + for download in self.downloads: + if download.status == _("Failed"): + download.download = None + download.downloading = False + download.progress = 0 + download.size = 0 + download.status = _("Waiting") + tmp.append(download) + del self.downloads + self.downloads = tmp + self.startDownloading() + +rapidshare = RS() + +############################################################################## + +class ChangedScreen(Screen): + def __init__(self, session, parent=None): + Screen.__init__(self, session, parent) + self.onLayoutFinish.append(self.setScreenTitle) + + def setScreenTitle(self): + self.setTitle(_("RS Downloader")) + +############################################################################## + +class RSConfig(ConfigListScreen, ChangedScreen): + skin = """ + + + + + + + + """ + + def __init__(self, session): + ChangedScreen.__init__(self, session) + + self["key_green"] = Label(_("Save")) + + ConfigListScreen.__init__(self, [ + getConfigListEntry(_("Download in the background:"), config.plugins.RSDownloader.onoff), + getConfigListEntry(_("Username:"), config.plugins.RSDownloader.username), + getConfigListEntry(_("Password:"), config.plugins.RSDownloader.password), + getConfigListEntry(_("Lists directory:"), config.plugins.RSDownloader.lists_directory), + getConfigListEntry(_("Downloads directory:"), config.plugins.RSDownloader.downloads_directory), + getConfigListEntry(_("Ignore download times:"), config.plugins.RSDownloader.ignore_time), + getConfigListEntry(_("Allow downloading on monday:"), config.plugins.RSDownloader.download_monday), + getConfigListEntry(_("Allow downloading on tuesday:"), config.plugins.RSDownloader.download_tuesday), + getConfigListEntry(_("Allow downloading on wednesday:"), config.plugins.RSDownloader.download_wednesday), + getConfigListEntry(_("Allow downloading on thursday:"), config.plugins.RSDownloader.download_thursday), + getConfigListEntry(_("Allow downloading on friday:"), config.plugins.RSDownloader.download_friday), + getConfigListEntry(_("Allow downloading on saturday:"), 
+
+    def removeDownload(self, url):
+        tmp = []
+        for download in self.downloads:
+            if download.url == url:
+                download.stop()
+            else:
+                tmp.append(download)
+        del self.downloads
+        self.downloads = tmp
+        self.removeFromLists(url)
+
+    def removeFromLists(self, url):
+        path = config.plugins.RSDownloader.lists_directory.value
+        if not path.endswith("/"):
+            path = path + "/"
+        try:
+            file_list = listdir(path)
+        except:
+            file_list = []
+        for x in file_list:
+            list = path + x
+            try:
+                f = open(list, "r")
+                content = f.read()
+                f.close()
+                if url in content:
+                    content = content.replace(url, "")
+                    content = content.replace("\n\n", "\n").replace("\r\r", "\r")
+                    f = open(list, "w")
+                    f.write(content)
+                    f.close()
+            except:
+                pass
+
+    def clearFinishedDownload(self, url):
+        idx = 0
+        for x in self.downloads:
+            if x.url == url:
+                del self.downloads[idx]
+                break
+            else:
+                idx += 1
+
+    def clearFinishedDownloads(self):
+        tmp = []
+        for download in self.downloads:
+            if download.status != _("Finished"):
+                tmp.append(download)
+        del self.downloads
+        self.downloads = tmp
+
+    def deleteFailedDownloads(self):
+        tmp = []
+        for download in self.downloads:
+            if download.status == _("Failed"):
+                self.removeFromLists(download.url)
+            else:
+                tmp.append(download)
+        del self.downloads
+        self.downloads = tmp
+
+    def restartFailedDownloads(self):
+        tmp = []
+        for download in self.downloads:
+            if download.status == _("Failed"):
+                download.download = None
+                download.downloading = False
+                download.progress = 0
+                download.size = 0
+                download.status = _("Waiting")
+            tmp.append(download)
+        del self.downloads
+        self.downloads = tmp
+        self.startDownloading()
+
+rapidshare = RS()
+
+##############################################################################
+
+class ChangedScreen(Screen):
+    def __init__(self, session, parent=None):
+        Screen.__init__(self, session, parent)
+        self.onLayoutFinish.append(self.setScreenTitle)
+
+    def setScreenTitle(self):
+        self.setTitle(_("RS Downloader"))
+
+##############################################################################
+
+class RSConfig(ConfigListScreen, ChangedScreen):
+    skin = """
+
+
+
+
+
+
+
+
+    """
+
+    def __init__(self, session):
+        ChangedScreen.__init__(self, session)
+
+        self["key_green"] = Label(_("Save"))
+
+        ConfigListScreen.__init__(self, [
+            getConfigListEntry(_("Download in the background:"), config.plugins.RSDownloader.onoff),
+            getConfigListEntry(_("Username:"), config.plugins.RSDownloader.username),
+            getConfigListEntry(_("Password:"), config.plugins.RSDownloader.password),
+            getConfigListEntry(_("Lists directory:"), config.plugins.RSDownloader.lists_directory),
+            getConfigListEntry(_("Downloads directory:"), config.plugins.RSDownloader.downloads_directory),
+            getConfigListEntry(_("Ignore download times:"), config.plugins.RSDownloader.ignore_time),
+            getConfigListEntry(_("Allow downloading on monday:"), config.plugins.RSDownloader.download_monday),
+            getConfigListEntry(_("Allow downloading on tuesday:"), config.plugins.RSDownloader.download_tuesday),
+            getConfigListEntry(_("Allow downloading on wednesday:"), config.plugins.RSDownloader.download_wednesday),
+            getConfigListEntry(_("Allow downloading on thursday:"), config.plugins.RSDownloader.download_thursday),
+            getConfigListEntry(_("Allow downloading on friday:"), config.plugins.RSDownloader.download_friday),
+            getConfigListEntry(_("Allow downloading on saturday:"), config.plugins.RSDownloader.download_saturday),
+            getConfigListEntry(_("Allow downloading on sunday:"), config.plugins.RSDownloader.download_sunday),
+            getConfigListEntry(_("Don't download before:"), config.plugins.RSDownloader.start_time),
+            getConfigListEntry(_("Don't download after:"), config.plugins.RSDownloader.end_time),
+            getConfigListEntry(_("Maximal downloads:"), config.plugins.RSDownloader.count_downloads),
+            getConfigListEntry(_("Write log:"), config.plugins.RSDownloader.write_log),
+            getConfigListEntry(_("Reconnect fritz.Box before downloading:"), config.plugins.RSDownloader.reconnect_fritz),
+            getConfigListEntry(_("Restart failed after 10 minutes:"), config.plugins.RSDownloader.autorestart_failed)])
+
+        self["actions"] = ActionMap(["OkCancelActions", "ColorActions"], {"green": self.save, "cancel": self.exit}, -1)
+
+    def save(self):
+        for x in self["config"].list:
+            x[1].save()
+        self.close()
+
+    def exit(self):
+        for x in self["config"].list:
+            x[1].cancel()
+        self.close()
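+    # The two handlers above follow the stock ConfigListScreen pattern: each
+    # list entry is a (label, ConfigElement) pair, so x[1] is the element
+    # itself. A sketch of what happens per entry (hypothetical session):
+    #   >>> entry = getConfigListEntry(_("Write log:"), config.plugins.RSDownloader.write_log)
+    #   >>> entry[1].save()    # green button: persist the edited value
+    #   >>> entry[1].cancel()  # exit: revert to the stored value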
+
+##############################################################################
+
+class RSSearch(Screen):
+    skin = """
+
+
+    """
+
+    def __init__(self, session, searchFor):
+        Screen.__init__(self, session)
+        self.session = session
+
+        self.searchFor = searchFor.replace(" ", "%2B")
+        self.maxPage = 1
+        self.curPage = 1
+        self.files = []
+
+        self["list"] = MenuList([])
+
+        self["actions"] = ActionMap(["OkCancelActions", "InfobarChannelSelection"],
+            {
+                "historyBack": self.previousPage,
+                "historyNext": self.nextPage,
+                "ok": self.okClicked,
+                "cancel": self.close
+            }, -1)
+
+        self.onLayoutFinish.append(self.search)
+
+    def okClicked(self):
+        if len(self.files) > 0:
+            idx = self["list"].getSelectedIndex()
+            url = self.files[idx]
+            try:
+                f = open(("%s/search.txt" % config.plugins.RSDownloader.lists_directory.value).replace("//", "/"), "a")
+                f.write("%s\n" % url)
+                f.close()
+                self.session.open(MessageBox, (_("Added %s to the download-list.") % url), MessageBox.TYPE_INFO)
+            except:
+                self.session.open(MessageBox, (_("Error while adding %s to the download-list!") % url), MessageBox.TYPE_ERROR)
+
+    def search(self):
+        getPage("http://rapidshare-search-engine.com/index-s_submit=Search&sformval=1&s_type=0&what=1&s=%s&start=%d.html" % (self.searchFor, self.curPage)).addCallback(self.searchCallback).addErrback(self.searchError)
+
+    def searchCallback(self, html=""):
+        list = []
+        files = []
+
+        if "Nothing found, sorry." in html:
+            self.session.open(MessageBox, (_("Error while searching http://rapidshare-search-engine.com!\n\nError: Nothing found, sorry.")), MessageBox.TYPE_ERROR)
+            self.instance.setTitle(_("Nothing found, sorry."))
+        else:
+            tmp = html
+            while "goPg('" in tmp:
+                idx = tmp.index("goPg('")
+                tmp = tmp[idx+6:]
+                idx = tmp.index("'")
+                pageNumber = tmp[:idx]
+
+                try:
+                    pageNumber = int(pageNumber)
+                    if pageNumber > self.maxPage:
+                        self.maxPage = pageNumber
+                except:
+                    pass
+
+            self.instance.setTitle(_("Page %d / %d. Push < > to switch the page...") % (self.curPage, self.maxPage))
+
+            while 'title="Download"' in html:
+                idx = html.index('title="Download"')
+                html = html[idx:]
+                idx = html.index('value="')
+                html = html[idx+7:]
+                idx = html.index('"')
+                size = html[:idx]
+                idx = html.index('http://rapidshare.com/')
+                html = html[idx:]
+                idx = html.index('"')
+                url = html[:idx]
+
+                files.append(url)
+                try:
+                    urllist = url.split("/")
+                    idx = len(urllist) - 1
+                    name = urllist[idx]
+                    list.append("%s - %s" % (size, name))
+                except:
+                    list.append("%s - %s" % (size, url))
+
+        self.files = files
+        self["list"].setList(list)
+
+    def searchError(self, error=""):
+        self.session.open(MessageBox, (_("Error while searching http://rapidshare-search-engine.com!\n\nError: %s") % str(error)), MessageBox.TYPE_ERROR)
+
+    def previousPage(self):
+        if self.curPage > 1:
+            self.curPage -= 1
+            self.instance.setTitle(_("Loading previous page... please wait!"))
+            self.search()
+
+    def nextPage(self):
+        if self.curPage < self.maxPage:
+            self.curPage += 1
+            self.instance.setTitle(_("Loading next page... please wait!"))
+            self.search()
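+    # The parser in searchCallback() above deliberately avoids an HTML library
+    # and walks the page with index()/slicing; the same technique on a toy
+    # string (hypothetical markup, not the real result page):
+    #   >>> html = 'value="95 MB" ... href="http://rapidshare.com/files/1/a.rar"'
+    #   >>> html = html[html.index('value="')+7:]
+    #   >>> size = html[:html.index('"')]                   # '95 MB'
+    #   >>> url = html[html.index('http://rapidshare.com/'):]
+    #   >>> url = url[:url.index('"')]                      # the bare file link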
+
+##############################################################################
+
+class RSLogScreen(ChangedScreen):
+    skin = """
+
+
+    """
+
+    def __init__(self, session):
+        ChangedScreen.__init__(self, session)
+
+        try:
+            f = open("/tmp/rapidshare.log")
+            log = f.read()
+            f.close()
+        except:
+            log = ""
+        self["label"] = ScrollLabel(log)
+
+        self["actions"] = ActionMap(["WizardActions"],
+            {
+                "ok": self.close,
+                "back": self.close,
+                "up": self["label"].pageUp,
+                "down": self["label"].pageDown,
+                "left": self["label"].pageUp,
+                "right": self["label"].pageDown
+            }, -1)
+
+##############################################################################
+
+class RSContainerSelector(ChangedScreen):
+    skin = """
+
+
+    """
+
+    def __init__(self, session, list):
+        ChangedScreen.__init__(self, session)
+        self["list"] = MenuList(list)
+        self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)
+
+    def okClicked(self):
+        cur = self["list"].getCurrent()
+        self.close(cur)
+
+##############################################################################
+
+class RSList(MenuList):
+    def __init__(self, list):
+        MenuList.__init__(self, list, False, eListboxPythonMultiContent)
+        self.l.setItemHeight(25)
+        self.l.setFont(0, gFont("Regular", 20))
+
+##############################################################################
+
+def RSListEntry(download):
+    res = [(download)]
+    res.append(MultiContentEntryText(pos=(0, 0), size=(170, 25), font=0, text=download.name))
+    res.append(MultiContentEntryText(pos=(175, 0), size=(75, 25), font=0, text="%d%s" % (download.size, "MB"), flags=RT_HALIGN_CENTER))
+    res.append(MultiContentEntryPixmapAlphaTest(pos=(260, 9), size=(84, 7), png=LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/progress_bg.png"))))
+    res.append(MultiContentEntryPixmapAlphaTest(pos=(260, 10), size=(int(0.84 * download.progress), 5), png=LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/progress_small.png"))))
+    res.append(MultiContentEntryText(pos=(360, 0), size=(60, 25), font=0, text="%d%s" % (download.progress, "%"), flags=RT_HALIGN_CENTER))
+    res.append(MultiContentEntryText(pos=(420, 0), size=(140, 25), font=0, text=download.status, flags=RT_HALIGN_RIGHT))
+    return res
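+# The progress bar drawn by RSListEntry() above is 84 pixels wide, so the
+# foreground pixmap is scaled with int(0.84 * download.progress); for example,
+# a download at 50% yields int(0.84 * 50) = 42 of 84 pixels.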
+
+##############################################################################
+
+class RSMain(ChangedScreen):
+    skin = """
+
+
+
+
+
+
+
+
+
+
+
+
+    """
+
+    def __init__(self, session):
+        ChangedScreen.__init__(self, session)
+        self.session = session
+
+        self["key_red"] = Label(_("Delete"))
+        self["key_green"] = Label(_("Search"))
+        self["key_yellow"] = Label(_("Add"))
+        self["key_blue"] = Label(_("Config"))
+        self["key_menu"] = Label(_("Menu"))
+        self["list"] = RSList([])
+
+        self.refreshTimer = eTimer()
+        self.refreshTimer.callback.append(self.updateList)
+
+        self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "InfobarMenuActions"],
+            {
+                "mainMenu": self.menu,
+                "cancel": self.close,
+                "red": self.delete,
+                "green": self.search,
+                "yellow": self.add,
+                "blue": self.config
+            }, prio=-1)
+
+        self.onLayoutFinish.append(self.updateList)
+
+    def menu(self):
+        list = []
+        #TODO: Add sort list functions
+        list.append((_("Delete download"), self.delete))
+        list.append((_("Use search engine"), self.search))
+        list.append((_("Add downloads from txt files"), self.add))
+        list.append((_("Add files from container"), self.addContainer))
+        list.append((_("Delete failed downloads"), self.deleteFailed))
+        list.append((_("Restart failed downloads"), self.restartFailed))
+        list.append((_("Clear finished downloads"), self.clearFinished))
+        list.append((_("Show log"), self.showLog))
+        list.append((_("Delete log"), self.deleteLog))
+        list.append((_("Close plugin"), self.close))
+        self.session.openWithCallback(self.menuCallback, ChoiceBox, title=_("Please choose a function..."), list=list)
+
+    def menuCallback(self, callback=None):
+        if callback is not None:
+            callback[1]()
+
+    def deleteFailed(self):
+        rapidshare.deleteFailedDownloads()
+
+    def restartFailed(self):
+        rapidshare.restartFailedDownloads()
+
+    def clearFinished(self):
+        rapidshare.clearFinishedDownloads()
+
+    def showLog(self):
+        self.session.open(RSLogScreen)
+
+    def deleteLog(self):
+        try:
+            remove("/tmp/rapidshare.log")
+        except:
+            pass
+
+    def updateList(self):
+        list = []
+        for download in rapidshare.downloads:
+            list.append(RSListEntry(download))
+        self["list"].setList(list)
+        self.refreshTimer.start(2000, 1)
+
+    def delete(self):
+        cur = self["list"].getCurrent()
+        if cur:
+            cur = cur[0]
+            if cur.status == _("Finished"):
+                rapidshare.clearFinishedDownload(cur.url)
+            else:
+                self.session.openWithCallback(self.deleteCallback, MessageBox, (_("Delete %s?") % cur.name))
+
+    def deleteCallback(self, callback):
+        if callback:
+            rapidshare.removeDownload(self["list"].getCurrent()[0].url)
+            self.refreshTimer.stop()
+            self.updateList()
+
+    def search(self):
+        self.session.openWithCallback(self.searchCallback, VirtualKeyBoard, title=_("Search http://rapidshare-search-engine.com for:"))
+
+    def searchCallback(self, callback):
+        if callback is not None and callback != "":
+            self.session.openWithCallback(self.searchScreenCallback, RSSearch, callback)
+
+    def searchScreenCallback(self):
+        self.refreshTimer.stop()
+        rapidshare.startDownloading()
+        self.updateList()
+
+    def add(self):
+        self.refreshTimer.stop()
+        rapidshare.startDownloading()
+        self.updateList()
+
+    def config(self):
+        self.session.openWithCallback(self.configCallback, RSConfig)
+
+    def configCallback(self):
+        if config.plugins.RSDownloader.onoff.value:
+            rapidshare.startDownloading()
+        else:
+            for download in rapidshare.downloads:
+                if download.downloading:
+                    download.stop()
+        self.updateList()
+
+    def addContainer(self):
+        try:
+            file_list = listdir(config.plugins.RSDownloader.lists_directory.value)
+        except:
+            file_list = []
+        list = []
+        for file in file_list:
+            if file.lower().endswith(".ccf") or file.lower().endswith(".dlc") or file.lower().endswith(".rsdf"):
+                list.append(file)
+        list.sort()
+        self.session.openWithCallback(self.addContainerCallback, RSContainerSelector, list)
+
+    def addContainerCallback(self, callback=None):
+        if callback:
+            file = "%s/%s" % (config.plugins.RSDownloader.lists_directory.value, callback)
+            file = file.replace("//", "/")
+            links = decrypt(file)
+            try:
+                f = open(("%s/%s.txt" % (config.plugins.RSDownloader.lists_directory.value, callback)).replace("//", "/"), "w")
+                for link in links:
+                    if link.endswith(".html"):
+                        link = link[:-5]
+                    elif link.endswith(".htm"):
+                        link = link[:-4]
+                    f.write("%s\n" % link)
+                f.close()
+            except:
+                pass
+        self.refreshTimer.stop()
+        rapidshare.startDownloading()
+        self.updateList()
+
+##############################################################################
+
+def autostart(reason, **kwargs):
+    if reason == 0:
+        rapidshare.startDownloading()
+
+##############################################################################
+
+def main(session, **kwargs):
+    session.open(RSMain)
+
+##############################################################################
+
+def Plugins(**kwargs):
+    return [
+        PluginDescriptor(where=PluginDescriptor.WHERE_AUTOSTART, fnc=autostart),
+        PluginDescriptor(name=_("RS Downloader"), description=_("Download files from rapidshare"), where=[PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_PLUGINMENU], icon="rs.png", fnc=main)]
+
diff --git a/rsdownloader/src/sslfactory.py b/rsdownloader/src/sslfactory.py
deleted file mode 100644
index 14903cd..0000000
--- a/rsdownloader/src/sslfactory.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the
-#      Free Software Foundation, Inc.,
-#      59 Temple Place, Suite 330,
-#      Boston, MA  02111-1307  USA
-
-# This file is part of urlgrabber, a high-level cross-protocol url-grabber
-
-import httplib
-import urllib2
-
-try:
-    from M2Crypto import SSL
-    from M2Crypto import httpslib
-    from M2Crypto import m2urllib2
-
-    have_m2crypto = True
-except ImportError:
-    have_m2crypto = False
-
-DEBUG = None
-
-if have_m2crypto:
-
-    class M2SSLFactory:
-
-        def __init__(self, ssl_ca_cert, ssl_context):
-            self.ssl_context = self._get_ssl_context(ssl_ca_cert, ssl_context)
-
-        def _get_ssl_context(self, ssl_ca_cert, ssl_context):
-            """
-            Create an ssl context using the CA cert file or ssl context.
-
-            The CA cert is used first if it was passed as an option. If not,
-            then the supplied ssl context is used. If no ssl context was supplied,
-            None is returned.
- """ - if ssl_ca_cert: - context = SSL.Context() - context.load_verify_locations(ssl_ca_cert) - context.set_verify(SSL.verify_peer, -1) - return context - else: - return ssl_context - - def create_https_connection(self, host, response_class = None): - connection = httplib.HTTPSConnection(host, self.ssl_context) - if response_class: - connection.response_class = response_class - return connection - - def create_opener(self, *handlers): - return m2urllib2.build_opener(self.ssl_context, *handlers) - - -class SSLFactory: - - def create_https_connection(self, host, response_class = None): - connection = httplib.HTTPSConnection(host) - if response_class: - connection.response_class = response_class - return connection - - def create_opener(self, *handlers): - return urllib2.build_opener(*handlers) - - - -def get_factory(ssl_ca_cert = None, ssl_context = None): - """ Return an SSLFactory, based on if M2Crypto is available. """ - if have_m2crypto: - return M2SSLFactory(ssl_ca_cert, ssl_context) - else: - # Log here if someone provides the args but we don't use them. - if ssl_ca_cert or ssl_context: - if DEBUG: - DEBUG.warning("SSL arguments supplied, but M2Crypto is not available. " - "Using Python SSL.") - return SSLFactory() \ No newline at end of file