author     cp1 <christopherpool1@googlemail.com>    2009-06-11 16:50:48 +0200
committer  cp1 <christopherpool1@googlemail.com>    2009-06-11 16:50:48 +0200
commit     fd6e286ee089cf75276f2581471aba254d69fcb5 (patch)
tree       1f91e2063f3182d172d7c7f70c4fdb98b407a60b /module
parent     add wxversion selector to ensure wxWidgets 2.8 is used (diff)
download   pyload-fd6e286ee089cf75276f2581471aba254d69fcb5.tar.xz
correcting false indentation (tabs to spaces)
Diffstat (limited to 'module')
-rw-r--r--  module/Py_Load_File.py                   5
-rw-r--r--  module/__init__.py                       1
-rw-r--r--  module/download_thread.py               28
-rw-r--r--  module/network/Keepalive.py             46
-rw-r--r--  module/network/MultipartPostHandler.py  11
-rwxr-xr-x  module/network/Request.py               34
-rw-r--r--  module/network/sslfactory.py            12
-rw-r--r--  module/remote/ClientSocket.py           35
-rw-r--r--  module/remote/RequestHandler.py         11
-rw-r--r--  module/remote/SocketServer.py            6
-rw-r--r--  module/thread_list.py                   43
11 files changed, 112 insertions, 120 deletions
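
For reference, a minimal sketch of how a tabs-to-spaces pass like this one could be scripted. The directory walk over module/, the .py filter, and the four-spaces-per-tab width are assumptions for illustration; they are not taken from the commit itself.

import os

TAB_WIDTH = 4  # assumed indent width; the commit does not state it

def untabify(path, tab_width=TAB_WIDTH):
    # Rewrite `path` in place, expanding every tab character to spaces.
    with open(path) as src:
        lines = src.readlines()
    with open(path, "w") as dst:
        for line in lines:
            dst.write(line.expandtabs(tab_width))

if __name__ == "__main__":
    # Walk the module/ tree (assumed target) and normalize the Python sources.
    for root, _dirs, files in os.walk("module"):
        for name in files:
            if name.endswith(".py"):
                untabify(os.path.join(root, name))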
diff --git a/module/Py_Load_File.py b/module/Py_Load_File.py
index d0a4c4591..8779a18fd 100644
--- a/module/Py_Load_File.py
+++ b/module/Py_Load_File.py
@@ -24,7 +24,7 @@ class PyLoadFile:
return plugin
return "Plugin"
-
+
def prepareDownload(self):
if self.parent.config['useproxy']:
@@ -36,5 +36,4 @@ class PyLoadFile:
self.status.filename = self.plugin.get_file_name()
self.status.waituntil = self.plugin.time_plus_wait
self.status.url = self.plugin.get_file_url()
- self.status.want_reconnect = self.plugin.want_reconnect
-
+ self.status.want_reconnect = self.plugin.want_reconnect
\ No newline at end of file
diff --git a/module/__init__.py b/module/__init__.py
index 8d1c8b69c..e69de29bb 100644
--- a/module/__init__.py
+++ b/module/__init__.py
@@ -1 +0,0 @@
-
diff --git a/module/download_thread.py b/module/download_thread.py
index 5313a7d34..3ad2ba85d 100644
--- a/module/download_thread.py
+++ b/module/download_thread.py
@@ -1,6 +1,6 @@
#!/usr/bin/python
-# -*- coding: utf-8 -*-
-#
+# -*- coding: utf-8 -*-
+#
#Copyright (C) 2009 sp00b, sebnapi
#
#This program is free software; you can redistribute it and/or modify
@@ -34,15 +34,15 @@ class Status(object):
self.exists = False
self.waituntil = None
self.want_reconnect = False
-
+
def get_ETA(self):
return self.pyfile.plugin.req.get_ETA()
def get_speed(self):
return self.pyfile.plugin.req.get_speed()
def kB_left(self):
return self.pyfile.plugins.req.kB_left()
-
-
+
+
class Download_Thread(threading.Thread):
def __init__(self, parent):
threading.Thread.__init__(self)
@@ -50,9 +50,9 @@ class Download_Thread(threading.Thread):
self.parent = parent
self.setDaemon(True)
self.loadedPyFile = None
-
+
self.start()
-
+
def run(self):
while (not self.shutdown):
if self.parent.py_load_files:
@@ -76,24 +76,24 @@ class Download_Thread(threading.Thread):
if not status.exists:
raise "FileDontExists" #i know its deprecated, who cares^^
-
- status.type = "waiting"
+
+ status.type = "waiting"
while (time() < status.waituntil):
if self.parent.init_reconnect() or self.parent.reconnecting:
status.type = "reconnected"
- status.want_reconnect = False
+ status.want_reconnect = False
return False
sleep(1)
status.want_reconnect = False
-
+
status.type = "downloading"
-
+
pyfile.plugin.proceed(status.url, pyfile.download_folder + "/" + status.filename)
status.type = "finished"
#startet downloader
- #urllib.urlretrieve(status.url, pyfile.download_folder + "/" + status.filename, status)
- #self.shutdown = True
+ #urllib.urlretrieve(status.url, pyfile.download_folder + "/" + status.filename, status)
+ #self.shutdown = True
\ No newline at end of file
diff --git a/module/network/Keepalive.py b/module/network/Keepalive.py
index 68abe087d..dbf4d94cb 100644
--- a/module/network/Keepalive.py
+++ b/module/network/Keepalive.py
@@ -9,9 +9,9 @@
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the
-# Free Software Foundation, Inc.,
-# 59 Temple Place, Suite 330,
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc.,
+# 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
# This file is part of urlgrabber, a high-level cross-protocol url-grabber
@@ -24,7 +24,7 @@
>>> keepalive_handler = HTTPHandler()
>>> opener = urllib2.build_opener(keepalive_handler)
>>> urllib2.install_opener(opener)
->>>
+>>>
>>> fo = urllib2.urlopen('http://www.python.org')
If a connection to a given host is requested, and all of the existing
@@ -113,7 +113,7 @@ import sslfactory
import sys
if sys.version_info < (2, 4): HANDLE_ERRORS = 1
else: HANDLE_ERRORS = 0
-
+
class ConnectionManager:
"""
The connection manager must be able to:
@@ -153,7 +153,7 @@ class ConnectionManager:
def set_ready(self, connection, ready):
try: self._readymap[connection] = ready
except KeyError: pass
-
+
def get_ready_conn(self, host):
conn = None
self._lock.acquire()
@@ -177,7 +177,7 @@ class ConnectionManager:
class KeepAliveHandler:
def __init__(self):
self._cm = ConnectionManager()
-
+
#### Connection Management
def open_connections(self):
"""return a list of connected hosts and the number of connections
@@ -191,14 +191,14 @@ class KeepAliveHandler:
for h in self._cm.get_all(host):
self._cm.remove(h)
h.close()
-
+
def close_all(self):
"""close all open connections"""
for host, conns in self._cm.get_all().items():
for h in conns:
self._cm.remove(h)
h.close()
-
+
def _request_closed(self, request, host, connection):
"""tells us that this request is now closed and the the
connection is ready for another request"""
@@ -207,7 +207,7 @@ class KeepAliveHandler:
def _remove_connection(self, host, connection, close=0):
if close: connection.close()
self._cm.remove(connection)
-
+
#### Transaction Execution
def do_open(self, req):
host = req.get_host()
@@ -238,7 +238,7 @@ class KeepAliveHandler:
r = h.getresponse()
except (socket.error, httplib.HTTPException), err:
raise urllib2.URLError(err)
-
+
# if not a persistent connection, don't try to reuse it
if r.will_close: self._cm.remove(h)
@@ -250,7 +250,7 @@ class KeepAliveHandler:
r.code = r.status
r.headers = r.msg
r.msg = r.reason
-
+
if r.status == 200 or not HANDLE_ERRORS:
return r
else:
@@ -286,7 +286,7 @@ class KeepAliveHandler:
self._cm.remove(h)
h.close()
raise
-
+
if r is None or r.version == 9:
# httplib falls back to assuming HTTP 0.9 if it gets a
# bad header back. This is most likely to happen if
@@ -342,7 +342,7 @@ class HTTPSHandler(KeepAliveHandler, urllib2.HTTPSHandler):
if not ssl_factory:
ssl_factory = sslfactory.get_factory()
self._ssl_factory = ssl_factory
-
+
def https_open(self, req):
return self.do_open(req)
@@ -367,7 +367,7 @@ class HTTPResponse(httplib.HTTPResponse):
# although read() never adds to the buffer.
# Both readline and readlines have been stolen with almost no
# modification from socket.py
-
+
def __init__(self, sock, debuglevel=0, strict=0, method=None):
if method: # the httplib in python 2.3 uses the method arg
@@ -396,7 +396,7 @@ class HTTPResponse(httplib.HTTPResponse):
def close_connection(self):
self._handler._remove_connection(self._host, self._connection, close=1)
self.close()
-
+
def info(self):
return self.headers
@@ -453,7 +453,7 @@ class HTTPConnection(httplib.HTTPConnection):
class HTTPSConnection(httplib.HTTPSConnection):
response_class = HTTPResponse
-
+
#########################################################################
##### TEST FUNCTIONS
#########################################################################
@@ -487,7 +487,7 @@ def error_handler(url):
def continuity(url):
import md5
format = '%25s: %s'
-
+
# first fetch the file with the normal http handler
opener = urllib2.build_opener()
urllib2.install_opener(opener)
@@ -534,7 +534,7 @@ def comp(N, url):
t2 = fetch(N, url)
print ' TIME: %.3f s' % t2
print ' improvement factor: %.2f' % (t1/t2, )
-
+
def fetch(N, url, delay=0):
import time
lens = []
@@ -566,7 +566,7 @@ def test_timeout(url):
fo = urllib2.urlopen(url)
data1 = fo.read()
fo.close()
-
+
i = 20
print " waiting %i seconds for the server to close the connection" % i
while i > 0:
@@ -588,7 +588,7 @@ def test_timeout(url):
DEBUG = dbbackup
-
+
def test(url, N=10):
print "checking error hander (do this on a non-200)"
try: error_handler(url)
@@ -604,7 +604,7 @@ def test(url, N=10):
print
print "performing dropped-connection check"
test_timeout(url)
-
+
if __name__ == '__main__':
import time
import sys
@@ -614,4 +614,4 @@ if __name__ == '__main__':
except:
print "%s <integer> <url>" % sys.argv[0]
else:
- test(url, N)
+ test(url, N)
\ No newline at end of file
diff --git a/module/network/MultipartPostHandler.py b/module/network/MultipartPostHandler.py
index f25c6fa1b..b6980ae18 100644
--- a/module/network/MultipartPostHandler.py
+++ b/module/network/MultipartPostHandler.py
@@ -2,18 +2,18 @@
####
# 02/2006 Will Holcomb <wholcomb@gmail.com>
-#
+#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
-#
+#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
-# 7/26/07 Slightly modified by Brian Schneider
+# 7/26/07 Slightly modified by Brian Schneider
# in order to support unicode files ( multipart_encode function )
"""
Usage:
@@ -84,7 +84,7 @@ class MultipartPostHandler(urllib2.BaseHandler):
request.add_unredirected_header('Content-Type', contenttype)
request.add_data(data)
-
+
return request
def multipart_encode(vars, files, boundary = None, buf = None):
@@ -135,5 +135,4 @@ def main():
validateFile("http://www.google.com")
if __name__=="__main__":
- main()
-
+ main()
\ No newline at end of file
diff --git a/module/network/Request.py b/module/network/Request.py
index de9000ab3..2a0199fb8 100755
--- a/module/network/Request.py
+++ b/module/network/Request.py
@@ -20,7 +20,7 @@ from cStringIO import StringIO
additionaly you can firstly pass the get and secondly the post data in form of a dictonary
when the last argument is true the handler simulate a http referer with the last called url.
retrieveUrl returns response as string
-
+
"""
class Request:
def __init__(self):
@@ -38,13 +38,13 @@ class Request:
self.opener = urllib2.build_opener(handler, urllib2.HTTPCookieProcessor(self.cj))
self.downloader = urllib2.build_opener()
#self.opener.add_handler()
-
+
self.opener.addheaders = [
("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"),
("Accept-Encoding", "gzip,deflate"),
("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7"),
- ("Connection", "keep-alive"),
+ ("Connection", "keep-alive"),
("Keep-Alive", "300")]
self.downloader.addheaders = [
@@ -52,20 +52,20 @@ class Request:
("Accept-Encoding", "gzip,deflate"),
("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7")]
-
-
+
+
def load(self, url, get={}, post={}, ref=True, cookies=False):
-
+
if post:
post = urllib.urlencode(post)
else:
post = None
-
+
if get:
get = urllib.urlencode(get)
else:
get = ""
-
+
url = url + get
req = urllib2.Request(url, data=post)
@@ -85,13 +85,13 @@ class Request:
self.cookies.append(cookie)
output = rep.read()
-
+
if rep.headers.has_key("content-encoding"):
if rep.headers["content-encoding"] == "gzip":
output = GzipFile('', 'r', 0, StringIO(output)).read()
-
+
self.lastURL = url
-
+
return output
def add_auth(self, user, pw):
@@ -111,12 +111,12 @@ class Request:
self.downloader.add_handler(handler)
def download(self, url, filename, post={}):
-
+
if post:
post = urllib.urlencode(post)
else:
post = None
-
+
if not self.dl:
self.dl = True
file = open(filename, 'wb')
@@ -128,14 +128,14 @@ class Request:
self.dl_size = 0
self.dl_arrived = 0
self.dl_time = time.time()
- for chunk in conn:
+ for chunk in conn:
self.dl_arrived += len(chunk)
file.write(chunk)
file.close()
self.dl = False
self.dl_finished = time.time()
return True
-
+
def get_speed(self):
try:
return (self.dl_arrived / ((time.time() if self.dl else self.dl_finished) - self.dl_time)) / 1024
@@ -144,7 +144,7 @@ class Request:
def get_ETA(self):
try:
- return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
+ return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
except:
return 0
@@ -153,4 +153,4 @@ class Request:
if __name__ == "__main__":
import doctest
- doctest.testmod()
+ doctest.testmod()
\ No newline at end of file
diff --git a/module/network/sslfactory.py b/module/network/sslfactory.py
index f7e6d3d7e..14903cd2a 100644
--- a/module/network/sslfactory.py
+++ b/module/network/sslfactory.py
@@ -9,9 +9,9 @@
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the
-# Free Software Foundation, Inc.,
-# 59 Temple Place, Suite 330,
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc.,
+# 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
# This file is part of urlgrabber, a high-level cross-protocol url-grabber
@@ -31,7 +31,7 @@ except ImportError:
DEBUG = None
if have_m2crypto:
-
+
class M2SSLFactory:
def __init__(self, ssl_ca_cert, ssl_context):
@@ -74,7 +74,7 @@ class SSLFactory:
def create_opener(self, *handlers):
return urllib2.build_opener(*handlers)
-
+
def get_factory(ssl_ca_cert = None, ssl_context = None):
""" Return an SSLFactory, based on if M2Crypto is available. """
@@ -86,4 +86,4 @@ def get_factory(ssl_ca_cert = None, ssl_context = None):
if DEBUG:
DEBUG.warning("SSL arguments supplied, but M2Crypto is not available. "
"Using Python SSL.")
- return SSLFactory()
+ return SSLFactory()
\ No newline at end of file
diff --git a/module/remote/ClientSocket.py b/module/remote/ClientSocket.py
index 4efeaf3ab..ee0d03e82 100644
--- a/module/remote/ClientSocket.py
+++ b/module/remote/ClientSocket.py
@@ -16,29 +16,29 @@ from RequestObject import RequestObject
class SocketThread(threading.Thread):
def __init__(self, adress, port, pw, client):
- threading.Thread.__init__(self)
- self.setDaemon(True)
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.connect((adress, port))
- self.socket = ClientSocket(sock, pw, client)
- self.start()
+ threading.Thread.__init__(self)
+ self.setDaemon(True)
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect((adress, port))
+ self.socket = ClientSocket(sock, pw, client)
+ self.start()
def run(self):
- asyncore.loop()
- print "loop closed"
+ asyncore.loop()
+ print "loop closed"
def push_exec(self, function, args=[]):
- obj = RequestObject()
- obj.command = "exec"
- obj.function = function
- obj.args = args
- self.push(obj)
+ obj = RequestObject()
+ obj.command = "exec"
+ obj.function = function
+ obj.args = args
+ self.push(obj)
def push(self, obj):
- self.socket.push_obj(obj)
+ self.socket.push_obj(obj)
-class ClientSocket(asynchat.async_chat):
+class ClientSocket(asynchat.async_chat):
def __init__(self, sock, pw, client):
asynchat.async_chat.__init__(self, sock)
self.data = ""
@@ -58,6 +58,5 @@ class ClientSocket(asynchat.async_chat):
self.data = ""
def push_obj(self, obj):
- string = self.handler.encrypt(obj)
- self.push(string)
-
+ string = self.handler.encrypt(obj)
+ self.push(string)
\ No newline at end of file
diff --git a/module/remote/RequestHandler.py b/module/remote/RequestHandler.py
index 2f6f248bd..9ded5d02e 100644
--- a/module/remote/RequestHandler.py
+++ b/module/remote/RequestHandler.py
@@ -1,5 +1,5 @@
#!/usr/bin/python
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
"""
authored by: RaNaN
@@ -31,7 +31,7 @@ class RequestHandler:
obj.response = func( * obj.args)
else:
obj.response = "error happend"
-
+
return self.encrypt(obj)
@@ -39,7 +39,7 @@ class RequestHandler:
try:
dec_str = base64.standard_b64decode(dec_str)
dec_str = self.bf.decrypt(dec_str)
-
+
dec_str = dec_str[:-(int(dec_str[-1], 16) + 1)]
obj = cPickle.loads(dec_str)
except:
@@ -60,7 +60,4 @@ class RequestHandler:
enc_str = self.bf.encrypt(enc_str)
enc_str = base64.standard_b64encode(enc_str)
- return enc_str + "\n"
-
-
-
+ return enc_str + "\n"
\ No newline at end of file
diff --git a/module/remote/SocketServer.py b/module/remote/SocketServer.py
index e5932bb6e..c683c6672 100644
--- a/module/remote/SocketServer.py
+++ b/module/remote/SocketServer.py
@@ -19,7 +19,7 @@ class ServerThread(threading.Thread):
threading.Thread.__init__(self)
self.setDaemon(True)
self.server = MainServerSocket(int(pycore.config['port']), pycore)
-
+
def run(self):
asyncore.loop()
print "loop closed"
@@ -52,8 +52,8 @@ class MainServerSocket(asyncore.dispatcher):
print "Connected from", address
SecondaryServerSocket(newSocket, self.pycore)
def handle_close(self):
- print "going to close"
- self.close()
+ print "going to close"
+ self.close()
class SecondaryServerSocket(asynchat.async_chat):
diff --git a/module/thread_list.py b/module/thread_list.py
index 9f60ca5f5..f7be6f000 100644
--- a/module/thread_list.py
+++ b/module/thread_list.py
@@ -1,6 +1,6 @@
#!/usr/bin/python
-# -*- coding: utf-8 -*-
-#
+# -*- coding: utf-8 -*-
+#
#Copyright (C) 2009 sp00b, sebnapi
#
#This program is free software; you can redistribute it and/or modify
@@ -48,16 +48,16 @@ class Thread_List(object):
thread = Download_Thread(self)
self.threads.append(thread)
return True
-
+
def get_loaded_urls(self):
loaded_urls = []
for file in self.py_load_files:
loaded_urls.append(file.url)
return loaded_urls
-
+
def remove_thread(self, thread):
self.threads.remove(thread)
-
+
def get_job(self):
"""return job if suitable, otherwise send thread idle"""
@@ -67,7 +67,7 @@ class Thread_List(object):
if self.pause:
return None
-
+
if self.reconnecting:
return None
@@ -80,24 +80,24 @@ class Thread_List(object):
if not self.py_load_files[i].modul.__name__ in self.occ_plugins:
pyfile = self.py_load_files.pop(i)
break
-
+
if pyfile:
- self.py_downloading.append(pyfile)
+ self.py_downloading.append(pyfile)
if not pyfile.plugin.multi_dl:
self.occ_plugins.append(pyfile.modul.__name__)
self.parent.logger.info('Download starts: ' + pyfile.url)
-
+
self.lock.release()
return pyfile
-
-
+
+
def job_finished(self, pyfile):
self.lock.acquire()
-
+
if not pyfile.plugin.multi_dl:
self.occ_plugins.remove(pyfile.modul.__name__)
-
- self.py_downloading.remove(pyfile)
+
+ self.py_downloading.remove(pyfile)
if pyfile.status.type == "finished":
self.parent.logger.info('Download finished: ' + pyfile.url + ' @' + str(pyfile.status.get_speed()) + 'kb/s')
@@ -132,13 +132,13 @@ class Thread_List(object):
def extend_py_load_files(self):
pass
-
+
def select_thread(self):
""" select a thread
"""
if len(self.threads) < self.max_threads:
self.create_thread()
-
+
def append_py_load_file(self, py_load_file):
py_load_file.id = len(self.py_load_files)
self.py_load_files.append(py_load_file)
@@ -152,7 +152,7 @@ class Thread_List(object):
if self.reconnecting:
return False
-
+
self.lock.acquire()
if self.check_reconnect():
@@ -163,13 +163,13 @@ class Thread_List(object):
self.reconnecting = False
self.lock.release()
return True
-
+
self.lock.release()
return False
-
+
def check_reconnect(self):
"""checks if all files want reconnect"""
-
+
if not self.py_downloading:
return False
@@ -190,5 +190,4 @@ class Thread_List(object):
while ip == "": #solange versuch bis neue ip ausgelesen
ip = re.match(".*Current IP Address: (.*)</body>.*", urllib2.urlopen("http://checkip.dyndns.org/").read()).group(1)
time.sleep(1)
- self.parent.logger.info("Reconnected, new IP: " + ip)
-
+ self.parent.logger.info("Reconnected, new IP: " + ip)
\ No newline at end of file