path: root/module
author RaNaN <Mast3rRaNaN@hotmail.de> 2009-05-20 13:05:21 +0200
committer RaNaN <Mast3rRaNaN@hotmail.de> 2009-05-20 13:05:21 +0200
commit 641cd8c63f4c3a96a9acf4d46450ab975b9c66cd (patch)
tree a1bf0e74c3bde51d174bc0d154d1f88359437fbc /module
parent Created module structure and added request class (diff)
downloadpyload-641cd8c63f4c3a96a9acf4d46450ab975b9c66cd.tar.xz
Integrated the new Request class + a few status functions
Diffstat (limited to 'module')
-rw-r--r--  module/Py_Load_File.py     |  28
-rw-r--r--  module/download_thread.py  | 124
-rwxr-xr-x  module/network/Request.py  |  77
-rw-r--r--  module/thread_list.py      |  87
4 files changed, 291 insertions, 25 deletions
diff --git a/module/Py_Load_File.py b/module/Py_Load_File.py
new file mode 100644
index 000000000..4a15cd990
--- /dev/null
+++ b/module/Py_Load_File.py
@@ -0,0 +1,28 @@
+from download_thread import Status
+
+class PyLoadFile:
+ """ represents the url or file
+ """
+ def __init__(self, parent, plugin, url):
+ self.parent = parent
+ self.id = None
+ pluginClass = getattr(plugin, plugin.__name__)
+ self.plugin = pluginClass(self)
+ self.url = url
+ self.filename = "filename"
+ self.download_folder = ""
+ self.status = Status(self)
+
+
+ def _get_my_plugin():
+ plugins = parent.get_avail_plugins()
+
+
+ def prepareDownload(self):
+ self.status.exists = self.plugin.file_exists()
+ if self.status.exists:
+ self.status.filename = self.plugin.get_file_name()
+ self.status.waituntil = self.plugin.time_plus_wait
+ self.status.url = self.plugin.get_file_url()
+ self.status.want_reconnect = self.plugin.want_reconnect
+
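For orientation (not part of the commit): a minimal sketch of how PyLoadFile is meant to be driven, assuming module/ is importable as a package. The FakePlugin module below is a stand-in, not a real hoster plugin; it only provides the attributes prepareDownload() reads.

    from types import ModuleType
    from module.Py_Load_File import PyLoadFile

    class FakePlugin(object):
        """Stand-in hoster plugin exposing the interface prepareDownload() relies on."""
        def __init__(self, pyfile):
            self.pyfile = pyfile
            self.time_plus_wait = 0
            self.want_reconnect = False
        def file_exists(self):
            return True
        def get_file_name(self):
            return "file.zip"
        def get_file_url(self):
            return "http://example.com/file.zip"

    # PyLoadFile looks the class up via getattr(plugin, plugin.__name__),
    # so the module name must match the class name.
    plugin_module = ModuleType("FakePlugin")
    plugin_module.FakePlugin = FakePlugin

    pyfile = PyLoadFile(None, plugin_module, "http://example.com/file.zip")
    pyfile.prepareDownload()
    print pyfile.status.filename, pyfile.status.exists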
diff --git a/module/download_thread.py b/module/download_thread.py
new file mode 100644
index 000000000..f6f43a295
--- /dev/null
+++ b/module/download_thread.py
@@ -0,0 +1,124 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+#Copyright (C) 2009 sp00b, sebnapi
+#
+#This program is free software; you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation; either version 3 of the License,
+#or (at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+#See the GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program; if not, see <http://www.gnu.org/licenses/>.
+#
+###
+
+import threading
+from time import time, sleep
+from copy import copy
+import urllib
+
+
+class Status(object):
+ """ Saves all status information
+ """
+ def __init__(self, pyfile):
+ self.pyfile = pyfile
+ self.type = None
+ self.status_queue = None
+ self.total_kb = 0
+ self.downloaded_kb = 0
+ self.rate = 0
+ self.expected_time = 0
+ self.filename = None
+ self.url = None
+ self.exists = None
+ self.waituntil = None
+ self.want_reconnect = None
+
+ def __call__(self, blocks_read, block_size, total_size):
+ if self.status_queue == None:
+ return False
+ self.start = time()
+ self.last_status = time()
+ self.total_kb = total_size / 1024
+ self.downloaded_kb = (blocks_read * block_size) / 1024
+ elapsed_time = time() - self.start
+ if elapsed_time != 0:
+ self.rate = self.downloaded_kb / elapsed_time
+ if self.rate != 0:
+ self.expected_time = self.downloaded_kb / self.rate
+ if self.last_status+0.2 < time():
+ self.status_queue.put(copy(self))
+ self.last_status = time()
+
+ def set_status_queue(self, queue):
+ self.status_queue = queue
+
+ def getETA():
+ return self.pyfile.plugin.req.getETA()
+ def getSpeed():
+ return self.pyfile.plugin.req.getSpeed()
+ def kBleft():
+ return self.pyfile.plugins.req.kBleft()
+
+
+class Download_Thread(threading.Thread):
+    def __init__(self, parent):
+        threading.Thread.__init__(self)
+        self.shutdown = False
+        self.parent = parent
+        self.setDaemon(True)
+        self.loadedPyFile = None
+
+        self.start()
+
+    def run(self):
+        while not self.shutdown:
+            if not self.parent.download_queue.empty():
+                self.loadedPyFile = self.parent.getJob()
+                self.download(self.loadedPyFile)
+
+        if self.shutdown:
+            sleep(1)
+            self.parent.remove_thread(self)
+
+    def download(self, py_load_file):
+        pyfile = py_load_file
+        status = pyfile.status
+        pyfile.prepareDownload()
+
+        if not status.exists:
+            return False
+
+        if status.want_reconnect:
+            print "handle reconnect"
+            return False
+
+        while time() < status.waituntil:
+            status.type = "waiting"
+            sleep(1)  # possibly wait for the exact time instead
+
+        # missing: handle the case where the file is not available on the server
+        #if type == "check":
+        #    return params
+        #if type in 'missing':
+        #    self.status = "missing"
+        #    print "File not available on server: " + params
+        #    ## log that the file is not available on the server
+        #    #warning("File not available on server: " + url)
+
+        print "going to download"
+        status.type = "downloading"
+        print status.url, status.filename
+
+        pyfile.plugin.req.download(status.url, pyfile.download_folder + "/" + status.filename)
+        status.type = "finished"
+        # starts the downloader
+        #urllib.urlretrieve(status.url, pyfile.download_folder + "/" + status.filename, status)
+        #self.shutdown = True
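A small, self-contained sketch (not part of the commit) of the reporthook protocol Status.__call__ implements; compare the commented-out urlretrieve call above, which would pass the Status instance as the hook. The loop below only simulates block callbacks, so no network access is needed.

    from Queue import Queue
    from time import sleep
    from module.download_thread import Status

    status = Status(None)             # the pyfile reference is not needed by the hook itself
    status.set_status_queue(Queue())

    # Simulate what urllib.urlretrieve would do: call the hook once per block read.
    total_size, block_size = 1024 * 1024, 8192
    for blocks_read in range(total_size // block_size + 1):
        status(blocks_read, block_size, total_size)
        sleep(0.01)

    # Each queued item is a snapshot (copy) of the Status object at that moment.
    while not status.status_queue.empty():
        snap = status.status_queue.get()
        print "%s of %s kB at %.1f kB/s" % (snap.downloaded_kb, snap.total_kb, snap.rate)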
diff --git a/module/network/Request.py b/module/network/Request.py
index 3325d085d..07b0129d2 100755
--- a/module/network/Request.py
+++ b/module/network/Request.py
@@ -7,6 +7,8 @@ import urllib
import urllib2
import cookielib
import Keepalive
+import base64
+import time
from Keepalive import HTTPHandler
from cStringIO import StringIO
@@ -21,23 +23,19 @@ from gzip import GzipFile
retrieveUrl returns response as string
"""
-class Downloader(urllib.FancyURLopener):
-    version = "Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.8"
-
-
-
 class Request:
     def __init__(self):
+
+        self.dl_time = 0
+        self.dl_size = 0
+        self.dl_arrived = 0
+        self.dl = False
         self.lastURL = None
-        #self.cookiefile = 'cookies.lwp'
         self.cj = cookielib.CookieJar()
-
-#        if os.path.isfile(self.cookiefile):
-#            self.cj.load(self.cookiefile)
-
-        self.handler = HTTPHandler()
-        self.opener = urllib2.build_opener(self.handler, urllib2.HTTPCookieProcessor(self.cj))
+        handler = HTTPHandler()
+        self.opener = urllib2.build_opener(handler, urllib2.HTTPCookieProcessor(self.cj))
+        self.downloader = urllib2.build_opener()
         #self.opener.add_handler()
         self.opener.addheaders = [
@@ -47,11 +45,15 @@
             ("Accept-Charset","ISO-8859-1,utf-8;q=0.7,*;q=0.7"),
             ("Connection","keep-alive"),
             ("Keep-Alive","300")]
-
-        self.downloader = Downloader()
+
+        self.downloader.addheaders = [
+            ("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; de; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.8"),
+            ("Accept-Encoding","gzip,deflate"),
+            ("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
+            ("Accept-Charset","ISO-8859-1,utf-8;q=0.7,*;q=0.7")]
-    def retrieveURL(self, url, get = {}, post = {}, ref = True):
+    def load(self, url, get = {}, post = {}, ref = True):
         if post:
             post = urllib.urlencode(post)
@@ -72,11 +74,7 @@ class Request:
         rep = self.opener.open(req)
         output = rep.read()
-
-        print rep.headers
-
-        self.cj.extract_cookies(rep, req)
-
+
         if rep.headers.has_key("content-encoding"):
             if rep.headers["content-encoding"] == "gzip":
                 output = GzipFile('','r',0,StringIO(output)).read()
@@ -86,13 +84,42 @@ class Request:
         return output
     def addAuth(self, user, pw):
-        auth_handler = urllib2.HTTPBasicAuthHandler()
-        auth_handler.add_password(user, passwd= pw)
-        self.opener.add_handler(auth_handler)
+        self.downloader.addheaders.append(['Authorization', 'Basic ' + base64.encodestring(user + ':' + pw)[:-1]])
+
+
+    #def download(url, filename, reporthook = None, data = None):  # is None also the default of urlretrieve?
+    #    return self.downloader.urlretrieve(url, filename, reporthook, data)
+
+    def download(self, url, filename):
+        if not self.dl:
+            self.dl = True
+            file = open(filename, 'wb')
+            req = urllib2.Request(url)
+            conn = self.downloader.open(req)
+            self.dl_size = int(conn.headers["content-length"])
+            self.dl_arrived = 0
+            self.dl_time = time.time()
+            for chunk in conn:
+                self.dl_arrived += len(chunk)
+                file.write(chunk)
+            file.close()
+            self.dl = False
+            return True
+
+    def getSpeed(self):
+        try:
+            return (self.dl_arrived / (time.time() - self.dl_time)) / 1024
+        except:
+            return "No Download"
-    def download(url, filename, reporthook = None, data = None):  # is None also the default of urlretrieve?
-        return self.downloader.urlretrieve(url, filename, reporthook, data)
+    def getETA(self):
+        try:
+            return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
+        except:
+            return "No Download"
+    def kBleft(self):
+        return (self.dl_size - self.dl_arrived) / 1024
 if __name__ == "__main__":
     import doctest
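A rough usage sketch (not part of the commit) of the reworked Request API, assuming the module package is importable; the URLs are placeholders and both calls hit the network.

    from module.network.Request import Request

    req = Request()
    html = req.load("http://example.com/")        # was retrieveURL(); returns the response body

    # download() streams through the plain urllib2 opener and tracks progress in
    # dl_size / dl_arrived, which getSpeed(), getETA() and kBleft() read from.
    req.download("http://example.com/file.zip", "file.zip")
    print req.getSpeed(), req.getETA(), req.kBleft()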
diff --git a/module/thread_list.py b/module/thread_list.py
new file mode 100644
index 000000000..2a5dbe6f6
--- /dev/null
+++ b/module/thread_list.py
@@ -0,0 +1,87 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+#Copyright (C) 2009 sp00b, sebnapi
+#
+#This program is free software; you can redistribute it and/or modify
+#it under the terms of the GNU General Public License as published by
+#the Free Software Foundation; either version 3 of the License,
+#or (at your option) any later version.
+#
+#This program is distributed in the hope that it will be useful,
+#but WITHOUT ANY WARRANTY; without even the implied warranty of
+#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+#See the GNU General Public License for more details.
+#
+#You should have received a copy of the GNU General Public License
+# along with this program; if not, see <http://www.gnu.org/licenses/>.
+#
+###
+#python
+import re
+import subprocess
+import time
+import urllib2
+from Queue import Queue
+
+#my
+from download_thread import Download_Thread
+
+class Thread_List(object):
+    def __init__(self, parent):
+        self.parent = parent
+        self.threads = []
+        self.max_threads = 3
+        self.py_load_files = []
+        self.download_queue = Queue()
+        self.status_queue = Queue()
+        self.f_relation = [0, 0]
+
+    def create_thread(self):
+        """ Creates a download thread for the queued Py_Load_Files and appends it to self.threads.
+        """
+        if self.py_load_files:
+            thread = Download_Thread(self)
+            self.threads.append(thread)
+            return True
+
+    def get_loaded_urls(self):
+        loaded_urls = []
+        for file in self.py_load_files:
+            loaded_urls.append(file.url)
+        return loaded_urls
+
+    def remove_thread(self, thread):
+        self.threads.remove(thread)
+
+    def status(self):
+        if not self.status_queue.empty():
+            while not self.status_queue.empty():
+                status = self.status_queue.get()
+                self.py_load_files[status.pyfile.id].status = status
+
+    def getJob(self):
+        # only if downloading is actually wanted; otherwise send the thread into idle
+        if True:
+            return self.download_queue.get()
+
+    def extend_py_load_files(self):
+        pass
+
+    def select_thread(self):
+        """ Spawns a new thread if the limit has not been reached yet.
+        """
+        if len(self.threads) < self.max_threads:
+            self.create_thread()
+
+    def append_py_load_file(self, py_load_file):
+        py_load_file.id = len(self.py_load_files)
+        self.py_load_files.append(py_load_file)
+        self.download_queue.put(py_load_file)
+        self.f_relation[1] += 1
+        self.select_thread()
+
+    def reconnect(self):
+        # reconnectMethod is expected to be the configured reconnect script/command
+        reconn = subprocess.Popen(reconnectMethod)
+        reconn.wait()
+        ip = re.match(".*Current IP Address: (.*)</body>.*", urllib2.urlopen("http://checkip.dyndns.org/").read()).group(1)  # try to read the new IP
+        while ip == "":  # keep trying until a new IP has been read
+            ip = re.match(".*Current IP Address: (.*)</body>.*", urllib2.urlopen("http://checkip.dyndns.org/").read()).group(1)
+            time.sleep(1)
+        #print "New IP: " + ip