| author | RaNaN <Mast3rRaNaN@hotmail.de> | 2009-06-04 13:46:44 +0200 |
|---|---|---|
| committer | RaNaN <Mast3rRaNaN@hotmail.de> | 2009-06-04 13:46:44 +0200 |
| commit | 618c03fc5c5b57c154772b1fd373004e186ae0f8 (patch) | |
| tree | fc6e14b69785a86e2b69d257685e182d23a434b3 | |
| parent | removed test files (diff) | |
| download | pyload-618c03fc5c5b57c154772b1fd373004e186ae0f8.tar.xz | |
remove links from txt + several improvements
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | Core.py | 64 |
| -rw-r--r-- | config | 11 |
| -rw-r--r-- | module/Py_Load_File.py | 4 |
| -rw-r--r-- | module/download_thread.py | 28 |
| -rwxr-xr-x | module/network/Request.py | 92 |
| -rw-r--r-- | module/thread_list.py | 62 |

6 files changed, 109 insertions, 152 deletions
```diff
diff --git a/Core.py b/Core.py
--- a/Core.py
+++ b/Core.py
@@ -59,6 +59,7 @@ class Core(object):
         self.check_create(self.config['log_folder'], _("folder for logs"))
         self.check_create(self.config['download_folder'], _("folder for downloads"))
         self.check_create(self.config['link_file'], _("file for links"), False)
+        self.check_create(self.config['failed_file'], 'file for failed links', False)
 
         self.init_logger(logging.DEBUG) # logging level
 
@@ -82,13 +83,6 @@ class Core(object):
                 self.config[option] = config.get(section, option)
                 self.config[option] = False if self.config[option].lower() == 'false' else self.config[option]
 
-        self.config['language'] = config.get('general', 'language')
-        self.config['download_folder'] = config.get('general', 'download_folder')
-        self.config['link_file'] = config.get('general', 'link_file')
-        self.config['search_updates'] = config.getboolean('updates', 'search_updates')
-        self.config['log_folder'] = config.get('log', 'log_folder')
-        self.config['reconnectMethod'] = config.get('general', 'reconnect_method')
-
     def create_plugin_index(self):
         for file_handler in glob(self.config['plugin_folder'] + sep + '*.py'):
             if file_handler != self.config['plugin_folder'] + sep + "Plugin.py":
@@ -102,30 +96,6 @@ class Core(object):
                     self.logger.debug(plugin_file + _(" added"))
         self.logger.info(_("created index of plugins"))
 
-##    def check_needed_plugins(self):
-##        links = open(self.link_file, 'r').readlines()
-##        plugins_indexed = pickle.load(open(self.plugin_index, "r"))
-##        for link in links:
-##            link = link.replace("\n", "")
-##            for plugin_file in plugins_indexed.keys():
-##                if re.search(plugins_indexed[plugin_file], link) != None:
-##                    self.plugins_needed[plugin_file] = plugins_indexed[plugin_file]
-##        print "Benoetigte Plugins: " + str(self.plugins_needed.keys())
-##
-##    def import_needed_plugins(self):
-##        for needed_plugin in self.plugins_needed.keys():
-##            self.import_plugin(needed_plugin)
-##
-##    def import_plugin(self, needed_plugin):
-##        try:
-##            new_plugin = __import__(needed_plugin)
-##            self.plugins_dict[new_plugin] = self.plugins_needed[needed_plugin]
-##            #if new_plugin.plugin_type in "hoster" or new_plugin.plugin_type in "container":
-##            #    print "Plugin geladen: " + new_plugin.plugin_name
-##            #plugins[plugin_file] = __import__(plugin_file)
-##        except:
-##            print "Fehlerhaftes Plugin: " + needed_plugin
-##
     def _get_links(self, link_file):
         """ funktion nur zum testen ohne gui bzw.
         tui
@@ -161,21 +131,9 @@ class Core(object):
                     open(check_name, "w")
                 print _("%s created") % legend
             except:
-                print _("could %s not create ") % legend
+                print _("could not create %s") % legend
                 exit()
-
-    #def addLinks(self, newLinks, atTheBeginning):
-        #pass
-
-#    def get_hoster(self, url):
-#        """ searches the right plugin for an url
-#        """
-#        for plugin, plugin_pattern in self.plugins_avaible.items():
-#            if re.match(plugin_pattern, url) != None: #guckt ob übergebende url auf muster des plugins passt
-#                return plugin
-#        #logger: kein plugin gefunden
-#        return None
-
+
     def __new_py_load_file(self, url):
         new_file = PyLoadFile(self, url)
         new_file.download_folder = self.config['download_folder']
@@ -183,15 +141,19 @@ class Core(object):
 
         return True
 
     def init_logger(self, level):
-        handler = logging.handlers.RotatingFileHandler(self.config['log_folder'] + sep + 'log.txt', maxBytes=12800, backupCount=10) #100 kb
+
+        file_handler = logging.handlers.RotatingFileHandler(self.config['log_folder'] + sep + 'log.txt', maxBytes=102400, backupCount=self.config['log_count']) #100 kib * 5
         console = logging.StreamHandler(stdout)
-        #handler = logging.FileHandler('Logs/log.txt')
-        frm = logging.Formatter("%(asctime)s: %(levelname)-8s %(message)s", "%d.%m.%Y %H:%M:%S")
-        handler.setFormatter(frm)
+
+        frm = logging.Formatter("%(asctime)s: %(levelname)-8s %(message)s", "%d.%m.%Y %H:%M:%S")
+        file_handler.setFormatter(frm)
         console.setFormatter(frm)
-        self.logger = logging.getLogger() # settable in config
-        self.logger.addHandler(handler)
+        self.logger = logging.getLogger("log") # settable in config
+
+        if self.config['file_log']:
+            self.logger.addHandler(file_handler)
+
         self.logger.addHandler(console) #if console logging
         self.logger.setLevel(level)
 
```
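The rewritten init_logger() makes file logging optional and rotation configurable (100 KiB per file, log_count backups). A minimal standalone sketch of the same setup, assuming config is the flat dict Core builds from the config file and that the log folder already exists; setup_logger is an illustrative name, not a function in the repository:

```python
import logging
import logging.handlers
from os.path import join
from sys import stdout

def setup_logger(config, level=logging.DEBUG):
    """Console logging is always on; rotating file logging only when
    'file_log' is enabled in the config (as in the new init_logger)."""
    frm = logging.Formatter("%(asctime)s: %(levelname)-8s %(message)s",
                            "%d.%m.%Y %H:%M:%S")

    console = logging.StreamHandler(stdout)
    console.setFormatter(frm)

    logger = logging.getLogger("log")
    logger.setLevel(level)
    logger.addHandler(console)

    if config['file_log']:
        # 100 KiB per file, keep 'log_count' rotated backups
        file_handler = logging.handlers.RotatingFileHandler(
            join(config['log_folder'], 'log.txt'),
            maxBytes=102400,
            backupCount=int(config['log_count']))
        file_handler.setFormatter(frm)
        logger.addHandler(file_handler)

    return logger
```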
+ return "Plugin" def prepareDownload(self): diff --git a/module/download_thread.py b/module/download_thread.py index d5e4f4edc..5313a7d34 100644 --- a/module/download_thread.py +++ b/module/download_thread.py @@ -19,7 +19,6 @@ ### import threading -import random from time import time, sleep @@ -30,35 +29,12 @@ class Status(object): self.pyfile = pyfile self.type = None self.status_queue = None - self.total_kb = 0 - self.downloaded_kb = 0 - self.rate = 0 - self.expected_time = 0 self.filename = None self.url = None self.exists = False self.waituntil = None self.want_reconnect = False -# def __call__(self, blocks_read, block_size, total_size): -# if self.status_queue == None: -# return False -# self.start = time() -# self.last_status = time() -# self.total_kb = total_size / 1024 -# self.downloaded_kb = (blocks_read * block_size) / 1024 -# elapsed_time = time() - self.start -# if elapsed_time != 0: -# self.rate = self.downloaded_kb / elapsed_time -# if self.rate != 0: -# self.expected_time = self.downloaded_kb / self.rate -# if self.last_status+0.2 < time(): -# self.status_queue.put(copy(self)) -# self.last_status = time() -# - def set_status_queue(self, queue): - self.status_queue = queue - def get_ETA(self): return self.pyfile.plugin.req.get_ETA() def get_speed(self): @@ -86,6 +62,7 @@ class Download_Thread(threading.Thread): self.download(self.loadedPyFile) except Exception, e: print "Error:", e #catch up all error here + self.loadedPyFile.status.type = "failed" finally: self.parent.job_finished(self.loadedPyFile) sleep(0.5) @@ -96,7 +73,6 @@ class Download_Thread(threading.Thread): def download(self, pyfile): status = pyfile.status pyfile.prepareDownload() - print "dl prepared", status.filename if not status.exists: raise "FileDontExists" #i know its deprecated, who cares^^ @@ -104,7 +80,7 @@ class Download_Thread(threading.Thread): status.type = "waiting" while (time() < status.waituntil): - if self.parent.init_reconnect(pyfile) or self.parent.reconnecting: + if self.parent.init_reconnect() or self.parent.reconnecting: status.type = "reconnected" status.want_reconnect = False return False diff --git a/module/network/Request.py b/module/network/Request.py index ce6dc6c8c..8725d0607 100755 --- a/module/network/Request.py +++ b/module/network/Request.py @@ -3,15 +3,15 @@ """ authored by: RaNaN """ -import urllib -import urllib2 -import cookielib import base64 +import cookielib import time +import urllib +import urllib2 +from gzip import GzipFile from Keepalive import HTTPHandler from cStringIO import StringIO -from gzip import GzipFile """ handles all outgoing HTTP-Requests of the Server @@ -31,64 +31,64 @@ class Request: self.dl_arrived = 0 self.dl = False - self.lastURL = None - self.cj = cookielib.CookieJar() + self.lastURL = None + self.cj = cookielib.CookieJar() handler = HTTPHandler() - self.opener = urllib2.build_opener(handler, urllib2.HTTPCookieProcessor(self.cj)) - self.downloader = urllib2.build_opener() - #self.opener.add_handler() + self.opener = urllib2.build_opener(handler, urllib2.HTTPCookieProcessor(self.cj)) + self.downloader = urllib2.build_opener() + #self.opener.add_handler() - self.opener.addheaders = [ - ("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"), - ("Accept-Encoding","gzip,deflate"), - ("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), - ("Accept-Charset","ISO-8859-1,utf-8;q=0.7,*;q=0.7"), - ("Connection","keep-alive"), - ("Keep-Alive","300")] + self.opener.addheaders = [ + 
("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"), + ("Accept-Encoding", "gzip,deflate"), + ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), + ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7"), + ("Connection", "keep-alive"), + ("Keep-Alive", "300")] - self.downloader.addheaders = [ - ("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"), - ("Accept-Encoding","gzip,deflate"), - ("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), - ("Accept-Charset","ISO-8859-1,utf-8;q=0.7,*;q=0.7")] + self.downloader.addheaders = [ + ("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"), + ("Accept-Encoding", "gzip,deflate"), + ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), + ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7")] - def load(self, url, get = {}, post = {}, ref = True): + def load(self, url, get={}, post={}, ref=True): - if post: - post = urllib.urlencode(post) - else: - post = None + if post: + post = urllib.urlencode(post) + else: + post = None - if get: - get = urllib.urlencode(get) - else: - get = "" + if get: + get = urllib.urlencode(get) + else: + get = "" - url = url + get - req = urllib2.Request(url, data = post) - - if ref and self.lastURL is not None: - req.add_header("Referer",self.lastURL) + url = url + get + req = urllib2.Request(url, data=post) + + if ref and self.lastURL is not None: + req.add_header("Referer", self.lastURL) - rep = self.opener.open(req) + rep = self.opener.open(req) - output = rep.read() + output = rep.read() - if rep.headers.has_key("content-encoding") : - if rep.headers["content-encoding"] == "gzip" : - output = GzipFile('','r',0,StringIO(output)).read() + if rep.headers.has_key("content-encoding"): + if rep.headers["content-encoding"] == "gzip": + output = GzipFile('', 'r', 0, StringIO(output)).read() - self.lastURL = url + self.lastURL = url - return output + return output def add_auth(self, user, pw): - self.downloader.addheaders.append(['Authorization','Basic ' + base64.encodestring(user + ':' + pw)[:-1]]) + self.downloader.addheaders.append(['Authorization', 'Basic ' + base64.encodestring(user + ':' + pw)[:-1]]) #def download(url, filename, reporthook = None, data = None): #default von urlretrieve auch None? 
```diff
diff --git a/module/network/Request.py b/module/network/Request.py
index ce6dc6c8c..8725d0607 100755
--- a/module/network/Request.py
+++ b/module/network/Request.py
@@ -3,15 +3,15 @@
 """
 authored by: RaNaN
 """
-import urllib
-import urllib2
-import cookielib
 import base64
+import cookielib
 import time
+import urllib
+import urllib2
+from gzip import GzipFile
 
 from Keepalive import HTTPHandler
 from cStringIO import StringIO
-from gzip import GzipFile
 
 """
 handles all outgoing HTTP-Requests of the Server
@@ -31,64 +31,64 @@ class Request:
         self.dl_arrived = 0
         self.dl = False
 
-        self.lastURL = None
-        self.cj = cookielib.CookieJar()
-        handler = HTTPHandler()
-        self.opener = urllib2.build_opener(handler, urllib2.HTTPCookieProcessor(self.cj))
-        self.downloader = urllib2.build_opener()
-        #self.opener.add_handler()
+        self.lastURL = None
+        self.cj = cookielib.CookieJar()
+        handler = HTTPHandler()
+        self.opener = urllib2.build_opener(handler, urllib2.HTTPCookieProcessor(self.cj))
+        self.downloader = urllib2.build_opener()
+        #self.opener.add_handler()
 
-        self.opener.addheaders = [
-        ("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"),
-        ("Accept-Encoding","gzip,deflate"),
-        ("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
-        ("Accept-Charset","ISO-8859-1,utf-8;q=0.7,*;q=0.7"),
-        ("Connection","keep-alive"),
-        ("Keep-Alive","300")]
+        self.opener.addheaders = [
+            ("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"),
+            ("Accept-Encoding", "gzip,deflate"),
+            ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
+            ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7"),
+            ("Connection", "keep-alive"),
+            ("Keep-Alive", "300")]
 
-        self.downloader.addheaders = [
-        ("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"),
-        ("Accept-Encoding","gzip,deflate"),
-        ("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
-        ("Accept-Charset","ISO-8859-1,utf-8;q=0.7,*;q=0.7")]
+        self.downloader.addheaders = [
+            ("User-Agent", "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10"),
+            ("Accept-Encoding", "gzip,deflate"),
+            ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),
+            ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7")]
 
-    def load(self, url, get = {}, post = {}, ref = True):
+    def load(self, url, get={}, post={}, ref=True):
 
-        if post:
-            post = urllib.urlencode(post)
-        else:
-            post = None
+        if post:
+            post = urllib.urlencode(post)
+        else:
+            post = None
 
-        if get:
-            get = urllib.urlencode(get)
-        else:
-            get = ""
+        if get:
+            get = urllib.urlencode(get)
+        else:
+            get = ""
 
-        url = url + get
-        req = urllib2.Request(url, data = post)
-
-        if ref and self.lastURL is not None:
-            req.add_header("Referer",self.lastURL)
+        url = url + get
+        req = urllib2.Request(url, data=post)
+
+        if ref and self.lastURL is not None:
+            req.add_header("Referer", self.lastURL)
 
-        rep = self.opener.open(req)
+        rep = self.opener.open(req)
 
-        output = rep.read()
+        output = rep.read()
 
-        if rep.headers.has_key("content-encoding") :
-            if rep.headers["content-encoding"] == "gzip" :
-                output = GzipFile('','r',0,StringIO(output)).read()
+        if rep.headers.has_key("content-encoding"):
+            if rep.headers["content-encoding"] == "gzip":
+                output = GzipFile('', 'r', 0, StringIO(output)).read()
 
-        self.lastURL = url
+        self.lastURL = url
 
-        return output
+        return output
 
     def add_auth(self, user, pw):
-        self.downloader.addheaders.append(['Authorization','Basic ' + base64.encodestring(user + ':' + pw)[:-1]])
+        self.downloader.addheaders.append(['Authorization', 'Basic ' + base64.encodestring(user + ':' + pw)[:-1]])
 
     #def download(url, filename, reporthook = None, data = None): #default von urlretrieve auch None?
-    #    return self.downloader.urlretrieve(url, filename, reporthook, data)
+    #    return self.downloader.urlretrieve(url, filename, reporthook, data)
 
     def download(self, url, filename, post={}):
@@ -115,7 +115,7 @@ class Request:
 
     def get_speed(self):
         try:
-            return (self.dl_arrived / ((time.time() if self.dl else self.dl_finished) - self.dl_time )) / 1024
+            return (self.dl_arrived / ((time.time() if self.dl else self.dl_finished) - self.dl_time)) / 1024
         except:
             return 0
 
@@ -128,6 +128,6 @@
     def kB_left(self):
         return (self.dl_size - self.dl_arrived) / 1024
 
-if __name__ == "__main__" :
+if __name__ == "__main__":
     import doctest
     doctest.testmod()
```
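Most of the Request.py hunk is whitespace and import cleanup; the load() path itself boils down to: encode GET/POST data, send the request with an optional Referer, and transparently un-gzip the reply. A compact sketch of that flow in the same Python 2 style the module uses; fetch() is illustrative only and takes the opener as an argument instead of living on the class:

```python
import urllib
import urllib2
from cStringIO import StringIO
from gzip import GzipFile

def fetch(opener, url, get=None, post=None, referer=None):
    """Sketch of Request.load(): append encoded GET params, send optional
    POST data and Referer, and gunzip the response when it is compressed."""
    if get:
        url = url + urllib.urlencode(get)   # appended directly, as load() does
    data = urllib.urlencode(post) if post else None

    req = urllib2.Request(url, data=data)
    if referer:
        req.add_header("Referer", referer)

    rep = opener.open(req)
    output = rep.read()
    if rep.headers.get("content-encoding") == "gzip":
        output = GzipFile('', 'r', 0, StringIO(output)).read()
    return output
```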
"""checks if all files want reconnect""" |