From d35c003cc53d4723d1dfe0d81eeb9bea78cee594 Mon Sep 17 00:00:00 2001
From: RaNaN
Date: Sat, 31 Dec 2011 16:01:24 +0100
Subject: new crypter plugin API, basic decrypting now possible.

---
 module/threads/BaseThread.py      | 117 ++++++++++++++
 module/threads/DecrypterThread.py |  35 +++++
 module/threads/DownloadThread.py  | 215 ++++++++++++++++++++++++++
 module/threads/HookThread.py      |  56 +++++++
 module/threads/InfoThread.py      | 215 ++++++++++++++++++++++++++
 module/threads/ThreadManager.py   | 311 ++++++++++++++++++++++++++++++++++++++
 module/threads/__init__.py        |   0
 7 files changed, 949 insertions(+)
 create mode 100644 module/threads/BaseThread.py
 create mode 100644 module/threads/DecrypterThread.py
 create mode 100644 module/threads/DownloadThread.py
 create mode 100644 module/threads/HookThread.py
 create mode 100644 module/threads/InfoThread.py
 create mode 100644 module/threads/ThreadManager.py
 create mode 100644 module/threads/__init__.py
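
The entry point for the new crypter API is ThreadManager.createDecryptThread(), added below. A minimal usage sketch; the caller, the plugin name and the urls are hypothetical, only the manager method and the (pluginname, url) pair format come from the code in this patch:

    # hypothetical caller, e.g. package-adding code in the server layer
    data = [("LinkdecrypterCom", "http://example.com/folder1"),
            ("LinkdecrypterCom", "http://example.com/folder2")]

    # spawns a DecrypterThread that groups the urls per crypter plugin
    core.threadManager.createDecryptThread(data, pid)
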
diff --git a/module/threads/BaseThread.py b/module/threads/BaseThread.py
new file mode 100644
index 000000000..b5856c856
--- /dev/null
+++ b/module/threads/BaseThread.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from threading import Thread
+from time import strftime, gmtime
+from sys import exc_info
+from types import MethodType
+from pprint import pformat
+from traceback import format_exc
+
+from module.utils.fs import listdir, join, save_join, stat
+
+class BaseThread(Thread):
+    """abstract base class for thread types"""
+
+    def __init__(self, manager):
+        """Constructor"""
+        Thread.__init__(self)
+        self.setDaemon(True)
+        self.m = manager #thread manager
+        self.log = manager.core.log
+
+
+    def writeDebugReport(self, pyfile):
+        """ writes a debug report to disk """
+
+        dump_name = "debug_%s_%s.zip" % (pyfile.pluginname, strftime("%d-%m-%Y_%H-%M-%S"))
+        dump = self.getDebugDump(pyfile)
+
+        try:
+            import zipfile
+
+            zip = zipfile.ZipFile(dump_name, "w")
+
+            for f in listdir(join("tmp", pyfile.pluginname)):
+                try:
+                    # avoid encoding errors
+                    zip.write(join("tmp", pyfile.pluginname, f), save_join(pyfile.pluginname, f))
+                except:
+                    pass
+
+            info = zipfile.ZipInfo(save_join(pyfile.pluginname, "debug_Report.txt"), gmtime())
+            info.external_attr = 0644 << 16L # change permissions
+
+            zip.writestr(info, dump)
+            zip.close()
+
+            if not stat(dump_name).st_size:
+                raise Exception("Empty Zipfile")
+
+        except Exception, e:
+            self.log.debug("Error creating zip file: %s" % e)
+
+            dump_name = dump_name.replace(".zip", ".txt")
+            f = open(dump_name, "wb")
+            f.write(dump)
+            f.close()
+
+        self.log.info("Debug Report written to %s" % dump_name)
+
+    def getDebugDump(self, pyfile):
+        dump = "pyLoad %s Debug Report of %s %s \n\nTRACEBACK:\n %s \n\nFRAMESTACK:\n" % (
+            self.m.core.api.getServerVersion(), pyfile.pluginname, pyfile.plugin.__version__, format_exc())
+
+        tb = exc_info()[2]
+        stack = []
+        while tb:
+            stack.append(tb.tb_frame)
+            tb = tb.tb_next
+
+        for frame in stack[1:]:
+            dump += "\nFrame %s in %s at line %s\n" % (frame.f_code.co_name,
+                                                       frame.f_code.co_filename,
+                                                       frame.f_lineno)
+
+            for key, value in frame.f_locals.items():
+                dump += "\t%20s = " % key
+                try:
+                    dump += pformat(value) + "\n"
+                except Exception, e:
+                    dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+            del frame
+
+        del stack #delete it just to be sure...
+
+        dump += "\n\nPLUGIN OBJECT DUMP: \n\n"
+
+        for name in dir(pyfile.plugin):
+            attr = getattr(pyfile.plugin, name)
+            if not name.endswith("__") and type(attr) != MethodType:
+                dump += "\t%20s = " % name
+                try:
+                    dump += pformat(attr) + "\n"
+                except Exception, e:
+                    dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+        dump += "\nPYFILE OBJECT DUMP: \n\n"
+
+        for name in dir(pyfile):
+            attr = getattr(pyfile, name)
+            if not name.endswith("__") and type(attr) != MethodType:
+                dump += "\t%20s = " % name
+                try:
+                    dump += pformat(attr) + "\n"
+                except Exception, e:
+                    dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+        dump += "\n\nCONFIG: \n\n"
+        dump += pformat(self.m.core.config.values) + "\n"
+
+        return dump
+
+    def clean(self, pyfile):
+        """ set thread inactive and release the pyfile """
+        self.active = False
+        pyfile.release()
diff --git a/module/threads/DecrypterThread.py b/module/threads/DecrypterThread.py
new file mode 100644
index 000000000..5ce59a65e
--- /dev/null
+++ b/module/threads/DecrypterThread.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from BaseThread import BaseThread
+
+class DecrypterThread(BaseThread):
+    """thread for decrypting"""
+
+    def __init__(self, manager, data, package):
+        """constructor"""
+        BaseThread.__init__(self, manager)
+        self.queue = data
+        self.package = package
+
+        self.m.log.debug("Starting Decrypt thread")
+
+        self.start()
+
+    def add(self, data):
+        self.queue.extend(data)
+
+    def run(self):
+        plugin_map = {}
+        for plugin, url in self.queue:
+            if plugin in plugin_map:
+                plugin_map[plugin].append(url)
+            else:
+                plugin_map[plugin] = [url]
+
+        self.decrypt(plugin_map)
+
+    def decrypt(self, plugin_map):
+        for name, urls in plugin_map.iteritems():
+            p = self.m.core.pluginManager.loadClass("crypter", name)
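
DecrypterThread.run() groups the queued (plugin, url) pairs by plugin name before decrypting. The hand-rolled dict building is behaviorally equivalent to this standalone sketch; the plugin names and urls are invented for illustration:

    from collections import defaultdict

    queue = [("SerienjunkiesOrg", "http://example.com/a"),
             ("SerienjunkiesOrg", "http://example.com/b"),
             ("RelinkUs", "http://example.com/c")]

    plugin_map = defaultdict(list)
    for plugin, url in queue:
        plugin_map[plugin].append(url)

    # plugin_map == {"SerienjunkiesOrg": [".../a", ".../b"], "RelinkUs": [".../c"]}
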
diff --git a/module/threads/DownloadThread.py b/module/threads/DownloadThread.py
new file mode 100644
index 000000000..3d444686b
--- /dev/null
+++ b/module/threads/DownloadThread.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 3 of the License,
+    or (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+    See the GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+    @author: RaNaN
+"""
+
+from Queue import Queue
+from time import sleep, time
+from traceback import print_exc
+from sys import exc_clear
+from pycurl import error
+
+from module.plugins.Base import Fail, Retry
+from module.plugins.Hoster import Abort, Reconnect, SkipDownload
+
+from BaseThread import BaseThread
+
+class DownloadThread(BaseThread):
+    """thread for downloading files from 'real' hoster plugins"""
+
+    def __init__(self, manager):
+        """Constructor"""
+        BaseThread.__init__(self, manager)
+
+        self.queue = Queue() # job queue
+        self.active = False
+
+        self.start()
+
+    def run(self):
+        """run method"""
+        pyfile = None
+
+        while True:
+            del pyfile
+            self.active = self.queue.get()
+            pyfile = self.active
+
+            if self.active == "quit":
+                self.active = False
+                self.m.threads.remove(self)
+                return True
+
+            try:
+                if not pyfile.hasPlugin(): continue
+                #this pyfile was deleted while queueing
+
+                pyfile.plugin.checkForSameFiles(starting=True)
+                self.m.log.info(_("Download starts: %s") % pyfile.name)
+
+                # start download
+                self.m.core.hookManager.downloadPreparing(pyfile)
+                pyfile.plugin.preprocessing(self)
+
+                self.m.log.info(_("Download finished: %s") % pyfile.name)
+                self.m.core.hookManager.downloadFinished(pyfile)
+                self.m.core.files.checkPackageFinished(pyfile)
+
+            except NotImplementedError:
+                self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+                pyfile.setStatus("failed")
+                pyfile.error = "Plugin does not work"
+                self.clean(pyfile)
+                continue
+
+            except Abort:
+                try:
+                    self.m.log.info(_("Download aborted: %s") % pyfile.name)
+                except:
+                    pass
+
+                pyfile.setStatus("aborted")
+
+                self.clean(pyfile)
+                continue
+
+            except Reconnect:
+                self.queue.put(pyfile)
+                #pyfile.req.clearCookies()
+
+                while self.m.reconnecting.isSet():
+                    sleep(0.5)
+
+                continue
+
+            except Retry, e:
+                reason = e.args[0]
+                self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
+                self.queue.put(pyfile)
+                continue
+
+            except Fail, e:
+                msg = e.args[0]
+
+                if msg == "offline":
+                    pyfile.setStatus("offline")
+                    self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+                elif msg == "temp. offline":
offline") + self.m.log.warning(_("Download is temporary offline: %s") % pyfile.name) + else: + pyfile.setStatus("failed") + self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg}) + pyfile.error = msg + + self.m.core.hookManager.downloadFailed(pyfile) + self.clean(pyfile) + continue + + except error, e: + if len(e.args) == 2: + code, msg = e.args + else: + code = 0 + msg = e.args + + self.m.log.debug("pycurl exception %s: %s" % (code, msg)) + + if code in (7, 18, 28, 52, 56): + self.m.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry.")) + wait = time() + 60 + + pyfile.waitUntil = wait + pyfile.setStatus("waiting") + while time() < wait: + sleep(1) + if pyfile.abort: + break + + if pyfile.abort: + self.m.log.info(_("Download aborted: %s") % pyfile.name) + pyfile.setStatus("aborted") + + self.clean(pyfile) + else: + self.queue.put(pyfile) + + continue + + else: + pyfile.setStatus("failed") + self.m.log.error("pycurl error %s: %s" % (code, msg)) + if self.m.core.debug: + print_exc() + self.writeDebugReport(pyfile) + + self.m.core.hookManager.downloadFailed(pyfile) + + self.clean(pyfile) + continue + + except SkipDownload, e: + pyfile.setStatus("skipped") + + self.m.log.info( + _("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message}) + + self.clean(pyfile) + + self.m.core.files.checkPackageFinished(pyfile) + + self.active = False + self.m.core.files.save() + + continue + + + except Exception, e: + pyfile.setStatus("failed") + self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)}) + pyfile.error = str(e) + + if self.m.core.debug: + print_exc() + self.writeDebugReport(pyfile) + + self.m.core.hookManager.downloadFailed(pyfile) + self.clean(pyfile) + continue + + finally: + self.m.core.files.save() + pyfile.checkIfProcessed() + exc_clear() + + + #pyfile.plugin.req.clean() + + self.active = False + pyfile.finishIfDone() + self.m.core.files.save() + + + def put(self, job): + """assing job to thread""" + self.queue.put(job) + + + def stop(self): + """stops the thread""" + self.put("quit") \ No newline at end of file diff --git a/module/threads/HookThread.py b/module/threads/HookThread.py new file mode 100644 index 000000000..fe4a2a651 --- /dev/null +++ b/module/threads/HookThread.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from copy import copy + +from BaseThread import BaseThread + +class HookThread(BaseThread): + """thread for hooks""" + + def __init__(self, m, function, args, kwargs): + """Constructor""" + BaseThread.__init__(self, m) + + self.f = function + self.args = args + self.kwargs = kwargs + + self.active = [] + + m.localThreads.append(self) + + self.start() + + def getActiveFiles(self): + return self.active + + def addActive(self, pyfile): + """ Adds a pyfile to active list and thus will be displayed on overview""" + if pyfile not in self.active: + self.active.append(pyfile) + + def finishFile(self, pyfile): + if pyfile in self.active: + self.active.remove(pyfile) + + pyfile.finishIfDone() + + def run(self): + try: + try: + self.kwargs["thread"] = self + self.f(*self.args, **self.kwargs) + except TypeError, e: + #dirty method to filter out exceptions + if "unexpected keyword argument 'thread'" not in e.args[0]: + raise + + del self.kwargs["thread"] + self.f(*self.args, **self.kwargs) + finally: + local = copy(self.active) + for x in local: + self.finishFile(x) + + self.m.localThreads.remove(self) \ No 
newline at end of file diff --git a/module/threads/InfoThread.py b/module/threads/InfoThread.py new file mode 100644 index 000000000..4cba7da38 --- /dev/null +++ b/module/threads/InfoThread.py @@ -0,0 +1,215 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from time import time +from traceback import print_exc + +from module.Api import OnlineStatus +from module.PyFile import PyFile +from module.common.packagetools import parseNames + +from BaseThread import BaseThread + +class InfoThread(BaseThread): + def __init__(self, manager, data, pid=-1, rid=-1, add=False): + """Constructor""" + BaseThread.__init__(self, manager) + + self.data = data + self.pid = pid # package id + # [ .. (name, plugin) .. ] + + self.rid = rid #result id + self.add = add #add packages instead of return result + + self.cache = [] #accumulated data + + self.start() + + def run(self): + """run method""" + + plugins = {} + container = [] + + for url, plugin in self.data: + if plugin in plugins: + plugins[plugin].append(url) + else: + plugins[plugin] = [url] + + + # filter out container plugins + for name in self.m.core.pluginManager.getPlugins("container"): + if name in plugins: + container.extend([(name, url) for url in plugins[name]]) + + del plugins[name] + + #directly write to database + if self.pid > -1: + for pluginname, urls in plugins.iteritems(): + plugin = self.m.core.pluginManager.getPlugin(pluginname, True) + if hasattr(plugin, "getInfo"): + self.fetchForPlugin(pluginname, plugin, urls, self.updateDB) + self.m.core.files.save() + + elif self.add: + for pluginname, urls in plugins.iteritems(): + plugin = self.m.core.pluginManager.getPlugin(pluginname, True) + if hasattr(plugin, "getInfo"): + self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True) + + else: + #generate default result + result = [(url, 0, 3, url) for url in urls] + + self.updateCache(pluginname, result) + + packs = parseNames([(name, url) for name, x, y, url in self.cache]) + + self.m.log.debug("Fetched and generated %d packages" % len(packs)) + + for k, v in packs: + self.m.core.api.addPackage(k, v) + + #empty cache + del self.cache[:] + + else: #post the results + + + for name, url in container: + #attach container content + try: + data = self.decryptContainer(name, url) + except: + print_exc() + self.m.log.error("Could not decrypt container.") + data = [] + + for url, plugin in data: + if plugin in plugins: + plugins[plugin].append(url) + else: + plugins[plugin] = [url] + + self.m.infoResults[self.rid] = {} + + for pluginname, urls in plugins.iteritems(): + plugin = self.m.core.pluginManager.getPlugin(pluginname, True) + if hasattr(plugin, "getInfo"): + self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True) + + #force to process cache + if self.cache: + self.updateResult(pluginname, [], True) + + else: + #generate default result + result = [(url, 0, 3, url) for url in urls] + + self.updateResult(pluginname, result, True) + + self.m.infoResults[self.rid]["ALL_INFO_FETCHED"] = {} + + self.m.timestamp = time() + 5 * 60 + + + def updateDB(self, plugin, result): + self.m.core.files.updateFileInfo(result, self.pid) + + def updateResult(self, plugin, result, force=False): + #parse package name and generate result + #accumulate results + + self.cache.extend(result) + + if len(self.cache) >= 20 or force: + #used for package generating + tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size)))) + for name, size, status, url in self.cache] + + data = parseNames(tmp) + result = {} + for k, v in 
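
The nested try in HookThread.run() implements an optional-keyword protocol: hook callbacks may accept a thread argument, and callbacks that do not are retried without it. The same pattern in isolation; call_hook and legacy_hook are made-up names for illustration:

    def call_hook(f, *args, **kwargs):
        kwargs["thread"] = "some thread"   # stand-in for the HookThread instance
        try:
            return f(*args, **kwargs)
        except TypeError, e:
            # only swallow the mismatch we caused ourselves
            if "unexpected keyword argument 'thread'" not in e.args[0]:
                raise
            del kwargs["thread"]
            return f(*args, **kwargs)

    def legacy_hook(pyfile):   # accepts no thread argument
        return "handled %s" % pyfile

    print call_hook(legacy_hook, "file1")   # falls back, prints: handled file1
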
diff --git a/module/threads/InfoThread.py b/module/threads/InfoThread.py
new file mode 100644
index 000000000..4cba7da38
--- /dev/null
+++ b/module/threads/InfoThread.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from time import time
+from traceback import print_exc
+
+from module.Api import OnlineStatus
+from module.PyFile import PyFile
+from module.common.packagetools import parseNames
+
+from BaseThread import BaseThread
+
+class InfoThread(BaseThread):
+    def __init__(self, manager, data, pid=-1, rid=-1, add=False):
+        """Constructor"""
+        BaseThread.__init__(self, manager)
+
+        self.data = data
+        self.pid = pid # package id
+        # [ .. (url, plugin) .. ]
+
+        self.rid = rid #result id
+        self.add = add #add packages instead of returning the result
+
+        self.cache = [] #accumulated data
+
+        self.start()
+
+    def run(self):
+        """run method"""
+
+        plugins = {}
+        container = []
+
+        for url, plugin in self.data:
+            if plugin in plugins:
+                plugins[plugin].append(url)
+            else:
+                plugins[plugin] = [url]
+
+
+        # filter out container plugins
+        for name in self.m.core.pluginManager.getPlugins("container"):
+            if name in plugins:
+                container.extend([(name, url) for url in plugins[name]])
+
+                del plugins[name]
+
+        #directly write to database
+        if self.pid > -1:
+            for pluginname, urls in plugins.iteritems():
+                plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
+                if hasattr(plugin, "getInfo"):
+                    self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
+                    self.m.core.files.save()
+
+        elif self.add:
+            for pluginname, urls in plugins.iteritems():
+                plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
+                if hasattr(plugin, "getInfo"):
+                    self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
+
+                else:
+                    #generate default result
+                    result = [(url, 0, 3, url) for url in urls]
+
+                    self.updateCache(pluginname, result)
+
+            packs = parseNames([(name, url) for name, x, y, url in self.cache])
+
+            self.m.log.debug("Fetched and generated %d packages" % len(packs))
+
+            for k, v in packs.iteritems():
+                self.m.core.api.addPackage(k, v)
+
+            #empty cache
+            del self.cache[:]
+
+        else: #post the results
+
+
+            for name, url in container:
+                #attach container content
+                try:
+                    data = self.decryptContainer(name, url)
+                except:
+                    print_exc()
+                    self.m.log.error("Could not decrypt container.")
+                    data = []
+
+                for url, plugin in data:
+                    if plugin in plugins:
+                        plugins[plugin].append(url)
+                    else:
+                        plugins[plugin] = [url]
+
+            self.m.infoResults[self.rid] = {}
+
+            for pluginname, urls in plugins.iteritems():
+                plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
+                if hasattr(plugin, "getInfo"):
+                    self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
+
+                    #force to process cache
+                    if self.cache:
+                        self.updateResult(pluginname, [], True)
+
+                else:
+                    #generate default result
+                    result = [(url, 0, 3, url) for url in urls]
+
+                    self.updateResult(pluginname, result, True)
+
+            self.m.infoResults[self.rid]["ALL_INFO_FETCHED"] = {}
+
+        self.m.timestamp = time() + 5 * 60
+
+
+    def updateDB(self, plugin, result):
+        self.m.core.files.updateFileInfo(result, self.pid)
+
+    def updateResult(self, plugin, result, force=False):
+        #parse package name and generate result
+        #accumulate results
+
+        self.cache.extend(result)
+
+        if len(self.cache) >= 20 or force:
+            #used for package generating
+            tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
+                   for name, size, status, url in self.cache]
+
+            data = parseNames(tmp)
+            result = {}
+            for k, v in data.iteritems():
+                for url, status in v:
+                    status.packagename = k
+                    result[url] = status
+
+            self.m.setInfoResults(self.rid, result)
+
+            self.cache = []
+
+    def updateCache(self, plugin, result):
+        self.cache.extend(result)
+
+    def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
+        try:
+            result = [] #result loaded from cache
+            process = [] #urls to process
+            for url in urls:
+                if url in self.m.infoCache:
+                    result.append(self.m.infoCache[url])
+                else:
+                    process.append(url)
+
+            if result:
+                self.m.log.debug("Fetched %d values from cache for %s" % (len(result), pluginname))
+                cb(pluginname, result)
+
+            if process:
+                self.m.log.debug("Run Info Fetching for %s" % pluginname)
+                for result in plugin.getInfo(process):
+                    #result = [ .. (name, size, status, url) .. ]
+                    if not type(result) == list: result = [result]
+
+                    for res in result:
+                        self.m.infoCache[res[3]] = res
+
+                    cb(pluginname, result)
+
+            self.m.log.debug("Finished Info Fetching for %s" % pluginname)
+        except Exception, e:
+            self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
+                               {"name": pluginname, "err": str(e)})
+            if self.m.core.debug:
+                print_exc()
+
+            # generate default results
+            if err:
+                result = [(url, 0, 3, url) for url in urls]
+                cb(pluginname, result)
+
+
+    def decryptContainer(self, plugin, url):
+        data = []
+        # only works on container plugins
+
+        self.m.log.debug("Pre decrypting %s with %s" % (url, plugin))
+
+        # dummy pyfile
+        pyfile = PyFile(self.m.core.files, -1, url, url, 0, 0, "", plugin, -1, -1)
+
+        pyfile.initPlugin()
+
+        # little plugin lifecycle
+        try:
+            pyfile.plugin.setup()
+            pyfile.plugin.loadToDisk()
+            pyfile.plugin.decrypt(pyfile)
+            pyfile.plugin.deleteTmp()
+
+            for pack in pyfile.plugin.packages:
+                pyfile.plugin.urls.extend(pack[1])
+
+            data, crypter = self.m.core.pluginManager.parseUrls(pyfile.plugin.urls)
+
+            self.m.log.debug("Got %d links." % len(data))
+
+        except Exception, e:
+            self.m.log.debug("Pre decrypting error: %s" % str(e))
+        finally:
+            pyfile.release()
+
+        return data
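
fetchForPlugin() above memoizes per-url results in m.infoCache, keyed on the url field (res[3]) of each (name, size, status, url) tuple, so repeated online checks skip the plugin. A compact sketch of that round-trip; the getInfo stand-in and the urls are invented:

    infoCache = {}

    def fetch(urls, getInfo):
        cached = [infoCache[u] for u in urls if u in infoCache]
        process = [u for u in urls if u not in infoCache]
        results = getInfo(process) if process else []
        for res in results:
            infoCache[res[3]] = res    # the url serves as the cache key
        return cached + results

    def fake_getInfo(urls):            # a real plugin would query the hoster here
        return [("file.rar", 1024, 2, u) for u in urls]

    fetch(["http://example.com/x"], fake_getInfo)   # misses cache, queries "plugin"
    fetch(["http://example.com/x"], fake_getInfo)   # second call served from infoCache
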
diff --git a/module/threads/ThreadManager.py b/module/threads/ThreadManager.py
new file mode 100644
index 000000000..c32286eb9
--- /dev/null
+++ b/module/threads/ThreadManager.py
@@ -0,0 +1,311 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 3 of the License,
+    or (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+    See the GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+    @author: RaNaN
+"""
+
+from os.path import exists, join
+import re
+from subprocess import Popen
+from threading import Event, Lock
+from time import sleep, time
+from traceback import print_exc
+from random import choice
+
+import pycurl
+
+from module.PyFile import PyFile
+from module.network.RequestFactory import getURL
+from module.utils import lock
+from module.utils.fs import free_space
+
+from DecrypterThread import DecrypterThread
+from DownloadThread import DownloadThread
+from InfoThread import InfoThread
+
+class ThreadManager:
+    """manages the download threads, assigns jobs, reconnects etc"""
+
+
+    def __init__(self, core):
+        """Constructor"""
+        self.core = core
+        self.log = core.log
+
+        self.threads = [] # thread list
+        self.localThreads = [] #hook+decrypter threads
+
+        self.pause = True
+
+        self.reconnecting = Event()
+        self.reconnecting.clear()
+        self.downloaded = 0 #number of files downloaded since last cleanup
+
+        self.lock = Lock()
+
+        # some operations require fetching url info from the hoster, so we cache the results to avoid doing it twice
+        # contains a timestamp and will be purged after timeout
+        self.infoCache = {}
+
+        # pool of ids for online check
+        self.resultIDs = 0
+
+        # threads which are fetching hoster results
+        self.infoResults = {}
+        # timeout for cache purge
+        self.timestamp = 0
+
+        pycurl.global_init(pycurl.GLOBAL_DEFAULT)
+
+        for i in range(0, self.core.config.get("download", "max_downloads")):
+            self.createThread()
+
+
+    def createThread(self):
+        """create a download thread"""
+
+        thread = DownloadThread(self)
+        self.threads.append(thread)
+
+    def createInfoThread(self, data, pid):
+        """ start a thread which fetches online status and other infos """
+        self.timestamp = time() + 5 * 60
+
+        InfoThread(self, data, pid)
+
+    @lock
+    def createResultThread(self, data, add=False):
+        """ creates a thread to fetch online status, returns result id """
+        self.timestamp = time() + 5 * 60
+
+        rid = self.resultIDs
+        self.resultIDs += 1
+
+        InfoThread(self, data, rid=rid, add=add)
+
+        return rid
+
+    @lock
+    def createDecryptThread(self, data, pid):
+        """ Start decrypting the entered data; all links in one package are accumulated into one thread."""
+        DecrypterThread(self, data, pid)
+
+
+    @lock
+    def getInfoResult(self, rid):
+        """returns result and clears it"""
+        self.timestamp = time() + 5 * 60
+
+        if rid in self.infoResults:
+            data = self.infoResults[rid]
+            self.infoResults[rid] = {}
+            return data
+        else:
+            return {}
+
+    @lock
+    def setInfoResults(self, rid, result):
+        self.infoResults[rid].update(result)
+
+    def getActiveFiles(self):
+        active = [x.active for x in self.threads if x.active and isinstance(x.active, PyFile)]
+
+        for t in self.localThreads:
+            active.extend(t.getActiveFiles())
+
+        return active
+
+    def processingIds(self):
+        """get an id list of all pyfiles being processed"""
+        return [x.id for x in self.getActiveFiles()]
+
+
+    def work(self):
+        """run all tasks which have to be done (called repeatedly by the core)"""
+        try:
+            self.tryReconnect()
+        except Exception, e:
+            self.log.error(_("Reconnect Failed: %s") % str(e))
+            self.reconnecting.clear()
+            if self.core.debug:
+                print_exc()
+        self.checkThreadCount()
+
+        try:
+            self.assignJob()
+        except Exception, e:
+            self.log.warning("Assign job error: %s" % e)
+            if self.core.debug:
+                print_exc()
+
+            sleep(0.5)
+            self.assignJob()
+            #it may have failed non-critically, so we try it again
+
+        if (self.infoCache or self.infoResults) and self.timestamp < time():
+            self.infoCache.clear()
+            self.infoResults.clear()
self.log.debug("Cleared Result cache") + + def tryReconnect(self): + """checks if reconnect needed""" + + if not (self.core.config["reconnect"]["activated"] and self.core.api.isTimeReconnect()): + return False + + active = [x.active.plugin.wantReconnect and x.active.plugin.waiting for x in self.threads if x.active] + + if not (0 < active.count(True) == len(active)): + return False + + if not exists(self.core.config['reconnect']['method']): + if exists(join(pypath, self.core.config['reconnect']['method'])): + self.core.config['reconnect']['method'] = join(pypath, self.core.config['reconnect']['method']) + else: + self.core.config["reconnect"]["activated"] = False + self.log.warning(_("Reconnect script not found!")) + return + + self.reconnecting.set() + + #Do reconnect + self.log.info(_("Starting reconnect")) + + while [x.active.plugin.waiting for x in self.threads if x.active].count(True) != 0: + sleep(0.25) + + ip = self.getIP() + + self.core.hookManager.beforeReconnecting(ip) + + self.log.debug("Old IP: %s" % ip) + + try: + reconn = Popen(self.core.config['reconnect']['method'], bufsize=-1, shell=True)#, stdout=subprocess.PIPE) + except: + self.log.warning(_("Failed executing reconnect script!")) + self.core.config["reconnect"]["activated"] = False + self.reconnecting.clear() + if self.core.debug: + print_exc() + return + + reconn.wait() + sleep(1) + ip = self.getIP() + self.core.hookManager.afterReconnecting(ip) + + self.log.info(_("Reconnected, new IP: %s") % ip) + + self.reconnecting.clear() + + def getIP(self): + """retrieve current ip""" + services = [("http://automation.whatismyip.com/n09230945.asp", "(\S+)"), + ("http://checkip.dyndns.org/",".*Current IP Address: (\S+).*")] + + ip = "" + for i in range(10): + try: + sv = choice(services) + ip = getURL(sv[0]) + ip = re.match(sv[1], ip).group(1) + break + except: + ip = "" + sleep(1) + + return ip + + def checkThreadCount(self): + """checks if there are need for increasing or reducing thread count""" + + if len(self.threads) == self.core.config.get("download", "max_downloads"): + return True + elif len(self.threads) < self.core.config.get("download", "max_downloads"): + self.createThread() + else: + free = [x for x in self.threads if not x.active] + if free: + free[0].put("quit") + + + def cleanPycurl(self): + """ make a global curl cleanup (currently ununused) """ + if self.processingIds(): + return False + pycurl.global_cleanup() + pycurl.global_init(pycurl.GLOBAL_DEFAULT) + self.downloaded = 0 + self.log.debug("Cleaned up pycurl") + return True + + + def assignJob(self): + """assing a job to a thread if possible""" + + if self.pause or not self.core.api.isTimeDownload(): return + + #if self.downloaded > 20: + # if not self.cleanPyCurl(): return + + free = [x for x in self.threads if not x.active] + + inuse = set([(x.active.pluginname,self.getLimit(x)) for x in self.threads if x.active and x.active.hasPlugin() and x.active.plugin.account]) + inuse = map(lambda x : (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])) ,inuse) + onlimit = [x[0] for x in inuse if 0 < x[1] <= x[2]] + + occ = [x.active.pluginname for x in self.threads if x.active and x.active.hasPlugin() and not x.active.plugin.multiDL] + onlimit + + occ.sort() + occ = tuple(set(occ)) + job = self.core.files.getJob(occ) + if job: + try: + job.initPlugin() + except Exception, e: + self.log.critical(str(e)) + print_exc() + job.setStatus("failed") + job.error = str(e) + job.release() + return + + spaceLeft = 
+            spaceLeft = free_space(self.core.config["general"]["download_folder"]) / 1024 / 1024
+            if spaceLeft < self.core.config["general"]["min_free_space"]:
+                self.log.warning(_("Not enough space left on device"))
+                self.pause = True
+
+            if free and not self.pause:
+                thread = free[0]
+                #self.downloaded += 1
+                thread.put(job)
+            else:
+                #put the job back
+                if occ not in self.core.files.jobCache:
+                    self.core.files.jobCache[occ] = []
+                self.core.files.jobCache[occ].append(job.id)
+
+    def getLimit(self, thread):
+        limit = thread.active.plugin.account.options.get("limitDL", "0")
+        if limit == "": limit = "0"
+        return int(limit)
+
+
+    def cleanup(self):
+        """do global cleanup, should be called when finished with pycurl"""
+        pycurl.global_cleanup()
diff --git a/module/threads/__init__.py b/module/threads/__init__.py
new file mode 100644
index 000000000..e69de29bb
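
For reference, a worked example of the occupancy computation in ThreadManager.assignJob() with invented thread states; plugins that forbid multiDL, or whose account has reached its limitDL, end up in the occ tuple that files.getJob() uses to exclude them:

    # (pluginname, limitDL, running downloads) for active threads with an account
    inuse = [("UploadedTo", 2, 2), ("RapidshareCom", 0, 1)]
    onlimit = [name for name, limit, running in inuse if 0 < limit <= running]
    # -> ["UploadedTo"]; RapidshareCom has no limit (0) and stays usable

    no_multidl = ["NetloadIn"]   # active plugins with multiDL == False
    occ = tuple(sorted(set(no_multidl + onlimit)))
    # -> ("NetloadIn", "UploadedTo"), the exclusion key passed to getJob()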