path: root/module/manager
Diffstat (limited to 'module/manager')
-rw-r--r--  module/manager/Account.py           191
-rw-r--r--  module/manager/Addon.py             304
-rw-r--r--  module/manager/Captcha.py           138
-rw-r--r--  module/manager/Event.py             104
-rw-r--r--  module/manager/Plugin.py            404
-rw-r--r--  module/manager/Remote.py             76
-rw-r--r--  module/manager/Thread.py            302
-rw-r--r--  module/manager/__init__.py            1
-rw-r--r--  module/manager/event/Scheduler.py   126
-rw-r--r--  module/manager/event/__init__.py      1
-rw-r--r--  module/manager/thread/Addon.py       69
-rw-r--r--  module/manager/thread/Decrypter.py  101
-rw-r--r--  module/manager/thread/Download.py   213
-rw-r--r--  module/manager/thread/Info.py       225
-rw-r--r--  module/manager/thread/Plugin.py     130
-rw-r--r--  module/manager/thread/Server.py     111
-rw-r--r--  module/manager/thread/__init__.py     1
17 files changed, 0 insertions, 2497 deletions
diff --git a/module/manager/Account.py b/module/manager/Account.py
deleted file mode 100644
index 3acf70311..000000000
--- a/module/manager/Account.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-from os.path import exists
-from shutil import copy
-
-from threading import Lock
-
-from pyload.manager.Event import AccountUpdateEvent
-from pyload.utils import chmod, lock
-
-ACC_VERSION = 1
-
-
-class AccountManager(object):
- """manages all accounts"""
-
- #----------------------------------------------------------------------
- def __init__(self, core):
- """Constructor"""
-
- self.core = core
- self.lock = Lock()
-
- self.initPlugins()
- self.saveAccounts() # save to add categories to conf
-
-
- def initPlugins(self):
- self.accounts = {} # key = ( plugin )
- self.plugins = {}
-
- self.initAccountPlugins()
- self.loadAccounts()
-
-
- def getAccountPlugin(self, plugin):
- """get account instance for plugin or None if anonymous"""
- try:
- if plugin in self.accounts:
- if plugin not in self.plugins:
- klass = self.core.pluginManager.loadClass("accounts", plugin)
- if klass:
- self.plugins[plugin] = klass(self, self.accounts[plugin])
- else: #@NOTE: The account class no longer exists (blacklisted plugin). Skipping the account to avoid crash
- raise
-
- return self.plugins[plugin]
- else:
- raise
- except Exception:
- return None
-
-
- def getAccountPlugins(self):
- """ get all account instances"""
-
- plugins = []
- for plugin in self.accounts.keys():
- plugins.append(self.getAccountPlugin(plugin))
-
- return plugins
-
-
- #----------------------------------------------------------------------
- def loadAccounts(self):
- """loads all accounts available"""
-
- try:
- with open("accounts.conf", "a+") as f:
- content = f.readlines()
- version = content[0].split(":")[1].strip() if content else ""
-
- if not version or int(version) < ACC_VERSION:
- copy("accounts.conf", "accounts.backup")
- f.seek(0)
- f.write("version: " + str(ACC_VERSION))
-
- self.core.log.warning(_("Account settings deleted, due to new config format"))
- return
-
- except IOError, e:
- self.core.log.error(str(e))
- return
-
- plugin = ""
- name = ""
-
- for line in content[1:]:
- line = line.strip()
-
- if not line: continue
- if line.startswith("#"): continue
- if line.startswith("version"): continue
-
- if line.endswith(":") and line.count(":") == 1:
- plugin = line[:-1]
- self.accounts[plugin] = {}
-
- elif line.startswith("@"):
- try:
- option = line[1:].split()
- self.accounts[plugin][name]['options'][option[0]] = [] if len(option) < 2 else ([option[1]] if len(option) < 3 else option[1:])
- except Exception:
- pass
-
- elif ":" in line:
- name, sep, pw = line.partition(":")
- self.accounts[plugin][name] = {"password": pw, "options": {}, "valid": True}
-
-
- #----------------------------------------------------------------------
- def saveAccounts(self):
- """save all account information"""
-
- try:
- with open("accounts.conf", "wb") as f:
- f.write("version: " + str(ACC_VERSION) + "\n")
-
- for plugin, accounts in self.accounts.iteritems():
- f.write("\n")
- f.write(plugin + ":\n")
-
- for name,data in accounts.iteritems():
- f.write("\n\t%s:%s\n" % (name,data['password']) )
- if data['options']:
- for option, values in data['options'].iteritems():
- f.write("\t@%s %s\n" % (option, " ".join(values)))
-
- chmod(f.name, 0600)
-
- except Exception, e:
- self.core.log.error(str(e))
-
-
- #----------------------------------------------------------------------
- def initAccountPlugins(self):
- """init names"""
- for name in self.core.pluginManager.getAccountPlugins():
- self.accounts[name] = {}
-
-
- @lock
- def updateAccount(self, plugin , user, password=None, options={}):
- """add or update account"""
- if plugin in self.accounts:
- p = self.getAccountPlugin(plugin)
- updated = p.updateAccounts(user, password, options)
- #since accounts is a ref held by the plugin, self.accounts doesn't need to be updated here
-
- self.saveAccounts()
- if updated: p.scheduleRefresh(user, force=False)
-
-
- @lock
- def removeAccount(self, plugin, user):
- """remove account"""
-
- if plugin in self.accounts:
- p = self.getAccountPlugin(plugin)
- p.removeAccount(user)
-
- self.saveAccounts()
-
-
- @lock
- def getAccountInfos(self, force=True, refresh=False):
- data = {}
-
- if refresh:
- self.core.scheduler.addJob(0, self.core.accountManager.getAccountInfos)
- force = False
-
- for p in self.accounts.keys():
- if self.accounts[p]:
- p = self.getAccountPlugin(p)
- if p:
- data[p.__name__] = p.getAllAccounts(force)
- else: #@NOTE: When an account has been skipped, p is None
- data[p] = []
- else:
- data[p] = []
- e = AccountUpdateEvent()
- self.core.pullManager.addEvent(e)
- return data
-
-
- def sendChange(self):
- e = AccountUpdateEvent()
- self.core.pullManager.addEvent(e)
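
For orientation, this is the accounts.conf layout that the loadAccounts()/saveAccounts() pair above reads and writes; the plugin name, user and option below are purely illustrative:

version: 1

UploadedTo:

    someuser:s3cr3tpw
    @limitDL 2

(A section header is a plugin name ending in a single ":", each "user:password" line adds an account, and "@option value..." lines attach options to the account named on the preceding line.)
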
diff --git a/module/manager/Addon.py b/module/manager/Addon.py
deleted file mode 100644
index d293109c7..000000000
--- a/module/manager/Addon.py
+++ /dev/null
@@ -1,304 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN, mkaay
-# @interface-version: 0.1
-
-import __builtin__
-
-import traceback
-from threading import RLock, Thread
-
-from types import MethodType
-
-from pyload.manager.thread.Addon import AddonThread
-from pyload.manager.Plugin import literal_eval
-from pyload.utils import lock
-
-
-class AddonManager(object):
- """Manages addons, delegates and handles Events.
-
- Every plugin can define events, \
- but some very useful events are called by the Core.
- Instead of overwriting addon methods you can use an event listener,
- which provides an additional entry point in the control flow.
- Only do very short tasks or use threads.
-
- **Known Events:**
- Most addon methods exist as events. These are the additional known events.
-
- ======================= ============== ==================================
- Name Arguments Description
- ======================= ============== ==================================
- download-preparing fid A download was just queued and will be prepared now.
- download-start fid A plugin will immediately start the download afterwards.
- links-added links, pid Someone just added links, you are able to modify the links.
- all_downloads-processed Every link was handled, pyload would idle afterwards.
- all_downloads-finished Every download in queue is finished.
- config-changed The config was changed via the api.
- pluginConfigChanged The plugin config changed, due to api or internal process.
- ======================= ============== ==================================
-
- | Notes:
- | all_downloads-processed is *always* called before all_downloads-finished.
- | config-changed is *always* called before pluginConfigChanged.
-
-
- """
-
- def __init__(self, core):
- self.core = core
-
- __builtin__.addonManager = self #: needed to let addons register themselves
-
- self.plugins = []
- self.pluginMap = {}
- self.methods = {} #: dict of names and list of methods usable by rpc
-
- self.events = {} #: contains events
-
- # registering callback for config event
- self.core.config.pluginCB = MethodType(self.dispatchEvent, "pluginConfigChanged", basestring) #@TODO: Rename event pluginConfigChanged
-
- self.addEvent("pluginConfigChanged", self.manageAddon)
-
- self.lock = RLock()
- self.createIndex()
-
-
- def try_catch(func):
-
- def new(*args):
- try:
- return func(*args)
- except Exception, e:
- args[0].log.error(_("Error executing addon: %s") % e)
- if args[0].core.debug:
- traceback.print_exc()
-
- return new
-
-
- def addRPC(self, plugin, func, doc):
- plugin = plugin.rpartition(".")[2]
- doc = doc.strip() if doc else ""
-
- if plugin in self.methods:
- self.methods[plugin][func] = doc
- else:
- self.methods[plugin] = {func: doc}
-
-
- def callRPC(self, plugin, func, args, parse):
- if not args:
- args = tuple()
- if parse:
- args = tuple([literal_eval(x) for x in args])
- plugin = self.pluginMap[plugin]
- f = getattr(plugin, func)
- return f(*args)
-
-
- def createIndex(self):
- plugins = []
- active = []
- deactive = []
-
- for pluginname in self.core.pluginManager.addonPlugins:
- try:
- # hookClass = getattr(plugin, plugin.__name__)
- if self.core.config.getPlugin(pluginname, "activated"):
- pluginClass = self.core.pluginManager.loadClass("addon", pluginname)
- if not pluginClass:
- continue
-
- plugin = pluginClass(self.core, self)
- plugins.append(plugin)
- self.pluginMap[pluginClass.__name__] = plugin
- if plugin.isActivated():
- active.append(pluginClass.__name__)
- else:
- deactive.append(pluginname)
-
- except Exception:
- self.core.log.warning(_("Failed activating %(name)s") % {"name": pluginname})
- if self.core.debug:
- traceback.print_exc()
-
- self.core.log.info(_("Activated addons: %s") % ", ".join(sorted(active)))
- self.core.log.info(_("Deactivated addons: %s") % ", ".join(sorted(deactive)))
-
- self.plugins = plugins
-
-
- def manageAddon(self, plugin, name, value):
- if name == "activated" and value:
- self.activateAddon(plugin)
-
- elif name == "activated" and not value:
- self.deactivateAddon(plugin)
-
-
- def activateAddon(self, pluginname):
- # check if already loaded
- for inst in self.plugins:
- if inst.__name__ == pluginname:
- return
-
- pluginClass = self.core.pluginManager.loadClass("addon", pluginname)
-
- if not pluginClass:
- return
-
- self.core.log.debug("Activate addon: %s" % pluginname)
-
- addon = pluginClass(self.core, self)
- self.plugins.append(addon)
- self.pluginMap[pluginClass.__name__] = addon
-
- addon.activate()
-
-
- def deactivateAddon(self, pluginname):
- for plugin in self.plugins:
- if plugin.__name__ == pluginname:
- addon = plugin
- break
- else:
- return
-
- self.core.log.debug("Deactivate addon: %s" % pluginname)
-
- addon.deactivate()
-
- #remove periodic call
- self.core.log.debug("Removed callback: %s" % self.core.scheduler.removeJob(addon.cb))
-
- self.plugins.remove(addon)
- del self.pluginMap[addon.__name__]
-
-
- @try_catch
- def coreReady(self):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.activate()
-
- self.dispatchEvent("addon-start")
-
-
- @try_catch
- def coreExiting(self):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.exit()
-
- self.dispatchEvent("addon-exit")
-
-
- @lock
- def downloadPreparing(self, pyfile):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.downloadPreparing(pyfile)
-
- self.dispatchEvent("download-preparing", pyfile)
-
-
- @lock
- def downloadFinished(self, pyfile):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.downloadFinished(pyfile)
-
- self.dispatchEvent("download-finished", pyfile)
-
-
- @lock
- @try_catch
- def downloadFailed(self, pyfile):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.downloadFailed(pyfile)
-
- self.dispatchEvent("download-failed", pyfile)
-
-
- @lock
- def packageFinished(self, package):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.packageFinished(package)
-
- self.dispatchEvent("package-finished", package)
-
-
- @lock
- def beforeReconnecting(self, ip):
- for plugin in self.plugins:
- plugin.beforeReconnecting(ip)
-
- self.dispatchEvent("beforeReconnecting", ip)
-
-
- @lock
- def afterReconnecting(self, ip):
- for plugin in self.plugins:
- if plugin.isActivated():
- plugin.afterReconnecting(ip)
-
- self.dispatchEvent("afterReconnecting", ip)
-
-
- def startThread(self, function, *args, **kwargs):
- return AddonThread(self.core.threadManager, function, args, kwargs)
-
-
- def activePlugins(self):
- """ returns all active plugins """
- return [x for x in self.plugins if x.isActivated()]
-
-
- def getAllInfo(self):
- """returns info stored by addon plugins"""
- info = {}
- for name, plugin in self.pluginMap.iteritems():
- if plugin.info:
- # copy and convert to str
- info[name] = dict(
- [(x, str(y) if not isinstance(y, basestring) else y) for x, y in plugin.info.iteritems()])
- return info
-
-
- def getInfo(self, plugin):
- info = {}
- if plugin in self.pluginMap and self.pluginMap[plugin].info:
- info = dict((x, str(y) if not isinstance(y, basestring) else y)
- for x, y in self.pluginMap[plugin].info.iteritems())
- return info
-
-
- def addEvent(self, event, func):
- """Adds an event listener for event name"""
- if event in self.events:
- self.events[event].append(func)
- else:
- self.events[event] = [func]
-
-
- def removeEvent(self, event, func):
- """removes previously added event listener"""
- if event in self.events:
- self.events[event].remove(func)
-
-
- def dispatchEvent(self, event, *args):
- """dispatches event with args"""
- if event in self.events:
- for f in self.events[event]:
- try:
- f(*args)
- except Exception, e:
- self.core.log.warning("Error calling event handler %s: %s, %s, %s"
- % (event, f, args, str(e)))
- if self.core.debug:
- traceback.print_exc()
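
A minimal sketch of how the event API above is used from plugin code; `core` stands for a running pyload Core instance and is an assumption, not part of this diff:

def log_finished(pyfile):
    print("finished: %s" % pyfile.name)

# downloadFinished() above dispatches "download-finished" with the pyfile
# as its only argument, so the listener receives exactly one parameter
core.addonManager.addEvent("download-finished", log_finished)
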
diff --git a/module/manager/Captcha.py b/module/manager/Captcha.py
deleted file mode 100644
index e54eacf30..000000000
--- a/module/manager/Captcha.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN, mkaay
-
-from time import time
-from traceback import print_exc
-from threading import Lock
-
-from pyload.utils import encode
-
-
-class CaptchaManager(object):
- def __init__(self, core):
- self.lock = Lock()
- self.core = core
- self.tasks = [] # task store, for outgoing tasks only
- self.ids = 0 # only for internal purpose
-
- def newTask(self, img, format, file, result_type):
- task = CaptchaTask(self.ids, img, format, file, result_type)
- self.ids += 1
- return task
-
- def removeTask(self, task):
- self.lock.acquire()
- if task in self.tasks:
- self.tasks.remove(task)
- self.lock.release()
-
- def getTask(self):
- self.lock.acquire()
- for task in self.tasks:
- if task.status in ("waiting", "shared-user"):
- self.lock.release()
- return task
- self.lock.release()
- return None
-
- def getTaskByID(self, tid):
- self.lock.acquire()
- for task in self.tasks:
- if task.id == str(tid): # task ids are strings
- self.lock.release()
- return task
- self.lock.release()
- return None
-
- def handleCaptcha(self, task, timeout=50):
- cli = self.core.isClientConnected()
-
- if cli: #: client connected -> should solve the captcha
- task.setWaiting(timeout) #wait 50 sec for response
-
- for plugin in self.core.addonManager.activePlugins():
- try:
- plugin.captchaTask(task)
- except Exception:
- if self.core.debug:
- print_exc()
-
- if task.handler or cli: #: the captcha was handled
- self.tasks.append(task)
- return True
- task.error = _("No Client connected for captcha decrypting")
- return False
-
-
-class CaptchaTask(object):
- def __init__(self, id, img, format, file, result_type='textual'):
- self.id = str(id)
- self.captchaImg = img
- self.captchaFormat = format
- self.captchaFile = file
- self.captchaResultType = result_type
- self.handler = [] #: the hook plugins that will take care of the solution
- self.result = None
- self.waitUntil = None
- self.error = None # error message
- self.status = "init"
- self.data = {} # handler can store data here
-
- def getCaptcha(self):
- return self.captchaImg, self.captchaFormat, self.captchaResultType
-
- def setResult(self, text):
- if self.isTextual():
- self.result = text
- if self.isPositional():
- try:
- parts = text.split(',')
- self.result = (int(parts[0]), int(parts[1]))
- except Exception:
- self.result = None
-
- def getResult(self):
- return encode(self.result)
-
- def getStatus(self):
- return self.status
-
- def setWaiting(self, sec):
- """ let the captcha wait secs for the solution """
- self.waitUntil = max(time() + sec, self.waitUntil)
- self.status = "waiting"
-
- def isWaiting(self):
- if self.result or self.error or self.timedOut():
- return False
- else:
- return True
-
- def isTextual(self):
- """ returns if text is written on the captcha """
- return self.captchaResultType == 'textual'
-
- def isPositional(self):
- """ returns if user have to click a specific region on the captcha """
- return self.captchaResultType == 'positional'
-
- def setWatingForUser(self, exclusive):
- if exclusive:
- self.status = "user"
- else:
- self.status = "shared-user"
-
- def timedOut(self):
- return time() > self.waitUntil
-
- def invalid(self):
- """ indicates the captcha was not correct """
- for x in self.handler:
- x.captchaInvalid(self)
-
- def correct(self):
- for x in self.handler:
- x.captchaCorrect(self)
-
- def __str__(self):
- return "<CaptchaTask '%s'>" % self.id
diff --git a/module/manager/Event.py b/module/manager/Event.py
deleted file mode 100644
index 20897290e..000000000
--- a/module/manager/Event.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: mkaay
-
-from time import time
-from pyload.utils import uniqify
-
-class PullManager(object):
- def __init__(self, core):
- self.core = core
- self.clients = []
-
- def newClient(self, uuid):
- self.clients.append(Client(uuid))
-
- def clean(self):
- for n, client in enumerate(self.clients):
- if client.lastActive + 30 < time():
- del self.clients[n]
-
- def getEvents(self, uuid):
- events = []
- validUuid = False
- for client in self.clients:
- if client.uuid == uuid:
- client.lastActive = time()
- validUuid = True
- while client.newEvents():
- events.append(client.popEvent().toList())
- break
- if not validUuid:
- self.newClient(uuid)
- events = [ReloadAllEvent("queue").toList(), ReloadAllEvent("collector").toList()]
- return uniqify(events)
-
- def addEvent(self, event):
- for client in self.clients:
- client.addEvent(event)
-
-class Client(object):
- def __init__(self, uuid):
- self.uuid = uuid
- self.lastActive = time()
- self.events = []
-
- def newEvents(self):
- return len(self.events) > 0
-
- def popEvent(self):
- if not len(self.events):
- return None
- return self.events.pop(0)
-
- def addEvent(self, event):
- self.events.append(event)
-
-class UpdateEvent(object):
- def __init__(self, itype, iid, destination):
- assert itype == "pack" or itype == "file"
- assert destination == "queue" or destination == "collector"
- self.type = itype
- self.id = iid
- self.destination = destination
-
- def toList(self):
- return ["update", self.destination, self.type, self.id]
-
-class RemoveEvent(object):
- def __init__(self, itype, iid, destination):
- assert itype == "pack" or itype == "file"
- assert destination == "queue" or destination == "collector"
- self.type = itype
- self.id = iid
- self.destination = destination
-
- def toList(self):
- return ["remove", self.destination, self.type, self.id]
-
-class InsertEvent(object):
- def __init__(self, itype, iid, after, destination):
- assert itype == "pack" or itype == "file"
- assert destination == "queue" or destination == "collector"
- self.type = itype
- self.id = iid
- self.after = after
- self.destination = destination
-
- def toList(self):
- return ["insert", self.destination, self.type, self.id, self.after]
-
-class ReloadAllEvent(object):
- def __init__(self, destination):
- assert destination == "queue" or destination == "collector"
- self.destination = destination
-
- def toList(self):
- return ["reload", self.destination]
-
-class AccountUpdateEvent(object):
- def toList(self):
- return ["account"]
-
-class ConfigUpdateEvent(object):
- def toList(self):
- return ["config"]
diff --git a/module/manager/Plugin.py b/module/manager/Plugin.py
deleted file mode 100644
index 879430be2..000000000
--- a/module/manager/Plugin.py
+++ /dev/null
@@ -1,404 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-import sys
-
-from itertools import chain
-from os import listdir, makedirs
-from os.path import isdir, isfile, join, exists, abspath
-from sys import version_info
-from traceback import print_exc
-from urllib import unquote
-
-from SafeEval import const_eval as literal_eval
-
-
-class PluginManager(object):
- ROOT = "pyload.plugin."
- USERROOT = "userplugins."
- TYPES = ["account", "addon", "container", "crypter", "hook", "hoster", "internal", "ocr"]
-
- PATTERN = re.compile(r'__pattern__\s*=\s*u?r("|\')([^"\']+)')
- VERSION = re.compile(r'__version__\s*=\s*("|\')([\d.]+)')
- CONFIG = re.compile(r'__config__\s*=\s*\[([^\]]+)', re.M)
- DESC = re.compile(r'__description__\s*=\s*("|"""|\')([^"\']+)')
-
-
- def __init__(self, core):
- self.core = core
-
- self.plugins = {}
- self.createIndex()
-
- #register for import addon
- sys.meta_path.append(self)
-
-
- def loadTypes(self):
- rootdir = join(pypath, "pyload", "plugins")
- userdir = "userplugins"
-
- types = set().union(*[[d for d in listdir(p) if isdir(join(p, d))]
- for p in (rootdir, userdir) if exists(p)])
-
- if not types:
- self.log.critical(_("No plugins found!"))
-
- self.TYPES = list(set(self.TYPES) | types)
-
-
- def createIndex(self):
- """create information for all plugins available"""
-
- sys.path.append(abspath(""))
-
- self.loadTypes()
-
- for type in self.TYPES:
- self.plugins[type] = self.parse(type)
- setattr(self, "%sPlugins" % type, self.plugins[type])
-
- self.plugins['addon'] = self.addonPlugins.update(self.hookPlugins)
-
- self.core.log.debug("Created index of plugins")
-
-
- def parse(self, folder, rootplugins={}):
- """
- returns dict with information
- home contains parsed plugins from pyload.
- """
-
- plugins = {}
-
- if rootplugins:
- try:
- pfolder = join("userplugins", folder)
- if not exists(pfolder):
- makedirs(pfolder)
-
- for ifile in (join("userplugins", "__init__.py"),
- join(pfolder, "__init__.py")):
- if not exists(ifile):
- f = open(ifile, "wb")
- f.close()
-
- except IOError, e:
- self.core.log.critical(str(e))
- return rootplugins
-
- else:
- pfolder = join(pypath, "pyload", "plugins", folder)
-
- for f in listdir(pfolder):
- if (isfile(join(pfolder, f)) and f.endswith(".py") or f.endswith("_25.pyc") or f.endswith(
- "_26.pyc") or f.endswith("_27.pyc")) and not f.startswith("_"):
-
- try:
- with open(join(pfolder, f)) as data:
- content = data.read()
-
- except IOError, e:
- self.core.log.error(str(e))
- continue
-
- if f.endswith("_25.pyc") and version_info[0:2] != (2, 5): #@TODO: Remove in 0.4.10
- continue
-
- elif f.endswith("_26.pyc") and version_info[0:2] != (2, 6): #@TODO: Remove in 0.4.10
- continue
-
- elif f.endswith("_27.pyc") and version_info[0:2] != (2, 7): #@TODO: Remove in 0.4.10
- continue
-
- name = f[:-3]
- if name[-1] == ".":
- name = name[:-4]
-
- version = self.VERSION.findall(content)
- if version:
- version = float(version[0][1])
- else:
- version = 0
-
- if rootplugins and name in rootplugins:
- if rootplugins[name]['version'] >= version:
- continue
-
- plugins[name] = {}
- plugins[name]['version'] = version
-
- module = f.replace(".pyc", "").replace(".py", "")
-
- # the plugin is loaded from user directory
- plugins[name]['user'] = True if rootplugins else False
- plugins[name]['name'] = module
-
- pattern = self.PATTERN.findall(content)
-
- if pattern:
- pattern = pattern[0][1]
-
- try:
- regexp = re.compile(pattern)
- except Exception:
- self.core.log.error(_("%s has a invalid pattern") % name)
- pattern = r'^unmatchable$'
- regexp = re.compile(pattern)
-
- plugins[name]['pattern'] = pattern
- plugins[name]['re'] = regexp
-
- # internals have no config
- if folder == "internal":
- self.core.config.deleteConfig(name)
- continue
-
- config = self.CONFIG.findall(content)
- if config:
- try:
- config = literal_eval(config[0].strip().replace("\n", "").replace("\r", ""))
- desc = self.DESC.findall(content)
- desc = desc[0][1] if desc else ""
-
- if type(config[0]) == tuple:
- config = [list(x) for x in config]
- else:
- config = [list(config)]
-
- if folder not in ("account", "internal") and not [True for item in config if item[0] == "activated"]:
- config.insert(0, ["activated", "bool", "Activated", False if folder in ("addon", "hook") else True])
-
- self.core.config.addPluginConfig(name, config, desc)
- except Exception:
- self.core.log.error("Invalid config in %s: %s" % (name, config))
-
- elif folder in ("addon", "hook"): #force config creation
- desc = self.DESC.findall(content)
- desc = desc[0][1] if desc else ""
- config = (["activated", "bool", "Activated", False],)
-
- try:
- self.core.config.addPluginConfig(name, config, desc)
- except Exception:
- self.core.log.error("Invalid config in %s: %s" % (name, config))
-
- if not rootplugins and plugins: #: Double check
- plugins.update(self.parse(folder, plugins))
-
- return plugins
-
-
- def parseUrls(self, urls):
- """parse plugins for given list of urls"""
-
- last = None
- res = [] #: tupels of (url, plugintype, pluginname)
-
- for url in urls:
- if type(url) not in (str, unicode, buffer):
- continue
-
- url = unquote(url)
-
- if last and last[2]['re'].match(url):
- res.append((url, last[0], last[1]))
- continue
-
- for type in self.TYPES:
- for name, plugin in self.plugins[type]:
-
- m = None
- try:
- if 'pattern' in plugin:
- m = plugin['re'].match(url)
-
- except KeyError:
- self.core.log.error(_("Plugin [%(type)s] %(name)s skipped due broken pattern")
- % {'name': name, 'type': type})
-
- if m:
- res.append((url, type, name))
- last = (type, name, plugin)
- break
- else:
- res.append((url, "internal", "BasePlugin"))
-
- return res
-
-
- def findPlugin(self, type, name):
- if type not in self.plugins:
- return None
-
- elif name not in self.plugins[type]:
- self.core.log.warning(_("Plugin [%(type)s] %(name)s not found | Using plugin: [internal] BasePlugin")
- % {'name': name, 'type': type})
- return self.internalPlugins["BasePlugin"]
-
- else:
- return self.plugins[type][name]
-
-
- def getPlugin(self, type, name, original=False):
- """return plugin module from hoster|decrypter|container"""
- plugin = self.findPlugin(type, name)
-
- if plugin is None:
- return {}
-
- if "new_module" in plugin and not original:
- return plugin['new_module']
- else:
- return self.loadModule(type, name)
-
-
- def getPluginName(self, type, name):
- """ used to obtain new name if other plugin was injected"""
- plugin = self.findPlugin(type, name)
-
- if plugin is None:
- return ""
-
- if "new_name" in plugin:
- return plugin['new_name']
-
- return name
-
-
- def loadModule(self, type, name):
- """ Returns loaded module for plugin
-
- :param type: plugin type, subfolder of pyload.plugins
- :param name:
- """
- plugins = self.plugins[type]
-
- if name in plugins:
- if "module" in plugins[name]:
- return plugins[name]['module']
-
- try:
- module = __import__(self.ROOT + "%s.%s" % (type, plugins[name]['name']), globals(), locals(),
- plugins[name]['name'])
-
- except Exception, e:
- self.core.log.error(_("Error importing plugin: [%(type)s] %(name)s (v%(version).2f) | %(errmsg)s")
- % {'name': name, 'type': type, 'version': plugins[name]['version'], "errmsg": str(e)})
- if self.core.debug:
- print_exc()
-
- else:
- plugins[name]['module'] = module #: cache import, maybe unneeded
-
- self.core.log.debug(_("Loaded plugin: [%(type)s] %(name)s (v%(version).2f)")
- % {'name': name, 'type': type, 'version': plugins[name]['version']})
- return module
-
-
- def loadClass(self, type, name):
- """Returns the class of a plugin with the same name"""
- module = self.loadModule(type, name)
- if module:
- return getattr(module, name)
- else:
- return None
-
-
- def getAccountPlugins(self):
- """return list of account plugin names"""
- return self.accountPlugins.keys()
-
-
- def find_module(self, fullname, path=None):
- #redirecting imports if necessary
- if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #separate pyload plugins
- if fullname.startswith(self.USERROOT): user = 1
- else: user = 0 #used as bool and int
-
- split = fullname.split(".")
- if len(split) != 4 - user: return
- type, name = split[2 - user:4 - user]
-
- if type in self.plugins and name in self.plugins[type]:
- #userplugin is a newer version
- if not user and self.plugins[type][name]['user']:
- return self
- #imported from userdir, but pyloads is newer
- if user and not self.plugins[type][name]['user']:
- return self
-
-
- def load_module(self, name, replace=True):
- if name not in sys.modules: #could be already in modules
- if replace:
- if self.ROOT in name:
- newname = name.replace(self.ROOT, self.USERROOT)
- else:
- newname = name.replace(self.USERROOT, self.ROOT)
- else:
- newname = name
-
- base, plugin = newname.rsplit(".", 1)
-
- self.core.log.debug("Redirected import %s -> %s" % (name, newname))
-
- module = __import__(newname, globals(), locals(), [plugin])
- #inject under new and old name
- sys.modules[name] = module
- sys.modules[newname] = module
-
- return sys.modules[name]
-
-
- def reloadPlugins(self, type_plugins):
- """ reload and reindex plugins """
- if not type_plugins:
- return None
-
- self.core.log.debug("Request reload of plugins: %s" % type_plugins)
-
- reloaded = []
-
- as_dict = {}
- for t,n in type_plugins:
- if t in as_dict:
- as_dict[t].append(n)
- else:
- as_dict[t] = [n]
-
- for type in as_dict.iterkeys():
- if type in ("addon", "internal"): #: do not reload them because would cause to much side effects
- self.core.log.debug("Skipping reload for plugin: [%(type)s] %(name)s" % {'name': plugin, 'type': type})
- continue
-
- for plugin in as_dict[type]:
- if plugin in self.plugins[type] and "module" in self.plugins[type][plugin]:
- self.core.log.debug(_("Reloading plugin: [%(type)s] %(name)s") % {'name': plugin, 'type': type})
-
- try:
- reload(self.plugins[type][plugin]['module'])
-
- except Exception, e:
- self.core.log.error(_("Error when reloading plugin: [%(type)s] %(name)s") % {'name': plugin, 'type': type}, e)
- continue
-
- else:
- reloaded.append((type, plugin))
-
- #index creation
- self.plugins[type] = self.parse(type)
- setattr(self, "%sPlugins" % type, self.plugins[type])
-
- if "account" in as_dict: #: accounts needs to be reloaded
- self.core.accountManager.initPlugins()
- self.core.scheduler.addJob(0, self.core.accountManager.getAccountInfos)
-
- return reloaded #: return a list of the plugins successfully reloaded
-
-
- def reloadPlugin(self, type_plugin):
- """ reload and reindex ONE plugin """
- return True if self.reloadPlugins(type_plugin) else False
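
A hedged sketch of the module-level attributes that the PATTERN, VERSION, CONFIG and DESC regexes above extract from a plugin source file; the hoster name, URL pattern and version are invented for illustration:

__pattern__     = r'https?://(?:www\.)?example\.com/file/\w+'
__version__     = "0.01"
__description__ = """Example.com hoster plugin"""
__config__      = [("activated", "bool", "Activated", True)]
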
diff --git a/module/manager/Remote.py b/module/manager/Remote.py
deleted file mode 100644
index 910881164..000000000
--- a/module/manager/Remote.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from threading import Thread
-from traceback import print_exc
-
-class BackendBase(Thread):
- def __init__(self, manager):
- Thread.__init__(self)
- self.m = manager
- self.core = manager.core
- self.enabled = True
- self.running = False
-
- def run(self):
- self.running = True
- try:
- self.serve()
- except Exception, e:
- self.core.log.error(_("Remote backend error: %s") % e)
- if self.core.debug:
- print_exc()
- finally:
- self.running = False
-
- def setup(self, host, port):
- pass
-
- def checkDeps(self):
- return True
-
- def serve(self):
- pass
-
- def shutdown(self):
- pass
-
- def stop(self):
- self.enabled = False # set flag and call shutdown, so the thread can react
- self.shutdown()
-
-
-class RemoteManager(object):
- available = []
-
- def __init__(self, core):
- self.core = core
- self.backends = []
-
- if self.core.remote:
- self.available.append("ThriftBackend")
-# else:
-# self.available.append("SocketBackend")
-
-
- def startBackends(self):
- host = self.core.config["remote"]["listenaddr"]
- port = self.core.config["remote"]["port"]
-
- for b in self.available:
- klass = getattr(__import__("pyload.remote.%s" % b, globals(), locals(), [b], -1), b)
- backend = klass(self)
- if not backend.checkDeps():
- continue
- try:
- backend.setup(host, port)
- self.core.log.info(_("Starting %(name)s: %(addr)s:%(port)s") % {"name": b, "addr": host, "port": port})
- except Exception, e:
- self.core.log.error(_("Failed loading backend %(name)s | %(error)s") % {"name": b, "error": str(e)})
- if self.core.debug:
- print_exc()
- else:
- backend.start()
- self.backends.append(backend)
-
- port += 1
diff --git a/module/manager/Thread.py b/module/manager/Thread.py
deleted file mode 100644
index 2ace9c789..000000000
--- a/module/manager/Thread.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from os.path import exists, join
-import re
-from subprocess import Popen
-from threading import Event, Lock
-from time import sleep, time
-from traceback import print_exc
-from random import choice
-
-import pycurl
-
-from pyload.manager.thread.Decrypter import DecrypterThread
-from pyload.manager.thread.Download import DownloadThread
-from pyload.manager.thread.Info import InfoThread
-from pyload.datatype.File import PyFile
-from pyload.network.RequestFactory import getURL
-from pyload.utils import freeSpace, lock
-
-
-class ThreadManager(object):
- """manages the download threads, assign jobs, reconnect etc"""
-
-
- def __init__(self, core):
- """Constructor"""
- self.core = core
-
- self.threads = [] #: thread list
- self.localThreads = [] #: addon+decrypter threads
-
- self.pause = True
-
- self.reconnecting = Event()
- self.reconnecting.clear()
- self.downloaded = 0 #number of files downloaded since last cleanup
-
- self.lock = Lock()
-
- # some operations require fetching url info from the hoster, so we cache it so it won't be done twice
- # contains a timestamp and will be purged after timeout
- self.infoCache = {}
-
- # pool of ids for online check
- self.resultIDs = 0
-
- # threads which are fetching hoster results
- self.infoResults = {}
- #timeout for cache purge
- self.timestamp = 0
-
- pycurl.global_init(pycurl.GLOBAL_DEFAULT)
-
- for i in range(0, self.core.config.get("download", "max_downloads")):
- self.createThread()
-
-
- def createThread(self):
- """create a download thread"""
-
- thread = DownloadThread(self)
- self.threads.append(thread)
-
- def createInfoThread(self, data, pid):
- """
- start a thread which fetches online status and other info
- data = [ .. () .. ]
- """
- self.timestamp = time() + 5 * 60
-
- InfoThread(self, data, pid)
-
- @lock
- def createResultThread(self, data, add=False):
- """ creates a thread to fetch online status, returns result id """
- self.timestamp = time() + 5 * 60
-
- rid = self.resultIDs
- self.resultIDs += 1
-
- InfoThread(self, data, rid=rid, add=add)
-
- return rid
-
-
- @lock
- def getInfoResult(self, rid):
- """returns result and clears it"""
- self.timestamp = time() + 5 * 60
-
- if rid in self.infoResults:
- data = self.infoResults[rid]
- self.infoResults[rid] = {}
- return data
- else:
- return {}
-
- @lock
- def setInfoResults(self, rid, result):
- self.infoResults[rid].update(result)
-
- def getActiveFiles(self):
- active = [x.active for x in self.threads if x.active and isinstance(x.active, PyFile)]
-
- for t in self.localThreads:
- active.extend(t.getActiveFiles())
-
- return active
-
- def processingIds(self):
- """get a id list of all pyfiles processed"""
- return [x.id for x in self.getActiveFiles()]
-
-
- def work(self):
- """run all task which have to be done (this is for repetivive call by core)"""
- try:
- self.tryReconnect()
- except Exception, e:
- self.core.log.error(_("Reconnect Failed: %s") % str(e) )
- self.reconnecting.clear()
- if self.core.debug:
- print_exc()
- self.checkThreadCount()
-
- try:
- self.assignJob()
- except Exception, e:
- self.core.log.warning("Assign job error", e)
- if self.core.debug:
- print_exc()
-
- sleep(0.5)
- self.assignJob()
- #it may have failed non-critically, so we try it again
-
- if (self.infoCache or self.infoResults) and self.timestamp < time():
- self.infoCache.clear()
- self.infoResults.clear()
- self.core.log.debug("Cleared Result cache")
-
- #--------------------------------------------------------------------------
- def tryReconnect(self):
- """checks if reconnect needed"""
-
- if not (self.core.config["reconnect"]["activated"] and self.core.api.isTimeReconnect()):
- return False
-
- active = [x.active.plugin.wantReconnect and x.active.plugin.waiting for x in self.threads if x.active]
-
- if not (0 < active.count(True) == len(active)):
- return False
-
- if not exists(self.core.config['reconnect']['method']):
- if exists(join(pypath, self.core.config['reconnect']['method'])):
- self.core.config['reconnect']['method'] = join(pypath, self.core.config['reconnect']['method'])
- else:
- self.core.config["reconnect"]["activated"] = False
- self.core.log.warning(_("Reconnect script not found!"))
- return
-
- self.reconnecting.set()
-
- #Do reconnect
- self.core.log.info(_("Starting reconnect"))
-
- while [x.active.plugin.waiting for x in self.threads if x.active].count(True) != 0:
- sleep(0.25)
-
- ip = self.getIP()
-
- self.core.addonManager.beforeReconnecting(ip)
-
- self.core.log.debug("Old IP: %s" % ip)
-
- try:
- reconn = Popen(self.core.config['reconnect']['method'], bufsize=-1, shell=True)#, stdout=subprocess.PIPE)
- except Exception:
- self.core.log.warning(_("Failed executing reconnect script!"))
- self.core.config["reconnect"]["activated"] = False
- self.reconnecting.clear()
- if self.core.debug:
- print_exc()
- return
-
- reconn.wait()
- sleep(1)
- ip = self.getIP()
- self.core.addonManager.afterReconnecting(ip)
-
- self.core.log.info(_("Reconnected, new IP: %s") % ip)
-
- self.reconnecting.clear()
-
- def getIP(self):
- """retrieve current ip"""
- services = [("http://automation.whatismyip.com/n09230945.asp", "(\S+)"),
- ("http://checkip.dyndns.org/",".*Current IP Address: (\S+)</body>.*")]
-
- ip = ""
- for i in range(10):
- try:
- sv = choice(services)
- ip = getURL(sv[0])
- ip = re.match(sv[1], ip).group(1)
- break
- except Exception:
- ip = ""
- sleep(1)
-
- return ip
-
- #--------------------------------------------------------------------------
- def checkThreadCount(self):
- """checks if there are need for increasing or reducing thread count"""
-
- if len(self.threads) == self.core.config.get("download", "max_downloads"):
- return True
- elif len(self.threads) < self.core.config.get("download", "max_downloads"):
- self.createThread()
- else:
- free = [x for x in self.threads if not x.active]
- if free:
- free[0].put("quit")
-
-
- def cleanPycurl(self):
- """ make a global curl cleanup (currently ununused) """
- if self.processingIds():
- return False
- pycurl.global_cleanup()
- pycurl.global_init(pycurl.GLOBAL_DEFAULT)
- self.downloaded = 0
- self.core.log.debug("Cleaned up pycurl")
- return True
-
- #--------------------------------------------------------------------------
- def assignJob(self):
- """assing a job to a thread if possible"""
-
- if self.pause or not self.core.api.isTimeDownload(): return
-
- #if self.downloaded > 20:
- # if not self.cleanPyCurl(): return
-
- free = [x for x in self.threads if not x.active]
-
- inuse = set([(x.active.pluginname, self.getLimit(x)) for x in self.threads if x.active and x.active.hasPlugin() and x.active.plugin.account])
- inuse = map(lambda x: (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])) ,inuse)
- onlimit = [x[0] for x in inuse if x[1] > 0 and x[2] >= x[1]]
-
- occ = [x.active.pluginname for x in self.threads if x.active and x.active.hasPlugin() and not x.active.plugin.multiDL] + onlimit
-
- occ.sort()
- occ = tuple(set(occ))
- job = self.core.files.getJob(occ)
- if job:
- try:
- job.initPlugin()
- except Exception, e:
- self.core.log.critical(str(e))
- print_exc()
- job.setStatus("failed")
- job.error = str(e)
- job.release()
- return
-
- if job.plugin.__type__ == "hoster":
- spaceLeft = freeSpace(self.core.config["general"]["download_folder"]) / 1024 / 1024
- if spaceLeft < self.core.config["general"]["min_free_space"]:
- self.core.log.warning(_("Not enough space left on device"))
- self.pause = True
-
- if free and not self.pause:
- thread = free[0]
- #self.downloaded += 1
-
- thread.put(job)
- else:
- #put job back
- if occ not in self.core.files.jobCache:
- self.core.files.jobCache[occ] = []
- self.core.files.jobCache[occ].append(job.id)
-
- #check for decrypt jobs
- job = self.core.files.getDecryptJob()
- if job:
- job.initPlugin()
- thread = DecrypterThread(self, job)
-
-
- else:
- thread = DecrypterThread(self, job)
-
- def getLimit(self, thread):
- limit = thread.active.plugin.account.getAccountData(thread.active.plugin.user)["options"].get("limitDL", ["0"])[0]
- return int(limit)
-
- def cleanup(self):
- """do global cleanup, should be called when finished with pycurl"""
- pycurl.global_cleanup()
diff --git a/module/manager/__init__.py b/module/manager/__init__.py
deleted file mode 100644
index 40a96afc6..000000000
--- a/module/manager/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
diff --git a/module/manager/event/Scheduler.py b/module/manager/event/Scheduler.py
deleted file mode 100644
index fd428a956..000000000
--- a/module/manager/event/Scheduler.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: mkaay
-
-from time import time
-from heapq import heappop, heappush
-from threading import Lock, Thread
-
-class AlreadyCalled(Exception):
- pass
-
-
-class Deferred(object):
- def __init__(self):
- self.call = []
- self.result = ()
-
- def addCallback(self, f, *cargs, **ckwargs):
- self.call.append((f, cargs, ckwargs))
-
- def callback(self, *args, **kwargs):
- if self.result:
- raise AlreadyCalled
- self.result = (args, kwargs)
- for f, cargs, ckwargs in self.call:
- args += tuple(cargs)
- kwargs.update(ckwargs)
- f(*args, **kwargs)
-
-
-class Scheduler(object):
- def __init__(self, core):
- self.core = core
-
- self.queue = PriorityQueue()
-
- def addJob(self, t, call, args=[], kwargs={}, threaded=True):
- d = Deferred()
- t += time()
- j = Job(t, call, args, kwargs, d, threaded)
- self.queue.put((t, j))
- return d
-
-
- def removeJob(self, d):
- """
- :param d: defered object
- :return: if job was deleted
- """
- index = -1
-
- for i, j in enumerate(self.queue):
- if j[1].deferred == d:
- index = i
-
- if index >= 0:
- del self.queue[index]
- return True
-
- return False
-
- def work(self):
- while True:
- t, j = self.queue.get()
- if not j:
- break
- else:
- if t <= time():
- j.start()
- else:
- self.queue.put((t, j))
- break
-
-
-class Job(object):
- def __init__(self, time, call, args=[], kwargs={}, deferred=None, threaded=True):
- self.time = float(time)
- self.call = call
- self.args = args
- self.kwargs = kwargs
- self.deferred = deferred
- self.threaded = threaded
-
- def run(self):
- ret = self.call(*self.args, **self.kwargs)
- if self.deferred is None:
- return
- else:
- self.deferred.callback(ret)
-
- def start(self):
- if self.threaded:
- t = Thread(target=self.run)
- t.setDaemon(True)
- t.start()
- else:
- self.run()
-
-
-class PriorityQueue(object):
- """ a non blocking priority queue """
-
- def __init__(self):
- self.queue = []
- self.lock = Lock()
-
- def __iter__(self):
- return iter(self.queue)
-
- def __delitem__(self, key):
- del self.queue[key]
-
- def put(self, element):
- self.lock.acquire()
- heappush(self.queue, element)
- self.lock.release()
-
- def get(self):
- """ return element or None """
- self.lock.acquire()
- try:
- el = heappop(self.queue)
- return el
- except IndexError:
- return None, None
- finally:
- self.lock.release()
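
A minimal usage sketch for the Scheduler above; it only stores `core`, so None is enough for a standalone demonstration:

def say_hello():
    print("five seconds later")

scheduler = Scheduler(core=None)    # core is only kept as a reference here
d = scheduler.addJob(5, say_hello)  # queue say_hello() to run ~5 seconds from now; returns a Deferred
scheduler.work()                    # normally called periodically by the core main loop
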
diff --git a/module/manager/event/__init__.py b/module/manager/event/__init__.py
deleted file mode 100644
index 40a96afc6..000000000
--- a/module/manager/event/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-
diff --git a/module/manager/thread/Addon.py b/module/manager/thread/Addon.py
deleted file mode 100644
index 7feec227e..000000000
--- a/module/manager/thread/Addon.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.manager.thread.Plugin import PluginThread
-
-
-class AddonThread(PluginThread):
- """thread for addons"""
-
- #--------------------------------------------------------------------------
- def __init__(self, m, function, args, kwargs):
- """Constructor"""
- PluginThread.__init__(self, m)
-
- self.f = function
- self.args = args
- self.kwargs = kwargs
-
- self.active = []
-
- m.localThreads.append(self)
-
- self.start()
-
- def getActiveFiles(self):
- return self.active
-
- def addActive(self, pyfile):
- """ Adds a pyfile to active list and thus will be displayed on overview"""
- if pyfile not in self.active:
- self.active.append(pyfile)
-
- def finishFile(self, pyfile):
- if pyfile in self.active:
- self.active.remove(pyfile)
-
- pyfile.finishIfDone()
-
- def run(self):
- try:
- try:
- self.kwargs["thread"] = self
- self.f(*self.args, **self.kwargs)
- except TypeError, e:
- #dirty method to filter out exceptions
- if "unexpected keyword argument 'thread'" not in e.args[0]:
- raise
-
- del self.kwargs["thread"]
- self.f(*self.args, **self.kwargs)
- finally:
- local = copy(self.active)
- for x in local:
- self.finishFile(x)
-
- self.m.localThreads.remove(self)
diff --git a/module/manager/thread/Decrypter.py b/module/manager/thread/Decrypter.py
deleted file mode 100644
index 51544d1b9..000000000
--- a/module/manager/thread/Decrypter.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.manager.thread.Plugin import PluginThread
-from pyload.plugin.Plugin import Abort, Fail, Retry
-
-
-class DecrypterThread(PluginThread):
- """thread for decrypting"""
-
- def __init__(self, manager, pyfile):
- """constructor"""
- PluginThread.__init__(self, manager)
-
- self.active = pyfile
- manager.localThreads.append(self)
-
- pyfile.setStatus("decrypting")
-
- self.start()
-
- def getActiveFiles(self):
- return [self.active]
-
- def run(self):
- """run method"""
-
- pyfile = self.active
- retry = False
-
- try:
- self.m.log.info(_("Decrypting starts: %s") % pyfile.name)
- pyfile.error = ""
- pyfile.plugin.preprocessing(self)
-
- except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
- return
-
- except Fail, e:
- msg = e.args[0]
-
- if msg == "offline":
- pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
- else:
- pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
- pyfile.error = msg
-
- if self.m.core.debug:
- print_exc()
- return
-
- except Abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- pyfile.setStatus("aborted")
-
- if self.m.core.debug:
- print_exc()
- return
-
- except Retry:
- self.m.log.info(_("Retrying %s") % pyfile.name)
- retry = True
- return self.run()
-
- except Exception, e:
- pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
- pyfile.error = str(e)
-
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- return
-
- finally:
- if not retry:
- pyfile.release()
- self.active = False
- self.m.core.files.save()
- self.m.localThreads.remove(self)
- exc_clear()
-
- if not retry:
- pyfile.delete()
diff --git a/module/manager/thread/Download.py b/module/manager/thread/Download.py
deleted file mode 100644
index c7d21a4ba..000000000
--- a/module/manager/thread/Download.py
+++ /dev/null
@@ -1,213 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.manager.thread.Plugin import PluginThread
-from pyload.plugin.Plugin import Abort, Fail, Reconnect, Retry, SkipDownload
-
-
-class DownloadThread(PluginThread):
- """thread for downloading files from 'real' hoster plugins"""
-
- #--------------------------------------------------------------------------
- def __init__(self, manager):
- """Constructor"""
- PluginThread.__init__(self, manager)
-
- self.queue = Queue() #: job queue
- self.active = False
-
- self.start()
-
- #--------------------------------------------------------------------------
- def run(self):
- """run method"""
- pyfile = None
-
- while True:
- del pyfile
- self.active = self.queue.get()
- pyfile = self.active
-
- if self.active == "quit":
- self.active = False
- self.m.threads.remove(self)
- return True
-
- try:
- if not pyfile.hasPlugin():
- continue
- #this pyfile was deleted while queueing
-
- pyfile.plugin.checkForSameFiles(starting=True)
- self.m.log.info(_("Download starts: %s" % pyfile.name))
-
- # start download
- self.m.core.addonManager.downloadPreparing(pyfile)
- pyfile.error = ""
- pyfile.plugin.preprocessing(self)
-
- self.m.log.info(_("Download finished: %s") % pyfile.name)
- self.m.core.addonManager.downloadFinished(pyfile)
- self.m.core.files.checkPackageFinished(pyfile)
-
- except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
- pyfile.setStatus("failed")
- pyfile.error = "Plugin does not work"
- self.clean(pyfile)
- continue
-
- except Abort:
- try:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- except Exception:
- pass
-
- pyfile.setStatus("aborted")
-
- if self.m.core.debug:
- print_exc()
-
- self.clean(pyfile)
- continue
-
- except Reconnect:
- self.queue.put(pyfile)
- #pyfile.req.clearCookies()
-
- while self.m.reconnecting.isSet():
- sleep(0.5)
-
- continue
-
- except Retry, e:
- reason = e.args[0]
- self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
- self.queue.put(pyfile)
- continue
-
- except Fail, e:
- msg = e.args[0]
-
- if msg == "offline":
- pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
- elif msg == "temp. offline":
- pyfile.setStatus("temp. offline")
- self.m.log.warning(_("Download is temporary offline: %s") % pyfile.name)
- else:
- pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
- pyfile.error = msg
-
- if self.m.core.debug:
- print_exc()
-
- self.m.core.addonManager.downloadFailed(pyfile)
- self.clean(pyfile)
- continue
-
- except error, e:
- if len(e.args) == 2:
- code, msg = e.args
- else:
- code = 0
- msg = e.args
-
- self.m.log.debug("pycurl exception %s: %s" % (code, msg))
-
- if code in (7, 18, 28, 52, 56):
- self.m.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
- wait = time() + 60
-
- pyfile.waitUntil = wait
- pyfile.setStatus("waiting")
- while time() < wait:
- sleep(1)
- if pyfile.abort:
- break
-
- if pyfile.abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- pyfile.setStatus("aborted")
-
- self.clean(pyfile)
- else:
- self.queue.put(pyfile)
-
- continue
-
- else:
- pyfile.setStatus("failed")
- self.m.log.error("pycurl error %s: %s" % (code, msg))
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- self.m.core.addonManager.downloadFailed(pyfile)
-
- self.clean(pyfile)
- continue
-
- except SkipDownload, e:
- pyfile.setStatus("skipped")
-
- self.m.log.info(
- _("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
-
- self.clean(pyfile)
-
- self.m.core.files.checkPackageFinished(pyfile)
-
- self.active = False
- self.m.core.files.save()
-
- continue
-
-
- except Exception, e:
- pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
- pyfile.error = str(e)
-
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- self.m.core.addonManager.downloadFailed(pyfile)
- self.clean(pyfile)
- continue
-
- finally:
- self.m.core.files.save()
- pyfile.checkIfProcessed()
- exc_clear()
-
- #pyfile.plugin.req.clean()
-
- self.active = False
- pyfile.finishIfDone()
- self.m.core.files.save()
-
-
- def put(self, job):
- """assing job to thread"""
- self.queue.put(job)
-
-
- def stop(self):
- """stops the thread"""
- self.put("quit")
diff --git a/module/manager/thread/Info.py b/module/manager/thread/Info.py
deleted file mode 100644
index 4526a07ed..000000000
--- a/module/manager/thread/Info.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from pyload.datatype.File import PyFile
-from pyload.manager.thread.Plugin import PluginThread
-from pyload.api import OnlineStatus
-
-
-class InfoThread(PluginThread):
-
- def __init__(self, manager, data, pid=-1, rid=-1, add=False):
- """Constructor"""
- PluginThread.__init__(self, manager)
-
- self.data = data
- self.pid = pid # package id
- # [ .. (name, plugin) .. ]
-
- self.rid = rid #result id
- self.add = add #add packages instead of return result
-
- self.cache = [] #accumulated data
-
- self.start()
-
- def run(self):
- """run method"""
-
- plugins = {}
- container = []
-
- for url, plugintype, pluginname in data:
- try:
- plugins[plugintype][pluginname].append(url)
- except Exception:
- plugins[plugintype][pluginname] = [url]
-
- # filter out container plugins
- for name in self.m.core.pluginManager.containerPlugins:
- if name in plugins:
- container.extend([(name, url) for url in plugins[name]])
-
- del plugins[name]
-
- #directly write to database
- if self.pid > -1:
- for plugintype, pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
- self.m.core.files.save()
-
- elif self.add:
- for plugintype, pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
-
- else:
- #generate default result
- result = [(url, 0, 3, url) for url in urls]
-
- self.updateCache(pluginname, result)
-
- packs = parseNames([(name, url) for name, x, y, url in self.cache])
-
- self.m.log.debug("Fetched and generated %d packages" % len(packs))
-
- for k, v in packs:
- self.m.core.api.addPackage(k, v)
-
- #empty cache
- del self.cache[:]
-
- else: #post the results
-
-
- for name, url in container:
- #attach container content
- try:
- data = self.decryptContainer(name, url)
- except Exception:
- print_exc()
- self.m.log.error("Could not decrypt container.")
- data = []
-
- for url, plugintype, pluginname in data:
- try:
- plugins[plugintype][pluginname].append(url)
- except Exception:
- plugins[plugintype][pluginname] = [url]
-
- self.m.infoResults[self.rid] = {}
-
- for plugintype, pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
-
- #force to process cache
- if self.cache:
- self.updateResult(pluginname, [], True)
-
- else:
- #generate default result
- result = [(url, 0, 3, url) for url in urls]
-
- self.updateResult(pluginname, result, True)
-
- self.m.infoResults[self.rid]["ALL_INFO_FETCHED"] = {}
-
- self.m.timestamp = time() + 5 * 60
-
-
- def updateDB(self, plugin, result):
- self.m.core.files.updateFileInfo(result, self.pid)
-
- def updateResult(self, plugin, result, force=False):
- #parse package name and generate result
- #accumulate results
-
- self.cache.extend(result)
-
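- # flush accumulated results in batches of 20 links, or immediately when force is set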
- if len(self.cache) >= 20 or force:
- #used for package generating
- tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
- for name, size, status, url in self.cache]
-
- data = parseNames(tmp)
- result = {}
- for k, v in data.iteritems():
- for url, status in v:
- status.packagename = k
- result[url] = status
-
- self.m.setInfoResults(self.rid, result)
-
- self.cache = []
-
- def updateCache(self, plugin, result):
- self.cache.extend(result)
-
- def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
- try:
- result = [] #result loaded from cache
- process = [] #urls to process
- for url in urls:
- if url in self.m.infoCache:
- result.append(self.m.infoCache[url])
- else:
- process.append(url)
-
- if result:
- self.m.log.debug("Fetched %d values from cache for %s" % (len(result), pluginname))
- cb(pluginname, result)
-
- if process:
- self.m.log.debug("Run Info Fetching for %s" % pluginname)
- for result in plugin.getInfo(process):
- #result = [ .. (name, size, status, url) .. ]
- if not isinstance(result, list):
- result = [result]
-
- for res in result:
- self.m.infoCache[res[3]] = res #: TODO: why not assign the res dict directly?
-
- cb(pluginname, result)
-
- self.m.log.debug("Finished Info Fetching for %s" % pluginname)
- except Exception, e:
- self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
- {"name": pluginname, "err": str(e)})
- if self.m.core.debug:
- print_exc()
-
- # generate default results
- if err:
- result = [(url, 0, 3, url) for url in urls]
- cb(pluginname, result)
-
-
- def decryptContainer(self, plugin, url):
- data = []
- # only works on container plugins
-
- self.m.log.debug("Pre decrypting %s with %s" % (url, plugin))
-
- # dummy pyfile
- pyfile = PyFile(self.m.core.files, -1, url, url, 0, 0, "", plugin, -1, -1)
-
- pyfile.initPlugin()
-
- # little plugin lifecycle
- try:
- pyfile.plugin.setup()
- pyfile.plugin.loadToDisk()
- pyfile.plugin.decrypt(pyfile)
- pyfile.plugin.deleteTmp()
-
- for pack in pyfile.plugin.packages:
- pyfile.plugin.urls.extend(pack[1])
-
- data = self.m.core.pluginManager.parseUrls(pyfile.plugin.urls)
-
- self.m.log.debug("Got %d links." % len(data))
-
- except Exception, e:
- self.m.log.debug("Pre decrypting error: %s" % str(e))
- finally:
- pyfile.release()
-
- return data
diff --git a/module/manager/thread/Plugin.py b/module/manager/thread/Plugin.py
deleted file mode 100644
index 20d57c933..000000000
--- a/module/manager/thread/Plugin.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# -*- coding: utf-8 -*-
-# @author: RaNaN
-
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import strftime, gmtime
-from traceback import format_exc
-from pprint import pformat
-from sys import exc_info
-from types import MethodType
-
-from pyload.utils import safe_join
-
-class PluginThread(Thread):
- """abstract base class for thread types"""
-
- #--------------------------------------------------------------------------
- def __init__(self, manager):
- """Constructor"""
- Thread.__init__(self)
- self.setDaemon(True)
- self.m = manager #thread manager
-
-
- def writeDebugReport(self, pyfile):
- """ writes a
- :return:
- """
-
- dump_name = "debug_%s_%s.zip" % (pyfile.pluginname, strftime("%d-%m-%Y_%H-%M-%S"))
- dump = self.getDebugDump(pyfile)
-
- try:
- import zipfile
-
- zip = zipfile.ZipFile(dump_name, "w")
-
- for f in listdir(join("tmp", pyfile.pluginname)):
- try:
- # avoid encoding errors
- zip.write(join("tmp", pyfile.pluginname, f), safe_join(pyfile.pluginname, f))
- except Exception:
- pass
-
- info = zipfile.ZipInfo(safe_join(pyfile.pluginname, "debug_Report.txt"), gmtime())
- info.external_attr = 0644 << 16L # change permissions
-
- zip.writestr(info, dump)
- zip.close()
-
- if not stat(dump_name).st_size:
- raise Exception("Empty Zipfile")
-
- except Exception, e:
- self.m.log.debug("Error creating zip file: %s" % e)
-
- dump_name = dump_name.replace(".zip", ".txt")
- f = open(dump_name, "wb")
- f.write(dump)
- f.close()
-
- self.m.core.log.info("Debug Report written to %s" % dump_name)
-
- def getDebugDump(self, pyfile):
- dump = "pyLoad %s Debug Report of %s %s \n\nTRACEBACK:\n %s \n\nFRAMESTACK:\n" % (
- self.m.core.api.getServerVersion(), pyfile.pluginname, pyfile.plugin.__version__, format_exc())
-
- tb = exc_info()[2]
- stack = []
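- # walk the traceback and collect its frames for the frame-stack dump below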
- while tb:
- stack.append(tb.tb_frame)
- tb = tb.tb_next
-
- for frame in stack[1:]:
- dump += "\nFrame %s in %s at line %s\n" % (frame.f_code.co_name,
- frame.f_code.co_filename,
- frame.f_lineno)
-
- for key, value in frame.f_locals.items():
- dump += "\t%20s = " % key
- try:
- dump += pformat(value) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- del frame
-
- del stack #delete it just to be sure...
-
- dump += "\n\nPLUGIN OBJECT DUMP: \n\n"
-
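- # dump every non-dunder, non-method attribute of the plugin instance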
- for name in dir(pyfile.plugin):
- attr = getattr(pyfile.plugin, name)
- if not name.endswith("__") and type(attr) != MethodType:
- dump += "\t%20s = " % name
- try:
- dump += pformat(attr) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- dump += "\nPYFILE OBJECT DUMP: \n\n"
-
- for name in dir(pyfile):
- attr = getattr(pyfile, name)
- if not name.endswith("__") and type(attr) != MethodType:
- dump += "\t%20s = " % name
- try:
- dump += pformat(attr) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- if pyfile.pluginname in self.m.core.config.plugin:
- dump += "\n\nCONFIG: \n\n"
- dump += pformat(self.m.core.config.plugin[pyfile.pluginname]) + "\n"
-
- return dump
-
- def clean(self, pyfile):
- """ set thread unactive and release pyfile """
- self.active = False
- pyfile.release()
diff --git a/module/manager/thread/Server.py b/module/manager/thread/Server.py
deleted file mode 100644
index f3f174e74..000000000
--- a/module/manager/thread/Server.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-from os.path import exists
-
-import os
-import threading
-import logging
-
-core = None
-setup = None
-log = logging.getLogger("log")
-
-class WebServer(threading.Thread):
- def __init__(self, pycore):
- global core
- threading.Thread.__init__(self)
- self.core = pycore
- core = pycore
- self.running = True
- self.server = pycore.config['webinterface']['server']
- self.https = pycore.config['webinterface']['https']
- self.cert = pycore.config["ssl"]["cert"]
- self.key = pycore.config["ssl"]["key"]
- self.host = pycore.config['webinterface']['host']
- self.port = pycore.config['webinterface']['port']
-
- self.setDaemon(True)
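- # daemon thread: it will not keep the process alive on shutdown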
-
- def run(self):
- global webinterface
- import pyload.webui as webinterface
-
- reset = False
-
- if self.https and (not exists(self.cert) or not exists(self.key)):
- log.warning(_("SSL certificates not found."))
- self.https = False
-
- if self.server in ("lighttpd", "nginx"):
- log.warning(_("Sorry, we dropped support for starting %s directly within pyLoad") % self.server)
- log.warning(_("You can use the threaded server which offers good performance and ssl,"))
- log.warning(_("of course you can still use your existing %s with pyLoads fastcgi server") % self.server)
- log.warning(_("sample configs are located in the pyload/web/servers directory"))
- reset = True
- elif self.server == "fastcgi":
- try:
- import flup
- except Exception:
- log.warning(_("Can't use %(server)s, python-flup is not installed!") % {
- "server": self.server})
- reset = True
-
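- # fall back: try bjoern (lightweight) on non-Windows systems, use the builtin server if it is missing, and the threaded server on Windows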
- if reset or self.server == "lightweight":
- if os.name != "nt":
- try:
- import bjoern
- except Exception, e:
- log.error(_("Error importing lightweight server: %s") % e)
- log.warning(_("You need to download and compile bjoern, https://github.com/jonashaag/bjoern"))
- log.warning(_("Copy the boern.so to the lib folder or use setup.py install"))
- log.warning(_("Of course you need to be familiar with linux and know how to compile software"))
- self.server = "builtin"
- else:
- self.core.log.info(_("Server set to threaded, due to known performance problems on windows."))
- self.core.config['webinterface']['server'] = "threaded"
- self.server = "threaded"
-
- if self.server == "threaded":
- self.start_threaded()
- elif self.server == "fastcgi":
- self.start_fcgi()
- elif self.server == "lightweight":
- self.start_lightweight()
- else:
- self.start_builtin()
-
- def start_builtin(self):
-
- if self.https:
- log.warning(_("This server offers no SSL, please consider using threaded instead"))
-
- self.core.log.info(_("Starting builtin webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
- webinterface.run_simple(host=self.host, port=self.port)
-
- def start_threaded(self):
- if self.https:
- self.core.log.info(_("Starting threaded SSL webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
- else:
- self.cert = ""
- self.key = ""
- self.core.log.info(_("Starting threaded webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
-
- webinterface.run_threaded(host=self.host, port=self.port, cert=self.cert, key=self.key)
-
- def start_fcgi(self):
-
- self.core.log.info(_("Starting fastcgi server: %(host)s:%(port)d") % {"host": self.host, "port": self.port})
- webinterface.run_fcgi(host=self.host, port=self.port)
-
-
- def start_lightweight(self):
- if self.https:
- log.warning(_("This server offers no SSL, please consider using threaded instead"))
-
- self.core.log.info(_("Starting lightweight webserver (bjoern): %(host)s:%(port)d") % {"host": self.host, "port": self.port})
- webinterface.run_lightweight(host=self.host, port=self.port)
-
- def quit(self):
- self.running = False
diff --git a/module/manager/thread/__init__.py b/module/manager/thread/__init__.py
deleted file mode 100644
index 40a96afc6..000000000
--- a/module/manager/thread/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-