summaryrefslogtreecommitdiffstats
path: root/pyload/manager
diff options
context:
space:
mode:
Diffstat (limited to 'pyload/manager')
-rw-r--r--pyload/manager/Addon.py2
-rw-r--r--pyload/manager/Plugin.py64
-rw-r--r--pyload/manager/Thread.py30
-rw-r--r--pyload/manager/event/Scheduler.py2
-rw-r--r--pyload/manager/thread/Decrypter.py18
-rw-r--r--pyload/manager/thread/Download.py39
-rw-r--r--pyload/manager/thread/Info.py70
7 files changed, 99 insertions, 126 deletions
diff --git a/pyload/manager/Addon.py b/pyload/manager/Addon.py
index 26a2735cf..9ce3a4f8f 100644
--- a/pyload/manager/Addon.py
+++ b/pyload/manager/Addon.py
@@ -121,7 +121,7 @@ class AddonManager(object):
except Exception:
self.core.log.warning(_("Failed activating %(name)s") % {"name": pluginname})
- if self.core.debug or True:
+ if self.core.debug:
traceback.print_exc()
self.core.log.info(_("Activated %ss: %s") % (type, ", ".join(sorted(active))))
diff --git a/pyload/manager/Plugin.py b/pyload/manager/Plugin.py
index 739fa3538..918f6de8a 100644
--- a/pyload/manager/Plugin.py
+++ b/pyload/manager/Plugin.py
@@ -25,30 +25,27 @@ class PluginManager(object):
CONFIG = re.compile(r'__config\s*=\s*\[([^\]]+)', re.M)
DESC = re.compile(r'__description\s*=\s*("|"""|\')([^"\']+)')
-
def __init__(self, core):
self.core = core
self.plugins = {}
self.createIndex()
- #register for import addon
+ # register for import addon
sys.meta_path.append(self)
-
def loadTypes(self):
rootdir = join(pypath, "pyload", "plugin")
userdir = "userplugins"
types = set().union(*[[d for d in listdir(p) if isdir(join(p, d))]
- for p in (rootdir, userdir) if exists(p)])
+ for p in (rootdir, userdir) if exists(p)])
if not types:
self.core.log.critical(_("No plugins found!"))
self.TYPES = list(set(self.TYPES) | types)
-
def createIndex(self):
"""create information for all plugins available"""
@@ -67,7 +64,6 @@ class PluginManager(object):
self.core.log.debug("Created index of plugins")
-
def parse(self, folder, rootplugins={}):
"""
returns dict with information
@@ -168,7 +164,7 @@ class PluginManager(object):
except Exception:
self.core.log.error("Invalid config in %s: %s" % (name, config))
- elif folder in ("addon", "hook"): #force config creation
+ elif folder in ("addon", "hook"): # force config creation
desc = self.DESC.findall(content)
desc = desc[0][1] if desc else ""
config = (["activated", "bool", "Activated", False],)
@@ -183,7 +179,6 @@ class PluginManager(object):
return plugins
-
def parseUrls(self, urls):
"""parse plugins for given list of urls"""
@@ -200,28 +195,28 @@ class PluginManager(object):
res.append((url, last[0], last[1]))
continue
- for type in self.TYPES:
- for name, plugin in self.plugins[type]:
-
- m = None
+ for plugintype in self.TYPES:
+ m = None
+ for name, plugin in self.plugins[plugintype].iteritems():
try:
if 'pattern' in plugin:
m = plugin['re'].match(url)
except KeyError:
self.core.log.error(_("Plugin [%(type)s] %(name)s skipped due broken pattern")
- % {'name': name, 'type': type})
+ % {'name': plugin['name'], 'type': plugintype})
if m:
- res.append((url, type, name))
- last = (type, name, plugin)
+ res.append((url, plugintype, name))
+ last = (plugintype, name, plugin)
break
- else:
- res.append((url, "internal", "BasePlugin"))
-
+ if m:
+ break
+ else:
+ res.append((url, "internal", "BasePlugin"))
return res
-
def findPlugin(self, type, name):
if type not in self.plugins:
return None
@@ -234,7 +229,6 @@ class PluginManager(object):
else:
return self.plugins[type][name]
-
def getPlugin(self, type, name, original=False):
"""return plugin module from hoster|decrypter|container"""
plugin = self.findPlugin(type, name)
@@ -247,7 +241,6 @@ class PluginManager(object):
else:
return self.loadModule(type, name)
-
def getPluginName(self, type, name):
""" used to obtain new name if other plugin was injected"""
plugin = self.findPlugin(type, name)
@@ -260,7 +253,6 @@ class PluginManager(object):
return name
-
def loadModule(self, type, name):
""" Returns loaded module for plugin
@@ -284,13 +276,12 @@ class PluginManager(object):
print_exc()
else:
- plugins[name]['module'] = module #: cache import, maybe unneeded
+ plugins[name]['module'] = module #: cache import, maybe unneeded
self.core.log.debug(_("Loaded plugin: [%(type)s] %(name)s (v%(version).2f)")
% {'name': name, 'type': type, 'version': plugins[name]['version']})
return module
-
def loadClass(self, type, name):
"""Returns the class of a plugin with the same name"""
module = self.loadModule(type, name)
@@ -299,33 +290,30 @@ class PluginManager(object):
else:
return None
-
def getAccountPlugins(self):
"""return list of account plugin names"""
return self.accountPlugins.keys()
-
def find_module(self, fullname, path=None):
- #redirecting imports if necesarry
- if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #seperate pyload plugins
+ # redirecting imports if necessary
+ if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): # separate pyload plugins
if fullname.startswith(self.USERROOT): user = 1
- else: user = 0 #used as bool and int
+ else: user = 0 # used as bool and int
split = fullname.split(".")
if len(split) != 4 - user: return
type, name = split[2 - user:4 - user]
if type in self.plugins and name in self.plugins[type]:
- #userplugin is a newer version
+ # userplugin is a newer version
if not user and self.plugins[type][name]['user']:
return self
- #imported from userdir, but pyloads is newer
+ # imported from userdir, but pyloads is newer
if user and not self.plugins[type][name]['user']:
return self
-
def load_module(self, name, replace=True):
- if name not in sys.modules: #could be already in modules
+ if name not in sys.modules: # could be already in modules
if replace:
if self.ROOT in name:
newname = name.replace(self.ROOT, self.USERROOT)
@@ -339,13 +327,12 @@ class PluginManager(object):
self.core.log.debug("Redirected import %s -> %s" % (name, newname))
module = __import__(newname, globals(), locals(), [plugin])
- #inject under new an old name
+ # inject under new an old name
sys.modules[name] = module
sys.modules[newname] = module
return sys.modules[name]
-
def reloadPlugins(self, type_plugins):
""" reload and reindex plugins """
if not type_plugins:
@@ -356,14 +343,14 @@ class PluginManager(object):
reloaded = []
as_dict = {}
- for t,n in type_plugins:
+ for t, n in type_plugins:
if t in as_dict:
as_dict[t].append(n)
else:
as_dict[t] = [n]
for type in as_dict.iterkeys():
- if type in ("addon", "internal"): #: do not reload them because would cause to much side effects
+ if type in ("addon", "internal"): # : do not reload them because would cause to much side effects
self.core.log.debug("Skipping reload for plugin: [%(type)s] %(name)s" % {'name': plugin, 'type': type})
continue
@@ -381,7 +368,7 @@ class PluginManager(object):
else:
reloaded.append((type, plugin))
- #index creation
+ # index creation
self.plugins[type] = self.parse(type)
setattr(self, "%sPlugins" % type, self.plugins[type])
@@ -391,7 +378,6 @@ class PluginManager(object):
return reloaded #: return a list of the plugins successfully reloaded
-
def reloadPlugin(self, type_plugin):
""" reload and reindex ONE plugin """
return True if self.reloadPlugins(type_plugin) else False
diff --git a/pyload/manager/Thread.py b/pyload/manager/Thread.py
index 6c9304e87..753a8c251 100644
--- a/pyload/manager/Thread.py
+++ b/pyload/manager/Thread.py
@@ -22,7 +22,6 @@ from pyload.utils import freeSpace, lock
class ThreadManager(object):
"""manages the download threads, assign jobs, reconnect etc"""
-
def __init__(self, core):
"""Constructor"""
self.core = core
@@ -34,7 +33,7 @@ class ThreadManager(object):
self.reconnecting = Event()
self.reconnecting.clear()
- self.downloaded = 0 #number of files downloaded since last cleanup
+ self.downloaded = 0 # number of files downloaded since last cleanup
self.lock = Lock()
@@ -47,7 +46,7 @@ class ThreadManager(object):
# threads which are fetching hoster results
self.infoResults = {}
- #timeout for cache purge
+ # timeout for cache purge
self.timestamp = 0
pycurl.global_init(pycurl.GLOBAL_DEFAULT)
@@ -55,7 +54,6 @@ class ThreadManager(object):
for i in range(0, self.core.config.get("download", "max_downloads")):
self.createThread()
-
def createThread(self):
"""create a download thread"""
@@ -83,7 +81,6 @@ class ThreadManager(object):
return rid
-
@lock
def getInfoResult(self, rid):
"""returns result and clears it"""
@@ -112,13 +109,12 @@ class ThreadManager(object):
"""get a id list of all pyfiles processed"""
return [x.id for x in self.getActiveFiles()]
-
def work(self):
"""run all task which have to be done (this is for repetivive call by core)"""
try:
self.tryReconnect()
except Exception, e:
- self.core.log.error(_("Reconnect Failed: %s") % str(e) )
+ self.core.log.error(_("Reconnect Failed: %s") % str(e))
self.reconnecting.clear()
if self.core.debug:
print_exc()
@@ -133,7 +129,7 @@ class ThreadManager(object):
sleep(0.5)
self.assignJob()
- #it may be failed non critical so we try it again
+ # it may be failed non critical so we try it again
if (self.infoCache or self.infoResults) and self.timestamp < time():
self.infoCache.clear()
@@ -162,7 +158,7 @@ class ThreadManager(object):
self.reconnecting.set()
- #Do reconnect
+ # Do reconnect
self.core.log.info(_("Starting reconnect"))
while [x.active.plugin.waiting for x in self.threads if x.active].count(True) != 0:
@@ -175,7 +171,7 @@ class ThreadManager(object):
self.core.log.debug("Old IP: %s" % ip)
try:
- reconn = Popen(self.core.config['reconnect']['method'], bufsize=-1, shell=True)#, stdout=subprocess.PIPE)
+ reconn = Popen(self.core.config['reconnect']['method'], bufsize=-1, shell=True) # , stdout=subprocess.PIPE)
except Exception:
self.core.log.warning(_("Failed executing reconnect script!"))
self.core.config["reconnect"]["activated"] = False
@@ -196,7 +192,7 @@ class ThreadManager(object):
def getIP(self):
"""retrieve current ip"""
services = [("http://automation.whatismyip.com/n09230945.asp", "(\S+)"),
- ("http://checkip.dyndns.org/",".*Current IP Address: (\S+)</body>.*")]
+ ("http://checkip.dyndns.org/", ".*Current IP Address: (\S+)</body>.*")]
ip = ""
for i in range(10):
@@ -224,7 +220,6 @@ class ThreadManager(object):
if free:
free[0].put("quit")
-
def cleanPycurl(self):
""" make a global curl cleanup (currently ununused) """
if self.processingIds():
@@ -241,13 +236,13 @@ class ThreadManager(object):
if self.pause or not self.core.api.isTimeDownload(): return
- #if self.downloaded > 20:
+ # if self.downloaded > 20:
# if not self.cleanPyCurl(): return
free = [x for x in self.threads if not x.active]
inuse = set([(x.active.pluginname, self.getLimit(x)) for x in self.threads if x.active and x.active.hasPlugin() and x.active.plugin.account])
- inuse = map(lambda x: (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])) ,inuse)
+ inuse = map(lambda x: (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])), inuse)
onlimit = [x[0] for x in inuse if x[1] > 0 and x[2] >= x[1]]
occ = [x.active.pluginname for x in self.threads if x.active and x.active.hasPlugin() and not x.active.plugin.multiDL] + onlimit
@@ -266,7 +261,7 @@ class ThreadManager(object):
job.release()
return
- if job.plugin.__type == "hoster":
+ if job.plugin.getPluginType() == "hoster":
spaceLeft = freeSpace(self.core.config["general"]["download_folder"]) / 1024 / 1024
if spaceLeft < self.core.config["general"]["min_free_space"]:
self.core.log.warning(_("Not enough space left on device"))
@@ -278,18 +273,17 @@ class ThreadManager(object):
thread.put(job)
else:
- #put job back
+ # put job back
if occ not in self.core.files.jobCache:
self.core.files.jobCache[occ] = []
self.core.files.jobCache[occ].append(job.id)
- #check for decrypt jobs
+ # check for decrypt jobs
job = self.core.files.getDecryptJob()
if job:
job.initPlugin()
thread = DecrypterThread(self, job)
-
else:
thread = DecrypterThread(self, job)
diff --git a/pyload/manager/event/Scheduler.py b/pyload/manager/event/Scheduler.py
index fd428a956..2cb537383 100644
--- a/pyload/manager/event/Scheduler.py
+++ b/pyload/manager/event/Scheduler.py
@@ -5,6 +5,7 @@ from time import time
from heapq import heappop, heappush
from threading import Lock, Thread
+
class AlreadyCalled(Exception):
pass
@@ -40,7 +41,6 @@ class Scheduler(object):
self.queue.put((t, j))
return d
-
def removeJob(self, d):
"""
:param d: defered object
diff --git a/pyload/manager/thread/Decrypter.py b/pyload/manager/thread/Decrypter.py
index 51544d1b9..12806163c 100644
--- a/pyload/manager/thread/Decrypter.py
+++ b/pyload/manager/thread/Decrypter.py
@@ -19,6 +19,7 @@ from pyload.plugin.Plugin import Abort, Fail, Retry
class DecrypterThread(PluginThread):
+
"""thread for decrypting"""
def __init__(self, manager, pyfile):
@@ -42,12 +43,12 @@ class DecrypterThread(PluginThread):
retry = False
try:
- self.m.log.info(_("Decrypting starts: %s") % pyfile.name)
+ self.m.core.log.info(_("Decrypting starts: %s") % pyfile.name)
pyfile.error = ""
pyfile.plugin.preprocessing(self)
except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ self.m.core.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
return
except Fail, e:
@@ -55,10 +56,12 @@ class DecrypterThread(PluginThread):
if msg == "offline":
pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ self.m.core.log.warning(
+ _("Download is offline: %s") % pyfile.name)
else:
pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ self.m.core.log.error(
+ _("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
pyfile.error = msg
if self.m.core.debug:
@@ -66,7 +69,7 @@ class DecrypterThread(PluginThread):
return
except Abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
pyfile.setStatus("aborted")
if self.m.core.debug:
@@ -74,13 +77,14 @@ class DecrypterThread(PluginThread):
return
except Retry:
- self.m.log.info(_("Retrying %s") % pyfile.name)
+ self.m.core.log.info(_("Retrying %s") % pyfile.name)
retry = True
return self.run()
except Exception, e:
pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ self.m.core.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {
+ "name": pyfile.name, "msg": str(e)})
pyfile.error = str(e)
if self.m.core.debug:
diff --git a/pyload/manager/thread/Download.py b/pyload/manager/thread/Download.py
index c7d21a4ba..37fe844ec 100644
--- a/pyload/manager/thread/Download.py
+++ b/pyload/manager/thread/Download.py
@@ -22,6 +22,7 @@ class DownloadThread(PluginThread):
"""thread for downloading files from 'real' hoster plugins"""
#--------------------------------------------------------------------------
+
def __init__(self, manager):
"""Constructor"""
PluginThread.__init__(self, manager)
@@ -49,22 +50,22 @@ class DownloadThread(PluginThread):
try:
if not pyfile.hasPlugin():
continue
- #this pyfile was deleted while queueing
+ # this pyfile was deleted while queueing
pyfile.plugin.checkForSameFiles(starting=True)
- self.m.log.info(_("Download starts: %s" % pyfile.name))
+ self.m.core.log.info(_("Download starts: %s" % pyfile.name))
# start download
self.m.core.addonManager.downloadPreparing(pyfile)
pyfile.error = ""
pyfile.plugin.preprocessing(self)
- self.m.log.info(_("Download finished: %s") % pyfile.name)
+ self.m.core.log.info(_("Download finished: %s") % pyfile.name)
self.m.core.addonManager.downloadFinished(pyfile)
self.m.core.files.checkPackageFinished(pyfile)
except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ self.m.core.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
pyfile.setStatus("failed")
pyfile.error = "Plugin does not work"
self.clean(pyfile)
@@ -72,7 +73,7 @@ class DownloadThread(PluginThread):
except Abort:
try:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
except Exception:
pass
@@ -86,7 +87,7 @@ class DownloadThread(PluginThread):
except Reconnect:
self.queue.put(pyfile)
- #pyfile.req.clearCookies()
+ # pyfile.req.clearCookies()
while self.m.reconnecting.isSet():
sleep(0.5)
@@ -95,7 +96,7 @@ class DownloadThread(PluginThread):
except Retry, e:
reason = e.args[0]
- self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
+ self.m.core.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
self.queue.put(pyfile)
continue
@@ -104,13 +105,13 @@ class DownloadThread(PluginThread):
if msg == "offline":
pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ self.m.core.log.warning(_("Download is offline: %s") % pyfile.name)
elif msg == "temp. offline":
pyfile.setStatus("temp. offline")
- self.m.log.warning(_("Download is temporary offline: %s") % pyfile.name)
+ self.m.core.log.warning(_("Download is temporary offline: %s") % pyfile.name)
else:
pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ self.m.core.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
pyfile.error = msg
if self.m.core.debug:
@@ -127,10 +128,10 @@ class DownloadThread(PluginThread):
code = 0
msg = e.args
- self.m.log.debug("pycurl exception %s: %s" % (code, msg))
+ self.m.core.log.debug("pycurl exception %s: %s" % (code, msg))
if code in (7, 18, 28, 52, 56):
- self.m.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
+ self.m.core.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
wait = time() + 60
pyfile.waitUntil = wait
@@ -141,7 +142,7 @@ class DownloadThread(PluginThread):
break
if pyfile.abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
pyfile.setStatus("aborted")
self.clean(pyfile)
@@ -152,7 +153,7 @@ class DownloadThread(PluginThread):
else:
pyfile.setStatus("failed")
- self.m.log.error("pycurl error %s: %s" % (code, msg))
+ self.m.core.log.error("pycurl error %s: %s" % (code, msg))
if self.m.core.debug:
print_exc()
self.writeDebugReport(pyfile)
@@ -165,8 +166,7 @@ class DownloadThread(PluginThread):
except SkipDownload, e:
pyfile.setStatus("skipped")
- self.m.log.info(
- _("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
+ self.m.core.log.info(_("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
self.clean(pyfile)
@@ -177,10 +177,9 @@ class DownloadThread(PluginThread):
continue
-
except Exception, e:
pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ self.m.core.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
pyfile.error = str(e)
if self.m.core.debug:
@@ -196,18 +195,16 @@ class DownloadThread(PluginThread):
pyfile.checkIfProcessed()
exc_clear()
- #pyfile.plugin.req.clean()
+ # pyfile.plugin.req.clean()
self.active = False
pyfile.finishIfDone()
self.m.core.files.save()
-
def put(self, job):
"""assing job to thread"""
self.queue.put(job)
-
def stop(self):
"""stops the thread"""
self.put("quit")
diff --git a/pyload/manager/thread/Info.py b/pyload/manager/thread/Info.py
index 4526a07ed..edc9489e9 100644
--- a/pyload/manager/thread/Info.py
+++ b/pyload/manager/thread/Info.py
@@ -26,13 +26,13 @@ class InfoThread(PluginThread):
PluginThread.__init__(self, manager)
self.data = data
- self.pid = pid # package id
+ self.pid = pid # package id
# [ .. (name, plugin) .. ]
- self.rid = rid #result id
- self.add = add #add packages instead of return result
+ self.rid = rid # result id
+ self.add = add # add packages instead of return result
- self.cache = [] #accumulated data
+ self.cache = [] # accumulated data
self.start()
@@ -42,35 +42,32 @@ class InfoThread(PluginThread):
plugins = {}
container = []
- for url, plugintype, pluginname in data:
- try:
- plugins[plugintype][pluginname].append(url)
- except Exception:
- plugins[plugintype][pluginname] = [url]
-
- # filter out container plugins
- for name in self.m.core.pluginManager.containerPlugins:
- if name in plugins:
- container.extend([(name, url) for url in plugins[name]])
-
- del plugins[name]
+ for url, plugintype, pluginname in self.data:
+ # filter out container plugins
+ if plugintype == 'container':
+ container.append((pluginname, url))
+ else:
+ if (plugintype, pluginname) in plugins:
+ plugins[(plugintype, pluginname)].append(url)
+ else:
+ plugins[(plugintype, pluginname)] = [url]
- #directly write to database
+ # directly write to database
if self.pid > -1:
- for plugintype, pluginname, urls in plugins.iteritems():
+ for (plugintype, pluginname), urls in plugins.iteritems():
plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
self.m.core.files.save()
elif self.add:
- for plugintype, pluginname, urls in plugins.iteritems():
+ for (plugintype, pluginname), urls in plugins.iteritems():
plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
else:
- #generate default result
+ # generate default result
result = [(url, 0, 3, url) for url in urls]
self.updateCache(pluginname, result)
@@ -82,14 +79,13 @@ class InfoThread(PluginThread):
for k, v in packs:
self.m.core.api.addPackage(k, v)
- #empty cache
+ # empty cache
del self.cache[:]
- else: #post the results
-
+ else: # post the results
for name, url in container:
- #attach container content
+ # attach container content
try:
data = self.decryptContainer(name, url)
except Exception:
@@ -110,12 +106,12 @@ class InfoThread(PluginThread):
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
- #force to process cache
+ # force to process cache
if self.cache:
self.updateResult(pluginname, [], True)
else:
- #generate default result
+ # generate default result
result = [(url, 0, 3, url) for url in urls]
self.updateResult(pluginname, result, True)
@@ -124,20 +120,18 @@ class InfoThread(PluginThread):
self.m.timestamp = time() + 5 * 60
-
def updateDB(self, plugin, result):
self.m.core.files.updateFileInfo(result, self.pid)
def updateResult(self, plugin, result, force=False):
- #parse package name and generate result
- #accumulate results
+ # parse package name and generate result
+ # accumulate results
self.cache.extend(result)
if len(self.cache) >= 20 or force:
- #used for package generating
- tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
- for name, size, status, url in self.cache]
+ # used for package generating
+ tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size)))) for name, size, status, url in self.cache]
data = parseNames(tmp)
result = {}
@@ -155,8 +149,8 @@ class InfoThread(PluginThread):
def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
try:
- result = [] #result loaded from cache
- process = [] #urls to process
+ result = [] # result loaded from cache
+ process = [] # urls to process
for url in urls:
if url in self.m.infoCache:
result.append(self.m.infoCache[url])
@@ -170,19 +164,18 @@ class InfoThread(PluginThread):
if process:
self.m.log.debug("Run Info Fetching for %s" % pluginname)
for result in plugin.getInfo(process):
- #result = [ .. (name, size, status, url) .. ]
+ # result = [ .. (name, size, status, url) .. ]
if not type(result) == list:
result = [result]
for res in result:
- self.m.infoCache[res[3]] = res #: why don't assign res dict directly?
+ self.m.infoCache[res[3]] = res #: why not assign res dict directly?
cb(pluginname, result)
self.m.log.debug("Finished Info Fetching for %s" % pluginname)
except Exception, e:
- self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
- {"name": pluginname, "err": str(e)})
+ self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") % {"name": pluginname, "err": str(e)})
if self.m.core.debug:
print_exc()
@@ -191,7 +184,6 @@ class InfoThread(PluginThread):
result = [(url, 0, 3, url) for url in urls]
cb(pluginname, result)
-
def decryptContainer(self, plugin, url):
data = []
# only works on container plugins