author    Armin <Armin@Armin-PC.diedering.lan>  2015-04-12 17:58:45 +0200
committer Armin <Armin@Armin-PC.diedering.lan>  2015-04-12 17:58:45 +0200
commit    c1d652f22a8f90cacb6749b1661c6ff87ffa625b (patch)
tree      f7fc0b037f38c8354ca2df5ef4d596f83398555e /pyload/manager/thread
parent    moved the config-nicer from config-parser to webui-app (diff)
download  pyload-c1d652f22a8f90cacb6749b1661c6ff87ffa625b.tar.xz
fix, fix and more fixes
Diffstat (limited to 'pyload/manager/thread')
-rw-r--r--  pyload/manager/thread/Decrypter.py  19
-rw-r--r--  pyload/manager/thread/Download.py   38
-rw-r--r--  pyload/manager/thread/Info.py       68
3 files changed, 60 insertions, 65 deletions
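
Most of the changes below follow one pattern: the thread classes stop logging through the manager's own log attribute (self.m.log) and use the core's logger instead (self.m.core.log), alongside PEP 8 comment spacing and blank-line cleanups. A minimal sketch of the object layout this relies on, with Core and ThreadManager as stand-ins for the real pyLoad classes (assumed names and constructors, not pyLoad's actual ones):

    import logging

    class Core(object):
        """Stand-in for the pyLoad core, which owns the shared logger."""
        def __init__(self):
            self.log = logging.getLogger("pyload")
            self.debug = False

    class ThreadManager(object):
        """Stand-in for the manager handed to PluginThread subclasses."""
        def __init__(self, core):
            self.core = core

    class SomeThread(object):
        """Stand-in for a PluginThread subclass after this commit."""
        def __init__(self, manager):
            self.m = manager  # threads keep the manager as self.m

        def work(self, name):
            # before: self.m.log.info(...); after: the core's logger is used directly
            self.m.core.log.info("Decrypting starts: %s" % name)
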
diff --git a/pyload/manager/thread/Decrypter.py b/pyload/manager/thread/Decrypter.py
index 51544d1b9..7fcf93e4c 100644
--- a/pyload/manager/thread/Decrypter.py
+++ b/pyload/manager/thread/Decrypter.py
@@ -19,6 +19,7 @@ from pyload.plugin.Plugin import Abort, Fail, Retry
class DecrypterThread(PluginThread):
+
"""thread for decrypting"""
def __init__(self, manager, pyfile):
@@ -42,12 +43,13 @@ class DecrypterThread(PluginThread):
retry = False
try:
- self.m.log.info(_("Decrypting starts: %s") % pyfile.name)
+ self.m.core.log.info(_("Decrypting starts: %s") % pyfile.name)
pyfile.error = ""
pyfile.plugin.preprocessing(self)
except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ self.m.core.log.error(
+ _("Plugin %s is missing a function.") % pyfile.pluginname)
return
except Fail, e:
@@ -55,10 +57,12 @@ class DecrypterThread(PluginThread):
if msg == "offline":
pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ self.m.core.log.warning(
+ _("Download is offline: %s") % pyfile.name)
else:
pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ self.m.core.log.error(
+ _("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
pyfile.error = msg
if self.m.core.debug:
@@ -66,7 +70,7 @@ class DecrypterThread(PluginThread):
return
except Abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
pyfile.setStatus("aborted")
if self.m.core.debug:
@@ -74,13 +78,14 @@ class DecrypterThread(PluginThread):
return
except Retry:
- self.m.log.info(_("Retrying %s") % pyfile.name)
+ self.m.core.log.info(_("Retrying %s") % pyfile.name)
retry = True
return self.run()
except Exception, e:
pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ self.m.core.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {
+ "name": pyfile.name, "msg": str(e)})
pyfile.error = str(e)
if self.m.core.debug:
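
Read as a whole, DecrypterThread.run() above uses the exceptions from pyload.plugin.Plugin as control flow: Fail("offline") marks the file offline, any other Fail marks it failed, Abort sets "aborted", Retry re-runs the thread, and an unexpected exception falls back to "failed". A toy sketch of that dispatch, with a hypothetical run_decrypt helper and bare exception classes (the real code also catches NotImplementedError, writes debug reports, and releases the plugin):

    class Abort(Exception): pass      # stand-ins for pyload.plugin.Plugin exceptions
    class Fail(Exception): pass
    class Retry(Exception): pass

    def run_decrypt(pyfile, log):
        """Toy version of the status dispatch in DecrypterThread.run()."""
        try:
            log.info("Decrypting starts: %s" % pyfile.name)
            pyfile.error = ""
            pyfile.plugin.preprocessing(None)  # the real call passes the thread itself
        except Fail as e:
            msg = e.args[0]
            if msg == "offline":
                pyfile.setStatus("offline")
                log.warning("Download is offline: %s" % pyfile.name)
            else:
                pyfile.setStatus("failed")
                log.error("Decrypting failed: %s | %s" % (pyfile.name, msg))
            pyfile.error = msg
        except Abort:
            log.info("Download aborted: %s" % pyfile.name)
            pyfile.setStatus("aborted")
        except Retry:
            log.info("Retrying %s" % pyfile.name)
            return run_decrypt(pyfile, log)    # DecrypterThread calls self.run() again
        except Exception as e:
            pyfile.setStatus("failed")
            pyfile.error = str(e)
            log.error("Decrypting failed: %s | %s" % (pyfile.name, e))
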
diff --git a/pyload/manager/thread/Download.py b/pyload/manager/thread/Download.py
index c7d21a4ba..d4006d307 100644
--- a/pyload/manager/thread/Download.py
+++ b/pyload/manager/thread/Download.py
@@ -22,6 +22,7 @@ class DownloadThread(PluginThread):
"""thread for downloading files from 'real' hoster plugins"""
#--------------------------------------------------------------------------
+
def __init__(self, manager):
"""Constructor"""
PluginThread.__init__(self, manager)
@@ -49,22 +50,22 @@ class DownloadThread(PluginThread):
try:
if not pyfile.hasPlugin():
continue
- #this pyfile was deleted while queueing
+ # this pyfile was deleted while queueing
pyfile.plugin.checkForSameFiles(starting=True)
- self.m.log.info(_("Download starts: %s" % pyfile.name))
+ self.m.core.log.info(_("Download starts: %s" % pyfile.name))
# start download
self.m.core.addonManager.downloadPreparing(pyfile)
pyfile.error = ""
pyfile.plugin.preprocessing(self)
- self.m.log.info(_("Download finished: %s") % pyfile.name)
+ self.m.core.log.info(_("Download finished: %s") % pyfile.name)
self.m.core.addonManager.downloadFinished(pyfile)
self.m.core.files.checkPackageFinished(pyfile)
except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ self.m.core.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
pyfile.setStatus("failed")
pyfile.error = "Plugin does not work"
self.clean(pyfile)
@@ -72,7 +73,7 @@ class DownloadThread(PluginThread):
except Abort:
try:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
except Exception:
pass
@@ -86,7 +87,7 @@ class DownloadThread(PluginThread):
except Reconnect:
self.queue.put(pyfile)
- #pyfile.req.clearCookies()
+ # pyfile.req.clearCookies()
while self.m.reconnecting.isSet():
sleep(0.5)
@@ -95,7 +96,7 @@ class DownloadThread(PluginThread):
except Retry, e:
reason = e.args[0]
- self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
+ self.m.core.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
self.queue.put(pyfile)
continue
@@ -104,13 +105,13 @@ class DownloadThread(PluginThread):
if msg == "offline":
pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ self.m.core.log.warning(_("Download is offline: %s") % pyfile.name)
elif msg == "temp. offline":
pyfile.setStatus("temp. offline")
- self.m.log.warning(_("Download is temporary offline: %s") % pyfile.name)
+ self.m.core.log.warning(_("Download is temporary offline: %s") % pyfile.name)
else:
pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ self.m.core.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
pyfile.error = msg
if self.m.core.debug:
@@ -127,10 +128,10 @@ class DownloadThread(PluginThread):
code = 0
msg = e.args
- self.m.log.debug("pycurl exception %s: %s" % (code, msg))
+ self.m.core.log.debug("pycurl exception %s: %s" % (code, msg))
if code in (7, 18, 28, 52, 56):
- self.m.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
+ self.m.core.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
wait = time() + 60
pyfile.waitUntil = wait
@@ -141,7 +142,7 @@ class DownloadThread(PluginThread):
break
if pyfile.abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
pyfile.setStatus("aborted")
self.clean(pyfile)
@@ -152,7 +153,7 @@ class DownloadThread(PluginThread):
else:
pyfile.setStatus("failed")
- self.m.log.error("pycurl error %s: %s" % (code, msg))
+ self.m.core.log.error("pycurl error %s: %s" % (code, msg))
if self.m.core.debug:
print_exc()
self.writeDebugReport(pyfile)
@@ -165,7 +166,7 @@ class DownloadThread(PluginThread):
except SkipDownload, e:
pyfile.setStatus("skipped")
- self.m.log.info(
+ self.m.core.log.info(
_("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
self.clean(pyfile)
@@ -177,10 +178,9 @@ class DownloadThread(PluginThread):
continue
-
except Exception, e:
pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ self.m.core.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
pyfile.error = str(e)
if self.m.core.debug:
@@ -196,18 +196,16 @@ class DownloadThread(PluginThread):
pyfile.checkIfProcessed()
exc_clear()
- #pyfile.plugin.req.clean()
+ # pyfile.plugin.req.clean()
self.active = False
pyfile.finishIfDone()
self.m.core.files.save()
-
def put(self, job):
"""assing job to thread"""
self.queue.put(job)
-
def stop(self):
"""stops the thread"""
self.put("quit")
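
The put()/stop() pair at the end of Download.py shows the thread's lifecycle: jobs arrive over a Queue, and stop() pushes the string "quit" as a sentinel that the run loop exits on. A minimal sketch of that shape with an assumed Worker class (the real DownloadThread additionally handles Abort, Retry, Reconnect, and the pycurl error codes listed above):

    from threading import Thread
    try:
        from queue import Queue        # Python 3
    except ImportError:
        from Queue import Queue        # Python 2, as used by pyLoad

    class Worker(Thread):
        def __init__(self):
            Thread.__init__(self)
            self.daemon = True
            self.queue = Queue()
            self.active = False

        def run(self):
            while True:
                self.active = self.queue.get()   # blocks until a job arrives
                if self.active == "quit":        # sentinel pushed by stop()
                    self.active = False
                    return
                self.process(self.active)

        def process(self, job):
            pass                                 # download work would happen here

        def put(self, job):
            """Assign a job to this thread (mirrors DownloadThread.put)."""
            self.queue.put(job)

        def stop(self):
            """Ask the thread to exit after its current job (mirrors DownloadThread.stop)."""
            self.put("quit")
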
diff --git a/pyload/manager/thread/Info.py b/pyload/manager/thread/Info.py
index 4526a07ed..af958fa74 100644
--- a/pyload/manager/thread/Info.py
+++ b/pyload/manager/thread/Info.py
@@ -26,13 +26,13 @@ class InfoThread(PluginThread):
PluginThread.__init__(self, manager)
self.data = data
- self.pid = pid # package id
+ self.pid = pid # package id
# [ .. (name, plugin) .. ]
- self.rid = rid #result id
- self.add = add #add packages instead of return result
+ self.rid = rid # result id
+ self.add = add # add packages instead of return result
- self.cache = [] #accumulated data
+ self.cache = [] # accumulated data
self.start()
@@ -42,35 +42,32 @@ class InfoThread(PluginThread):
plugins = {}
container = []
- for url, plugintype, pluginname in data:
- try:
- plugins[plugintype][pluginname].append(url)
- except Exception:
- plugins[plugintype][pluginname] = [url]
-
- # filter out container plugins
- for name in self.m.core.pluginManager.containerPlugins:
- if name in plugins:
+ for url, plugintype, pluginname in self.data:
+ # filter out container plugins
+ if plugintype == 'container':
container.extend([(name, url) for url in plugins[name]])
+ else:
+ if (plugintype, pluginname) in plugins:
+ plugins[(plugintype, pluginname)].append(url)
+ else:
+ plugins[(plugintype, pluginname)] = [url]
- del plugins[name]
-
- #directly write to database
+ # directly write to database
if self.pid > -1:
- for plugintype, pluginname, urls in plugins.iteritems():
+ for (plugintype, pluginname), urls in plugins.iteritems():
plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
self.m.core.files.save()
elif self.add:
- for plugintype, pluginname, urls in plugins.iteritems():
+ for (plugintype, pluginname), urls in plugins.iteritems():
plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
else:
- #generate default result
+ # generate default result
result = [(url, 0, 3, url) for url in urls]
self.updateCache(pluginname, result)
@@ -82,14 +79,13 @@ class InfoThread(PluginThread):
for k, v in packs:
self.m.core.api.addPackage(k, v)
- #empty cache
+ # empty cache
del self.cache[:]
- else: #post the results
-
+ else: # post the results
for name, url in container:
- #attach container content
+ # attach container content
try:
data = self.decryptContainer(name, url)
except Exception:
@@ -110,12 +106,12 @@ class InfoThread(PluginThread):
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
- #force to process cache
+ # force to process cache
if self.cache:
self.updateResult(pluginname, [], True)
else:
- #generate default result
+ # generate default result
result = [(url, 0, 3, url) for url in urls]
self.updateResult(pluginname, result, True)
@@ -124,20 +120,18 @@ class InfoThread(PluginThread):
self.m.timestamp = time() + 5 * 60
-
def updateDB(self, plugin, result):
self.m.core.files.updateFileInfo(result, self.pid)
def updateResult(self, plugin, result, force=False):
- #parse package name and generate result
- #accumulate results
+ # parse package name and generate result
+ # accumulate results
self.cache.extend(result)
if len(self.cache) >= 20 or force:
- #used for package generating
- tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
- for name, size, status, url in self.cache]
+ # used for package generating
+ tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size)))) for name, size, status, url in self.cache]
data = parseNames(tmp)
result = {}
@@ -155,8 +149,8 @@ class InfoThread(PluginThread):
def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
try:
- result = [] #result loaded from cache
- process = [] #urls to process
+ result = [] # result loaded from cache
+ process = [] # urls to process
for url in urls:
if url in self.m.infoCache:
result.append(self.m.infoCache[url])
@@ -170,19 +164,18 @@ class InfoThread(PluginThread):
if process:
self.m.log.debug("Run Info Fetching for %s" % pluginname)
for result in plugin.getInfo(process):
- #result = [ .. (name, size, status, url) .. ]
+ # result = [ .. (name, size, status, url) .. ]
if not type(result) == list:
result = [result]
for res in result:
- self.m.infoCache[res[3]] = res #: why don't assign res dict directly?
+ self.m.infoCache[res[3]] = res # : why don't assign res dict directly?
cb(pluginname, result)
self.m.log.debug("Finished Info Fetching for %s" % pluginname)
except Exception, e:
- self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
- {"name": pluginname, "err": str(e)})
+ self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") % {"name": pluginname, "err": str(e)})
if self.m.core.debug:
print_exc()
@@ -191,7 +184,6 @@ class InfoThread(PluginThread):
result = [(url, 0, 3, url) for url in urls]
cb(pluginname, result)
-
def decryptContainer(self, plugin, url):
data = []
# only works on container plugins
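
Two behavioural notes on the Info.py hunks above: run() now groups URLs under a (plugintype, pluginname) tuple key instead of a nested dict and splits container links out while iterating, and fetchForPlugin() answers from self.m.infoCache first, only calling plugin.getInfo() for unseen URLs. A cleaned-up sketch of both patterns with assumed helper names (the container branch in the actual hunk still reads from the old plugins[name] shape, so this shows the apparent intent rather than the literal code):

    def group_urls(data):
        """data: iterable of (url, plugintype, pluginname) tuples."""
        plugins = {}          # {(plugintype, pluginname): [url, ...]}
        container = []        # [(pluginname, url), ...] for container plugins
        for url, plugintype, pluginname in data:
            if plugintype == "container":
                container.append((pluginname, url))
            else:
                plugins.setdefault((plugintype, pluginname), []).append(url)
        return plugins, container

    def fetch_for_plugin(urls, info_cache, get_info):
        """get_info: callable standing in for plugin.getInfo(); yields
        (name, size, status, url) tuples for the URLs it is given."""
        results = [info_cache[url] for url in urls if url in info_cache]
        process = [url for url in urls if url not in info_cache]
        if process:
            for res in get_info(process):
                info_cache[res[3]] = res         # cache keyed by url, as in the diff
                results.append(res)
        return results
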