author     Armin <Armin@Armin-PC.diedering.lan>  2015-04-12 17:58:45 +0200
committer  Armin <Armin@Armin-PC.diedering.lan>  2015-04-12 17:58:45 +0200
commit     c1d652f22a8f90cacb6749b1661c6ff87ffa625b (patch)
tree       f7fc0b037f38c8354ca2df5ef4d596f83398555e
parent     moved the config-nicer from config-parser to webui-app (diff)
download   pyload-c1d652f22a8f90cacb6749b1661c6ff87ffa625b.tar.xz
fix, fix and more fixes
-rw-r--r-- | .gitignore | 1
-rw-r--r-- | pyload/database/File.py | 45
-rw-r--r-- | pyload/datatype/File.py | 40
-rw-r--r-- | pyload/manager/Plugin.py | 66
-rw-r--r-- | pyload/manager/Thread.py | 30
-rw-r--r-- | pyload/manager/event/Scheduler.py | 2
-rw-r--r-- | pyload/manager/thread/Decrypter.py | 19
-rw-r--r-- | pyload/manager/thread/Download.py | 38
-rw-r--r-- | pyload/manager/thread/Info.py | 68
-rw-r--r-- | pyload/network/HTTPRequest.py | 22
-rw-r--r-- | pyload/plugin/OCR.py | 30
-rw-r--r-- | pyload/plugin/Plugin.py | 76
-rw-r--r-- | pyload/plugin/addon/UpdateManager.py | 44
-rw-r--r-- | pyload/plugin/captcha/AdYouLike.py | 16
-rw-r--r-- | pyload/plugin/captcha/ReCaptcha.py | 58
-rw-r--r-- | pyload/plugin/crypter/NCryptIn.py | 2
-rw-r--r-- | pyload/plugin/crypter/SafelinkingNet.py | 2
-rw-r--r-- | pyload/plugin/hoster/BitshareCom.py | 2
-rw-r--r-- | pyload/plugin/hoster/CatShareNet.py | 2
-rw-r--r-- | pyload/plugin/hoster/CrockoCom.py | 2
-rw-r--r-- | pyload/plugin/hoster/DateiTo.py | 2
-rw-r--r-- | pyload/plugin/hoster/DepositfilesCom.py | 2
-rw-r--r-- | pyload/plugin/hoster/DlFreeFr.py | 2
-rw-r--r-- | pyload/plugin/hoster/FilecloudIo.py | 2
-rw-r--r-- | pyload/plugin/hoster/FilepostCom.py | 2
-rw-r--r-- | pyload/plugin/hoster/FilerNet.py | 10
-rw-r--r-- | pyload/plugin/hoster/KingfilesNet.py | 2
-rw-r--r-- | pyload/plugin/hoster/LoadTo.py | 2
-rw-r--r-- | pyload/plugin/hoster/LuckyShareNet.py | 2
-rw-r--r-- | pyload/plugin/hoster/OboomCom.py | 2
-rw-r--r-- | pyload/plugin/hoster/TurbobitNet.py | 2
-rw-r--r-- | pyload/plugin/hoster/UpstoreNet.py | 2
-rw-r--r-- | pyload/plugin/internal/SimpleHoster.py | 82
-rw-r--r-- | pyload/plugin/internal/XFSHoster.py | 2
-rw-r--r-- | pyload/utils/__init__.py | 15
-rw-r--r-- | pyload/webui/app/pyloadweb.py | 42
36 files changed, 307 insertions, 431 deletions
diff --git a/.gitignore b/.gitignore
index 5b222e058..14e8aa543 100644
--- a/.gitignore
+++ b/.gitignore
@@ -80,3 +80,4 @@ _build/
module/
paver-minilib.zip
/setup.sh
+/tesseract/
diff --git a/pyload/database/File.py b/pyload/database/File.py
index cd1b0d044..1fab376b1 100644
--- a/pyload/database/File.py
+++ b/pyload/database/File.py
@@ -27,18 +27,18 @@ class FileHandler(object):
# translations
self.statusMsg = [_("finished"), _("offline"), _("online"), _("queued"), _("skipped"), _("waiting"), _("temp. offline"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), _("downloading"), _("processing"), _("unknown")]
- self.cache = {} #holds instances for files
+ self.cache = {} # holds instances for files
self.packageCache = {} # same for packages
#@TODO: purge the cache
self.jobCache = {}
- self.lock = RLock() #@TODO should be a Lock w/o R
+ self.lock = RLock() # @TODO should be a Lock w/o R
#self.lock._Verbose__verbose = True
- self.filecount = -1 # if an invalid value is set get current value from db
- self.queuecount = -1 #number of package to be loaded
- self.unchanged = False #determines if any changes was made since last call
+ self.filecount = -1 # if an invalid value is set get current value from db
+ self.queuecount = -1 # number of package to be loaded
+ self.unchanged = False # determines if any changes was made since last call
self.db = self.core.db
@@ -300,7 +300,7 @@ class FileHandler(object):
pyfile = self.getFile(self.jobCache[occ].pop())
else:
- self.jobCache = {} #better not caching to much
+ self.jobCache = {} # better not caching to much
jobs = self.db.getJob(occ)
jobs.reverse()
self.jobCache[occ] = jobs
@@ -313,7 +313,6 @@ class FileHandler(object):
#@TODO: maybe the new job has to be approved...
-
#pyfile = self.getFile(self.jobCache[occ].pop())
return pyfile
@@ -401,7 +400,6 @@ class FileHandler(object):
self.cache[id].error = ""
self.cache[id].abortDownload()
-
self.db.restartFile(id)
e = UpdateEvent("file", id, "collector" if not self.getFile(id).package().queue else "queue")
@@ -515,7 +513,6 @@ class FileHandler(object):
self.core.addonManager.packageFinished(pyfile.package())
pyfile.package().setFinished = True
-
def reCheckPackage(self, pid):
""" recheck links in package """
data = self.db.getPackageData(pid)
@@ -540,7 +537,7 @@ class FileHandler(object):
new_packs = self.db.getAllPackages(0)
new_packs.update(self.db.getAllPackages(1))
- #get new packages only from db
+ # get new packages only from db
deleted = []
for id in old_packs.iterkeys():
@@ -556,6 +553,7 @@ class FileHandler(object):
""" restart all failed links """
self.db.restartFailed()
+
class FileMethods(object):
@style.queue
def filecount(self, queue):
@@ -596,7 +594,7 @@ class FileMethods(object):
@style.queue
def addLink(self, url, name, plugin, package):
order = self._nextFileOrder(package)
- self.c.execute('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', (url, name, (plugintype, pluginname), package, order))
+ self.c.execute('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', (url, name, ".".join(plugin), package, order))
return self.c.lastrowid
@style.queue
@@ -604,7 +602,7 @@ class FileMethods(object):
""" links is a list of tupels (url, plugin)"""
order = self._nextFileOrder(package)
orders = [order + x for x in range(len(links))]
- links = [(x[0], x[0], (x[1], x[2]), package, o) for x, o in zip(links, orders)]
+ links = [(x[0], x[0], ".".join((x[1], x[2])), package, o) for x, o in zip(links, orders)]
self.c.executemany('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', links)
@style.queue
@@ -615,18 +613,15 @@ class FileMethods(object):
@style.queue
def deletePackage(self, p):
-
self.c.execute('DELETE FROM links WHERE package=?', (str(p.id),))
self.c.execute('DELETE FROM packages WHERE id=?', (str(p.id),))
self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=?', (p.order, p.queue))
@style.queue
def deleteLink(self, f):
-
self.c.execute('DELETE FROM links WHERE id=?', (str(f.id),))
self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder > ? AND package=?', (f.order, str(f.packageid)))
-
@style.queue
def getAllLinks(self, q):
"""return information about all links in queue q
@@ -653,7 +648,7 @@ class FileMethods(object):
'status': r[4],
'statusmsg': self.manager.statusMsg[r[4]],
'error': r[5],
- 'plugin': r[6],
+ 'plugin': tuple(r[6].split('.')),
'package': r[7],
'order': r[8],
}
@@ -689,7 +684,7 @@ class FileMethods(object):
'queue': r[5],
'order': r[6],
'sizetotal': int(r[7]),
- 'sizedone': r[8] if r[8] else 0, #these can be None
+ 'sizedone': r[8] if r[8] else 0, # these can be None
'linksdone': r[9] if r[9] else 0,
'linkstotal': r[10],
'links': {}
@@ -714,7 +709,7 @@ class FileMethods(object):
'status': r[4],
'statusmsg': self.manager.statusMsg[r[4]],
'error': r[5],
- 'plugin': r[6],
+ 'plugin': tuple(r[6].split('.')),
'package': r[7],
'order': r[8],
}
@@ -737,17 +732,16 @@ class FileMethods(object):
'status': r[4],
'statusmsg': self.manager.statusMsg[r[4]],
'error': r[5],
- 'plugin': r[6],
+ 'plugin': tuple(r[6].split('.')),
'package': r[7],
'order': r[8],
}
return data
-
@style.async
def updateLink(self, f):
- self.c.execute('UPDATE links SET url=?, name=?, size=?, status=?, error=?, package=? WHERE id=?', (f.url, f.name, f.size, f.status, f.error, str(f.packageid), str(f.id)))
+ self.c.execute('UPDATE links SET url=?, name=?, size=?, status=?, error=?, package=? WHERE id=?', (f.url, f.name, f.size, f.status, str(f.error), str(f.packageid), str(f.id)))
@style.queue
def updatePackage(self, p):
@@ -813,15 +807,16 @@ class FileMethods(object):
self.c.execute("SELECT url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?", (str(id),))
r = self.c.fetchone()
if not r: return None
+ r = list(r)
+ r[5] = tuple(r[5].split('.'))
return PyFile(self.manager, id, * r)
-
@style.queue
def getJob(self, occ):
"""return pyfile ids, which are suitable for download and dont use a occupied plugin"""
#@TODO improve this hardcoded method
- pre = "('CCF', 'DLC', 'LinkList', 'RSDF', 'TXT')" #plugins which are processed in collector
+ pre = "('CCF', 'DLC', 'LinkList', 'RSDF', 'TXT')" # plugins which are processed in collector
cmd = "("
for i, item in enumerate(occ):
@@ -832,7 +827,7 @@ class FileMethods(object):
cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE ((p.queue=1 AND l.plugin NOT IN %s) OR l.plugin IN %s) AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % (cmd, pre)
- self.c.execute(cmd) # very bad!
+ self.c.execute(cmd) # very bad!
return [x[0] for x in self.c]
@@ -841,7 +836,7 @@ class FileMethods(object):
"""returns pyfile ids with suited plugins"""
cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE l.plugin IN %s AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % plugins
- self.c.execute(cmd) # very bad!
+ self.c.execute(cmd) # very bad!
return [x[0] for x in self.c]
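
The database hunks above flatten the (plugintype, pluginname) pair into a single dotted "type.name" string for the links.plugin column and split it back into a tuple on every read path (addLinks, getAllLinks, getFile). A minimal sketch of that round trip; the helper names are illustrative, not part of this commit:

    def encode_plugin(plugintype, pluginname):
        # what the INSERT statements above store in the links.plugin TEXT column
        return ".".join((plugintype, pluginname))

    def decode_plugin(column):
        # what the SELECT paths above do before handing the value to PyFile
        return tuple(column.split("."))

    assert decode_plugin(encode_plugin("hoster", "FilerNet")) == ("hoster", "FilerNet")
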
diff --git a/pyload/datatype/File.py b/pyload/datatype/File.py
index 1df0a8590..a3ae82ec2 100644
--- a/pyload/datatype/File.py
+++ b/pyload/datatype/File.py
@@ -9,32 +9,33 @@ from time import sleep, time
from threading import RLock
statusMap = {
- "finished": 0,
- "offline": 1,
- "online": 2,
- "queued": 3,
- "skipped": 4,
- "waiting": 5,
+ "finished": 0,
+ "offline": 1,
+ "online": 2,
+ "queued": 3,
+ "skipped": 4,
+ "waiting": 5,
"temp. offline": 6,
- "starting": 7,
- "failed": 8,
- "aborted": 9,
- "decrypting": 10,
- "custom": 11,
+ "starting": 7,
+ "failed": 8,
+ "aborted": 9,
+ "decrypting": 10,
+ "custom": 11,
"downloading": 12,
- "processing": 13,
- "unknown": 14,
+ "processing": 13,
+ "unknown": 14,
}
def setSize(self, value):
self._size = int(value)
+
class PyFile(object):
"""
Represents a file object at runtime
"""
- __slots__ = ("m", "id", "url", "name", "size", "_size", "status", "plugin",
+ __slots__ = ("m", "id", "url", "name", "size", "_size", "status", "plugintype", "pluginname",
"packageid", "error", "order", "lock", "plugin", "waitUntil",
"active", "abort", "statusname", "reconnected", "progress",
"maxprogress", "pluginmodule", "pluginclass")
@@ -47,8 +48,8 @@ class PyFile(object):
self.name = name
self.size = size
self.status = status
- self.plugin = self.plugintype, self.pluginname = plugin
- self.packageid = package #should not be used, use package() instead
+ self.plugintype, self.pluginname = plugin
+ self.packageid = package # should not be used, use package() instead
self.error = error
self.order = order
# database information ends here
@@ -58,10 +59,10 @@ class PyFile(object):
self.plugin = None
#self.download = None
- self.waitUntil = 0 # time() + time to wait
+ self.waitUntil = 0 # time() + time to wait
# status attributes
- self.active = False #obsolete?
+ self.active = False # obsolete?
self.abort = False
self.reconnected = False
@@ -72,7 +73,6 @@ class PyFile(object):
self.m.cache[int(id)] = self
-
# will convert all sizes to ints
size = property(lambda self: self._size, setSize)
@@ -101,7 +101,7 @@ class PyFile(object):
def setStatus(self, status):
self.status = statusMap[status]
- self.sync() #@TODO needed aslong no better job approving exists
+ self.sync() # @TODO needed aslong no better job approving exists
def setCustomStatus(self, msg, status="processing"):
self.statusname = msg
diff --git a/pyload/manager/Plugin.py b/pyload/manager/Plugin.py
index 222ed9c93..918f6de8a 100644
--- a/pyload/manager/Plugin.py
+++ b/pyload/manager/Plugin.py
@@ -25,37 +25,34 @@ class PluginManager(object):
CONFIG = re.compile(r'__config\s*=\s*\[([^\]]+)', re.M)
DESC = re.compile(r'__description\s*=\s*("|"""|\')([^"\']+)')
-
def __init__(self, core):
self.core = core
self.plugins = {}
self.createIndex()
- #register for import addon
+ # register for import addon
sys.meta_path.append(self)
-
def loadTypes(self):
rootdir = join(pypath, "pyload", "plugin")
userdir = "userplugins"
types = set().union(*[[d for d in listdir(p) if isdir(join(p, d))]
- for p in (rootdir, userdir) if exists(p)])
+ for p in (rootdir, userdir) if exists(p)])
if not types:
self.core.log.critical(_("No plugins found!"))
self.TYPES = list(set(self.TYPES) | types)
-
def createIndex(self):
"""create information for all plugins available"""
sys.path.append(abspath(""))
self.loadTypes()
-
+
configs = []
for type in self.TYPES:
@@ -67,7 +64,6 @@ class PluginManager(object):
self.core.log.debug("Created index of plugins")
-
def parse(self, folder, rootplugins={}):
"""
returns dict with information
@@ -168,7 +164,7 @@ class PluginManager(object):
except Exception:
self.core.log.error("Invalid config in %s: %s" % (name, config))
- elif folder in ("addon", "hook"): #force config creation
+ elif folder in ("addon", "hook"): # force config creation
desc = self.DESC.findall(content)
desc = desc[0][1] if desc else ""
config = (["activated", "bool", "Activated", False],)
@@ -183,7 +179,6 @@ class PluginManager(object):
return plugins
-
def parseUrls(self, urls):
"""parse plugins for given list of urls"""
@@ -200,28 +195,28 @@ class PluginManager(object):
res.append((url, last[0], last[1]))
continue
- for type in self.TYPES:
- for name, plugin in self.plugins[type]:
-
- m = None
+ for plugintype in self.TYPES:
+ m = None
+ for name, plugin in self.plugins[plugintype].iteritems():
try:
if 'pattern' in plugin:
m = plugin['re'].match(url)
except KeyError:
self.core.log.error(_("Plugin [%(type)s] %(name)s skipped due broken pattern")
- % {'name': name, 'type': type})
+ % {'name': plugin['name'], 'type': plugintype})
if m:
- res.append((url, type, name))
- last = (type, name, plugin)
+ res.append((url, plugintype, name))
+ last = (plugintype, name, plugin)
break
- else:
- res.append((url, "internal", "BasePlugin"))
-
+ if m:
+ break
+ else:
+ res.append((url, "internal", "BasePlugin"))
+ print res
return res
-
def findPlugin(self, type, name):
if type not in self.plugins:
return None
@@ -234,7 +229,6 @@ class PluginManager(object):
else:
return self.plugins[type][name]
-
def getPlugin(self, type, name, original=False):
"""return plugin module from hoster|decrypter|container"""
plugin = self.findPlugin(type, name)
@@ -247,7 +241,6 @@ class PluginManager(object):
else:
return self.loadModule(type, name)
-
def getPluginName(self, type, name):
""" used to obtain new name if other plugin was injected"""
plugin = self.findPlugin(type, name)
@@ -260,7 +253,6 @@ class PluginManager(object):
return name
-
def loadModule(self, type, name):
""" Returns loaded module for plugin
@@ -284,13 +276,12 @@ class PluginManager(object):
print_exc()
else:
- plugins[name]['module'] = module #: cache import, maybe unneeded
+ plugins[name]['module'] = module # : cache import, maybe unneeded
self.core.log.debug(_("Loaded plugin: [%(type)s] %(name)s (v%(version).2f)")
% {'name': name, 'type': type, 'version': plugins[name]['version']})
return module
-
def loadClass(self, type, name):
"""Returns the class of a plugin with the same name"""
module = self.loadModule(type, name)
@@ -299,33 +290,30 @@ class PluginManager(object):
else:
return None
-
def getAccountPlugins(self):
"""return list of account plugin names"""
return self.accountPlugins.keys()
-
def find_module(self, fullname, path=None):
- #redirecting imports if necesarry
- if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #seperate pyload plugins
+ # redirecting imports if necesarry
+ if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): # seperate pyload plugins
if fullname.startswith(self.USERROOT): user = 1
- else: user = 0 #used as bool and int
+ else: user = 0 # used as bool and int
split = fullname.split(".")
if len(split) != 4 - user: return
type, name = split[2 - user:4 - user]
if type in self.plugins and name in self.plugins[type]:
- #userplugin is a newer version
+ # userplugin is a newer version
if not user and self.plugins[type][name]['user']:
return self
- #imported from userdir, but pyloads is newer
+ # imported from userdir, but pyloads is newer
if user and not self.plugins[type][name]['user']:
return self
-
def load_module(self, name, replace=True):
- if name not in sys.modules: #could be already in modules
+ if name not in sys.modules: # could be already in modules
if replace:
if self.ROOT in name:
newname = name.replace(self.ROOT, self.USERROOT)
@@ -339,13 +327,12 @@ class PluginManager(object):
self.core.log.debug("Redirected import %s -> %s" % (name, newname))
module = __import__(newname, globals(), locals(), [plugin])
- #inject under new an old name
+ # inject under new an old name
sys.modules[name] = module
sys.modules[newname] = module
return sys.modules[name]
-
def reloadPlugins(self, type_plugins):
""" reload and reindex plugins """
if not type_plugins:
@@ -356,14 +343,14 @@ class PluginManager(object):
reloaded = []
as_dict = {}
- for t,n in type_plugins:
+ for t, n in type_plugins:
if t in as_dict:
as_dict[t].append(n)
else:
as_dict[t] = [n]
for type in as_dict.iterkeys():
- if type in ("addon", "internal"): #: do not reload them because would cause to much side effects
+ if type in ("addon", "internal"): # : do not reload them because would cause to much side effects
self.core.log.debug("Skipping reload for plugin: [%(type)s] %(name)s" % {'name': plugin, 'type': type})
continue
@@ -381,7 +368,7 @@ class PluginManager(object):
else:
reloaded.append((type, plugin))
- #index creation
+ # index creation
self.plugins[type] = self.parse(type)
setattr(self, "%sPlugins" % type, self.plugins[type])
@@ -391,7 +378,6 @@ class PluginManager(object):
return reloaded #: return a list of the plugins successfully reloaded
-
def reloadPlugin(self, type_plugin):
""" reload and reindex ONE plugin """
return True if self.reloadPlugins(type_plugin) else False
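
The parseUrls() rewrite above walks every plugin type, tries each plugin's compiled pattern against the URL, and only falls back to the internal BasePlugin when nothing matched. A condensed sketch of that control flow under an assumed plugins layout (the FilerNet pattern below is purely illustrative):

    import re

    # plugins[type][name] -> {'re': compiled pattern}; layout assumed from the hunks above
    plugins = {
        'hoster': {
            'FilerNet': {'re': re.compile(r'https?://(?:www\.)?filer\.net/get/\w+')},
        },
    }

    def parse_url(url):
        """Return (url, plugintype, pluginname), falling back to the internal BasePlugin."""
        for plugintype, byname in plugins.iteritems():
            for name, plugin in byname.iteritems():
                if 're' in plugin and plugin['re'].match(url):
                    return url, plugintype, name
        return url, 'internal', 'BasePlugin'

    print parse_url("http://filer.net/get/abc123")    # ('http://filer.net/get/abc123', 'hoster', 'FilerNet')
    print parse_url("http://example.com/unknown")     # ('http://example.com/unknown', 'internal', 'BasePlugin')
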
diff --git a/pyload/manager/Thread.py b/pyload/manager/Thread.py
index 6c9304e87..753a8c251 100644
--- a/pyload/manager/Thread.py
+++ b/pyload/manager/Thread.py
@@ -22,7 +22,6 @@ from pyload.utils import freeSpace, lock
class ThreadManager(object):
"""manages the download threads, assign jobs, reconnect etc"""
-
def __init__(self, core):
"""Constructor"""
self.core = core
@@ -34,7 +33,7 @@ class ThreadManager(object):
self.reconnecting = Event()
self.reconnecting.clear()
- self.downloaded = 0 #number of files downloaded since last cleanup
+ self.downloaded = 0 # number of files downloaded since last cleanup
self.lock = Lock()
@@ -47,7 +46,7 @@ class ThreadManager(object):
# threads which are fetching hoster results
self.infoResults = {}
- #timeout for cache purge
+ # timeout for cache purge
self.timestamp = 0
pycurl.global_init(pycurl.GLOBAL_DEFAULT)
@@ -55,7 +54,6 @@ class ThreadManager(object):
for i in range(0, self.core.config.get("download", "max_downloads")):
self.createThread()
-
def createThread(self):
"""create a download thread"""
@@ -83,7 +81,6 @@ class ThreadManager(object):
return rid
-
@lock
def getInfoResult(self, rid):
"""returns result and clears it"""
@@ -112,13 +109,12 @@ class ThreadManager(object):
"""get a id list of all pyfiles processed"""
return [x.id for x in self.getActiveFiles()]
-
def work(self):
"""run all task which have to be done (this is for repetivive call by core)"""
try:
self.tryReconnect()
except Exception, e:
- self.core.log.error(_("Reconnect Failed: %s") % str(e) )
+ self.core.log.error(_("Reconnect Failed: %s") % str(e))
self.reconnecting.clear()
if self.core.debug:
print_exc()
@@ -133,7 +129,7 @@ class ThreadManager(object):
sleep(0.5)
self.assignJob()
- #it may be failed non critical so we try it again
+ # it may be failed non critical so we try it again
if (self.infoCache or self.infoResults) and self.timestamp < time():
self.infoCache.clear()
@@ -162,7 +158,7 @@ class ThreadManager(object):
self.reconnecting.set()
- #Do reconnect
+ # Do reconnect
self.core.log.info(_("Starting reconnect"))
while [x.active.plugin.waiting for x in self.threads if x.active].count(True) != 0:
@@ -175,7 +171,7 @@ class ThreadManager(object):
self.core.log.debug("Old IP: %s" % ip)
try:
- reconn = Popen(self.core.config['reconnect']['method'], bufsize=-1, shell=True)#, stdout=subprocess.PIPE)
+ reconn = Popen(self.core.config['reconnect']['method'], bufsize=-1, shell=True) # , stdout=subprocess.PIPE)
except Exception:
self.core.log.warning(_("Failed executing reconnect script!"))
self.core.config["reconnect"]["activated"] = False
@@ -196,7 +192,7 @@ class ThreadManager(object):
def getIP(self):
"""retrieve current ip"""
services = [("http://automation.whatismyip.com/n09230945.asp", "(\S+)"),
- ("http://checkip.dyndns.org/",".*Current IP Address: (\S+)</body>.*")]
+ ("http://checkip.dyndns.org/", ".*Current IP Address: (\S+)</body>.*")]
ip = ""
for i in range(10):
@@ -224,7 +220,6 @@ class ThreadManager(object):
if free:
free[0].put("quit")
-
def cleanPycurl(self):
""" make a global curl cleanup (currently ununused) """
if self.processingIds():
@@ -241,13 +236,13 @@ class ThreadManager(object):
if self.pause or not self.core.api.isTimeDownload(): return
- #if self.downloaded > 20:
+ # if self.downloaded > 20:
# if not self.cleanPyCurl(): return
free = [x for x in self.threads if not x.active]
inuse = set([(x.active.pluginname, self.getLimit(x)) for x in self.threads if x.active and x.active.hasPlugin() and x.active.plugin.account])
- inuse = map(lambda x: (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])) ,inuse)
+ inuse = map(lambda x: (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])), inuse)
onlimit = [x[0] for x in inuse if x[1] > 0 and x[2] >= x[1]]
occ = [x.active.pluginname for x in self.threads if x.active and x.active.hasPlugin() and not x.active.plugin.multiDL] + onlimit
@@ -266,7 +261,7 @@ class ThreadManager(object):
job.release()
return
- if job.plugin.__type == "hoster":
+ if job.plugin.getPluginType() == "hoster":
spaceLeft = freeSpace(self.core.config["general"]["download_folder"]) / 1024 / 1024
if spaceLeft < self.core.config["general"]["min_free_space"]:
self.core.log.warning(_("Not enough space left on device"))
@@ -278,18 +273,17 @@ class ThreadManager(object):
thread.put(job)
else:
- #put job back
+ # put job back
if occ not in self.core.files.jobCache:
self.core.files.jobCache[occ] = []
self.core.files.jobCache[occ].append(job.id)
- #check for decrypt jobs
+ # check for decrypt jobs
job = self.core.files.getDecryptJob()
if job:
job.initPlugin()
thread = DecrypterThread(self, job)
-
else:
thread = DecrypterThread(self, job)
diff --git a/pyload/manager/event/Scheduler.py b/pyload/manager/event/Scheduler.py
index fd428a956..2cb537383 100644
--- a/pyload/manager/event/Scheduler.py
+++ b/pyload/manager/event/Scheduler.py
@@ -5,6 +5,7 @@ from time import time
from heapq import heappop, heappush
from threading import Lock, Thread
+
class AlreadyCalled(Exception):
pass
@@ -40,7 +41,6 @@ class Scheduler(object):
self.queue.put((t, j))
return d
-
def removeJob(self, d):
"""
:param d: defered object
diff --git a/pyload/manager/thread/Decrypter.py b/pyload/manager/thread/Decrypter.py
index 51544d1b9..7fcf93e4c 100644
--- a/pyload/manager/thread/Decrypter.py
+++ b/pyload/manager/thread/Decrypter.py
@@ -19,6 +19,7 @@ from pyload.plugin.Plugin import Abort, Fail, Retry
class DecrypterThread(PluginThread):
+
"""thread for decrypting"""
def __init__(self, manager, pyfile):
@@ -42,12 +43,13 @@ class DecrypterThread(PluginThread):
retry = False
try:
- self.m.log.info(_("Decrypting starts: %s") % pyfile.name)
+ self.m.core.log.info(_("Decrypting starts: %s") % pyfile.name)
pyfile.error = ""
pyfile.plugin.preprocessing(self)
except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ self.m.core.log.error(
+ _("Plugin %s is missing a function.") % pyfile.pluginname)
return
except Fail, e:
@@ -55,10 +57,12 @@ class DecrypterThread(PluginThread):
if msg == "offline":
pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ self.m.core.log.warning(
+ _("Download is offline: %s") % pyfile.name)
else:
pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ self.m.core.log.error(
+ _("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
pyfile.error = msg
if self.m.core.debug:
@@ -66,7 +70,7 @@ class DecrypterThread(PluginThread):
return
except Abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
pyfile.setStatus("aborted")
if self.m.core.debug:
@@ -74,13 +78,14 @@ class DecrypterThread(PluginThread):
return
except Retry:
- self.m.log.info(_("Retrying %s") % pyfile.name)
+ self.m.core.log.info(_("Retrying %s") % pyfile.name)
retry = True
return self.run()
except Exception, e:
pyfile.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ self.m.core.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {
+ "name": pyfile.name, "msg": str(e)})
pyfile.error = str(e)
if self.m.core.debug:
diff --git a/pyload/manager/thread/Download.py b/pyload/manager/thread/Download.py
index c7d21a4ba..d4006d307 100644
--- a/pyload/manager/thread/Download.py
+++ b/pyload/manager/thread/Download.py
@@ -22,6 +22,7 @@ class DownloadThread(PluginThread):
"""thread for downloading files from 'real' hoster plugins"""
#--------------------------------------------------------------------------
+
def __init__(self, manager):
"""Constructor"""
PluginThread.__init__(self, manager)
@@ -49,22 +50,22 @@ class DownloadThread(PluginThread):
try:
if not pyfile.hasPlugin():
continue
- #this pyfile was deleted while queueing
+ # this pyfile was deleted while queueing
pyfile.plugin.checkForSameFiles(starting=True)
- self.m.log.info(_("Download starts: %s" % pyfile.name))
+ self.m.core.log.info(_("Download starts: %s" % pyfile.name))
# start download
self.m.core.addonManager.downloadPreparing(pyfile)
pyfile.error = ""
pyfile.plugin.preprocessing(self)
- self.m.log.info(_("Download finished: %s") % pyfile.name)
+ self.m.core.log.info(_("Download finished: %s") % pyfile.name)
self.m.core.addonManager.downloadFinished(pyfile)
self.m.core.files.checkPackageFinished(pyfile)
except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ self.m.core.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
pyfile.setStatus("failed")
pyfile.error = "Plugin does not work"
self.clean(pyfile)
@@ -72,7 +73,7 @@ class DownloadThread(PluginThread):
except Abort:
try:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
except Exception:
pass
@@ -86,7 +87,7 @@ class DownloadThread(PluginThread):
except Reconnect:
self.queue.put(pyfile)
- #pyfile.req.clearCookies()
+ # pyfile.req.clearCookies()
while self.m.reconnecting.isSet():
sleep(0.5)
@@ -95,7 +96,7 @@ class DownloadThread(PluginThread):
except Retry, e:
reason = e.args[0]
- self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
+ self.m.core.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
self.queue.put(pyfile)
continue
@@ -104,13 +105,13 @@ class DownloadThread(PluginThread):
if msg == "offline":
pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ self.m.core.log.warning(_("Download is offline: %s") % pyfile.name)
elif msg == "temp. offline":
pyfile.setStatus("temp. offline")
- self.m.log.warning(_("Download is temporary offline: %s") % pyfile.name)
+ self.m.core.log.warning(_("Download is temporary offline: %s") % pyfile.name)
else:
pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ self.m.core.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
pyfile.error = msg
if self.m.core.debug:
@@ -127,10 +128,10 @@ class DownloadThread(PluginThread):
code = 0
msg = e.args
- self.m.log.debug("pycurl exception %s: %s" % (code, msg))
+ self.m.core.log.debug("pycurl exception %s: %s" % (code, msg))
if code in (7, 18, 28, 52, 56):
- self.m.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
+ self.m.core.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
wait = time() + 60
pyfile.waitUntil = wait
@@ -141,7 +142,7 @@ class DownloadThread(PluginThread):
break
if pyfile.abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ self.m.core.log.info(_("Download aborted: %s") % pyfile.name)
pyfile.setStatus("aborted")
self.clean(pyfile)
@@ -152,7 +153,7 @@ class DownloadThread(PluginThread):
else:
pyfile.setStatus("failed")
- self.m.log.error("pycurl error %s: %s" % (code, msg))
+ self.m.core.log.error("pycurl error %s: %s" % (code, msg))
if self.m.core.debug:
print_exc()
self.writeDebugReport(pyfile)
@@ -165,7 +166,7 @@ class DownloadThread(PluginThread):
except SkipDownload, e:
pyfile.setStatus("skipped")
- self.m.log.info(
+ self.m.core.log.info(
_("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
self.clean(pyfile)
@@ -177,10 +178,9 @@ class DownloadThread(PluginThread):
continue
-
except Exception, e:
pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ self.m.core.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
pyfile.error = str(e)
if self.m.core.debug:
@@ -196,18 +196,16 @@ class DownloadThread(PluginThread):
pyfile.checkIfProcessed()
exc_clear()
- #pyfile.plugin.req.clean()
+ # pyfile.plugin.req.clean()
self.active = False
pyfile.finishIfDone()
self.m.core.files.save()
-
def put(self, job):
"""assing job to thread"""
self.queue.put(job)
-
def stop(self):
"""stops the thread"""
self.put("quit")
diff --git a/pyload/manager/thread/Info.py b/pyload/manager/thread/Info.py
index 4526a07ed..af958fa74 100644
--- a/pyload/manager/thread/Info.py
+++ b/pyload/manager/thread/Info.py
@@ -26,13 +26,13 @@ class InfoThread(PluginThread):
PluginThread.__init__(self, manager)
self.data = data
- self.pid = pid # package id
+ self.pid = pid # package id
# [ .. (name, plugin) .. ]
- self.rid = rid #result id
- self.add = add #add packages instead of return result
+ self.rid = rid # result id
+ self.add = add # add packages instead of return result
- self.cache = [] #accumulated data
+ self.cache = [] # accumulated data
self.start()
@@ -42,35 +42,32 @@ class InfoThread(PluginThread):
plugins = {}
container = []
- for url, plugintype, pluginname in data:
- try:
- plugins[plugintype][pluginname].append(url)
- except Exception:
- plugins[plugintype][pluginname] = [url]
-
- # filter out container plugins
- for name in self.m.core.pluginManager.containerPlugins:
- if name in plugins:
+ for url, plugintype, pluginname in self.data:
+ # filter out container plugins
+ if plugintype == 'container':
container.append((pluginname, url))
+ else:
+ if (plugintype, pluginname) in plugins:
+ plugins[(plugintype, pluginname)].append(url)
+ else:
+ plugins[(plugintype, pluginname)] = [url]
- del plugins[name]
-
- #directly write to database
+ # directly write to database
if self.pid > -1:
- for plugintype, pluginname, urls in plugins.iteritems():
+ for (plugintype, pluginname), urls in plugins.iteritems():
plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
self.m.core.files.save()
elif self.add:
- for plugintype, pluginname, urls in plugins.iteritems():
+ for (plugintype, pluginname), urls in plugins.iteritems():
plugin = self.m.core.pluginManager.getPlugin(plugintype, pluginname, True)
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
else:
- #generate default result
+ # generate default result
result = [(url, 0, 3, url) for url in urls]
self.updateCache(pluginname, result)
@@ -82,14 +79,13 @@ class InfoThread(PluginThread):
for k, v in packs:
self.m.core.api.addPackage(k, v)
- #empty cache
+ # empty cache
del self.cache[:]
- else: #post the results
-
+ else: # post the results
for name, url in container:
- #attach container content
+ # attach container content
try:
data = self.decryptContainer(name, url)
except Exception:
@@ -110,12 +106,12 @@ class InfoThread(PluginThread):
if hasattr(plugin, "getInfo"):
self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
- #force to process cache
+ # force to process cache
if self.cache:
self.updateResult(pluginname, [], True)
else:
- #generate default result
+ # generate default result
result = [(url, 0, 3, url) for url in urls]
self.updateResult(pluginname, result, True)
@@ -124,20 +120,18 @@ class InfoThread(PluginThread):
self.m.timestamp = time() + 5 * 60
-
def updateDB(self, plugin, result):
self.m.core.files.updateFileInfo(result, self.pid)
def updateResult(self, plugin, result, force=False):
- #parse package name and generate result
- #accumulate results
+ # parse package name and generate result
+ # accumulate results
self.cache.extend(result)
if len(self.cache) >= 20 or force:
- #used for package generating
- tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
- for name, size, status, url in self.cache]
+ # used for package generating
+ tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size)))) for name, size, status, url in self.cache]
data = parseNames(tmp)
result = {}
@@ -155,8 +149,8 @@ class InfoThread(PluginThread):
def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
try:
- result = [] #result loaded from cache
- process = [] #urls to process
+ result = [] # result loaded from cache
+ process = [] # urls to process
for url in urls:
if url in self.m.infoCache:
result.append(self.m.infoCache[url])
@@ -170,19 +164,18 @@ class InfoThread(PluginThread):
if process:
self.m.log.debug("Run Info Fetching for %s" % pluginname)
for result in plugin.getInfo(process):
- #result = [ .. (name, size, status, url) .. ]
+ # result = [ .. (name, size, status, url) .. ]
if not type(result) == list:
result = [result]
for res in result:
- self.m.infoCache[res[3]] = res #: why don't assign res dict directly?
+ self.m.infoCache[res[3]] = res # : why don't assign res dict directly?
cb(pluginname, result)
self.m.log.debug("Finished Info Fetching for %s" % pluginname)
except Exception, e:
- self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
- {"name": pluginname, "err": str(e)})
+ self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") % {"name": pluginname, "err": str(e)})
if self.m.core.debug:
print_exc()
@@ -191,7 +184,6 @@ class InfoThread(PluginThread):
result = [(url, 0, 3, url) for url in urls]
cb(pluginname, result)
-
def decryptContainer(self, plugin, url):
data = []
# only works on container plugins
diff --git a/pyload/network/HTTPRequest.py b/pyload/network/HTTPRequest.py
index 49d1d042e..fe7e26c48 100644
--- a/pyload/network/HTTPRequest.py
+++ b/pyload/network/HTTPRequest.py
@@ -19,12 +19,14 @@ from pyload.utils import encode
def myquote(url):
return quote(encode(url), safe="%/:=&?~#+!$,;'@()*[]")
+
def myurlencode(data):
data = dict(data)
return urlencode(dict((encode(x), encode(y)) for x, y in data.iteritems()))
bad_headers = range(400, 404) + range(405, 418) + range(500, 506)
+
class BadHeader(Exception):
def __init__(self, code, content=""):
Exception.__init__(self, "Bad server response: %s %s" % (code, responses[int(code)]))
@@ -37,16 +39,16 @@ class HTTPRequest(object):
self.c = pycurl.Curl()
self.rep = StringIO()
- self.cj = cookies #cookiejar
+ self.cj = cookies # cookiejar
self.lastURL = None
self.lastEffectiveURL = None
self.abort = False
- self.code = 0 # last http code
+ self.code = 0 # last http code
self.header = ""
- self.headers = [] #temporary request header
+ self.headers = [] # temporary request header
self.initHandle()
self.setInterface(options)
@@ -56,7 +58,6 @@ class HTTPRequest(object):
self.log = getLogger("log")
-
def initHandle(self):
""" sets common options to curl handle """
self.c.setopt(pycurl.FOLLOWLOCATION, 1)
@@ -69,7 +70,8 @@ class HTTPRequest(object):
self.c.setopt(pycurl.SSL_VERIFYPEER, 0)
self.c.setopt(pycurl.LOW_SPEED_TIME, 60)
self.c.setopt(pycurl.LOW_SPEED_LIMIT, 5)
- self.c.setopt(pycurl.USE_SSL, pycurl.CURLUSESSL_TRY)
+ if hasattr(pycurl, "USE_SSL"):
+ self.c.setopt(pycurl.USE_SSL, pycurl.CURLUSESSL_TRY)
#self.c.setopt(pycurl.VERBOSE, 1)
@@ -117,7 +119,6 @@ class HTTPRequest(object):
if "timeout" in options:
self.c.setopt(pycurl.LOW_SPEED_TIME, options["timeout"])
-
def addCookies(self):
""" put cookies from curl handle to cj """
if self.cj:
@@ -149,7 +150,7 @@ class HTTPRequest(object):
self.c.setopt(pycurl.POST, 1)
if not multipart:
if type(post) == unicode:
- post = str(post) #unicode not allowed
+ post = str(post) # unicode not allowed
elif type(post) == str:
pass
else:
@@ -170,7 +171,6 @@ class HTTPRequest(object):
self.c.setopt(pycurl.COOKIEJAR, "")
self.getCookies()
-
def load(self, url, get={}, post={}, referer=True, cookies=True, just_header=False, multipart=False, decode=False, follow_location=True, save_cookies=True):
""" load and returns a given page """
@@ -216,7 +216,7 @@ class HTTPRequest(object):
""" raise an exceptions on bad headers """
code = int(self.c.getinfo(pycurl.RESPONSE_CODE))
if code in bad_headers:
- #404 will NOT raise an exception
+ # 404 will NOT raise an exception
raise BadHeader(code, self.getResponse())
return code
@@ -237,7 +237,7 @@ class HTTPRequest(object):
def decodeResponse(self, rep):
""" decode with correct encoding, relies on header """
header = self.header.splitlines()
- encoding = "utf8" # default encoding
+ encoding = "utf8" # default encoding
for line in header:
line = line.lower().replace(" ", "")
@@ -259,7 +259,7 @@ class HTTPRequest(object):
decoder = getincrementaldecoder(encoding)("replace")
rep = decoder.decode(rep, True)
- #TODO: html_unescape as default
+ # TODO: html_unescape as default
except LookupError:
self.log.debug("No Decoder foung for %s" % encoding)
diff --git a/pyload/plugin/OCR.py b/pyload/plugin/OCR.py
index 01ba6d534..df32b9f23 100644
--- a/pyload/plugin/OCR.py
+++ b/pyload/plugin/OCR.py
@@ -11,7 +11,7 @@ except ImportError:
import logging
import os
import subprocess
-#import tempfile
+# import tempfile
from pyload.plugin.Plugin import Base
from pyload.utils import fs_join
@@ -20,32 +20,27 @@ from pyload.utils import fs_join
class OCR(Base):
__name = "OCR"
__type = "ocr"
- __version = "0.11"
+ __version = "0.12"
__description = """OCR base plugin"""
__license = "GPLv3"
__authors = [("pyLoad Team", "admin@pyload.org")]
-
def __init__(self):
self.logger = logging.getLogger("log")
-
def load_image(self, image):
self.image = Image.open(image)
self.pixels = self.image.load()
self.result_captcha = ''
-
def deactivate(self):
"""delete all tmp images"""
pass
-
def threshold(self, value):
self.image = self.image.point(lambda a: a * value + 10)
-
def run(self, command):
"""Run a command"""
@@ -56,14 +51,13 @@ class OCR(Base):
popen.stderr.close()
self.logger.debug("Tesseract ReturnCode %s Output: %s" % (popen.returncode, output))
-
- def run_tesser(self, subset=False, digits=True, lowercase=True, uppercase=True):
- #tmpTif = tempfile.NamedTemporaryFile(suffix=".tif")
+ def run_tesser(self, subset=False, digits=True, lowercase=True, uppercase=True, pagesegmode=None):
+ # tmpTif = tempfile.NamedTemporaryFile(suffix=".tif")
try:
tmpTif = open(fs_join("tmp", "tmpTif_%s.tif" % self.__class__.__name__), "wb")
tmpTif.close()
- #tmpTxt = tempfile.NamedTemporaryFile(suffix=".txt")
+ # tmpTxt = tempfile.NamedTemporaryFile(suffix=".txt")
tmpTxt = open(fs_join("tmp", "tmpTxt_%s.txt" % self.__class__.__name__), "wb")
tmpTxt.close()
@@ -79,10 +73,13 @@ class OCR(Base):
else:
tessparams = ["tesseract"]
- tessparams.extend([os.path.abspath(tmpTif.name), os.path.abspath(tmpTxt.name).replace(".txt", "")] )
+ tessparams.extend([os.path.abspath(tmpTif.name), os.path.abspath(tmpTxt.name).replace(".txt", "")])
+
+ if pagesegmode:
+ tessparams.extend(["-psm", str(pagesegmode)])
if subset and (digits or lowercase or uppercase):
- #tmpSub = tempfile.NamedTemporaryFile(suffix=".subset")
+ # tmpSub = tempfile.NamedTemporaryFile(suffix=".subset")
with open(fs_join("tmp", "tmpSub_%s.subset" % self.__class__.__name__), "wb") as tmpSub:
tmpSub.write("tessedit_char_whitelist ")
@@ -116,18 +113,15 @@ class OCR(Base):
except Exception:
pass
-
def get_captcha(self, name):
raise NotImplementedError
-
def to_greyscale(self):
if self.image.mode != 'L':
self.image = self.image.convert('L')
self.pixels = self.image.load()
-
def eval_black_white(self, limit):
self.pixels = self.image.load()
w, h = self.image.size
@@ -138,7 +132,6 @@ class OCR(Base):
else:
self.pixels[x, y] = 0
-
def clean(self, allowed):
pixels = self.pixels
@@ -184,7 +177,6 @@ class OCR(Base):
self.pixels = pixels
-
def derotate_by_average(self):
"""rotate by checking each angle and guess most suitable"""
@@ -258,7 +250,6 @@ class OCR(Base):
self.pixels = pixels
-
def split_captcha_letters(self):
captcha = self.image
started = False
@@ -298,7 +289,6 @@ class OCR(Base):
return letters
-
def correct(self, values, var=None):
if var:
result = var
diff --git a/pyload/plugin/Plugin.py b/pyload/plugin/Plugin.py
index 486dbeb0f..af70232e0 100644
--- a/pyload/plugin/Plugin.py
+++ b/pyload/plugin/Plugin.py
@@ -59,44 +59,38 @@ class Base(object):
#: Core instance
self.core = core
-
def _log(self, type, args):
- msg = " | ".join([encode(a).strip() for a in args if a])
+ msg = " | ".join([encode(str(a)).strip() for a in args if a])
logger = getattr(self.core.log, type)
logger("%s: %s" % (self.__class__.__name__, msg or _("%s MARK" % type.upper())))
-
def logDebug(self, *args):
if self.core.debug:
return self._log("debug", args)
-
def logInfo(self, *args):
return self._log("info", args)
-
def logWarning(self, *args):
return self._log("warning", args)
-
def logError(self, *args):
return self._log("error", args)
-
def logCritical(self, *args):
return self._log("critical", args)
+ def getPluginType(self):
+ return getattr(self, "_%s__type" % self.__class__.__name__)
def getPluginConfSection(self):
- return "%s_%s" % (self.__class__.__name__, getattr(self, "_%s__type" % self.__class__.__name__))
-
+ return "%s_%s" % (self.__class__.__name__, getattr(self, "_%s__type" % self.__class__.__name__))
#: Deprecated method
def setConf(self, option, value):
""" see `setConfig` """
self.setConfig(option, value)
-
def setConfig(self, option, value):
""" Set config value for current plugin
@@ -106,12 +100,10 @@ class Base(object):
"""
self.core.config.setPlugin(self.getPluginConfSection(), option, value)
-
#: Deprecated method
def getConf(self, option):
""" see `getConfig` """
- return self.getConfig(option)
-
+ return self.core.config.getPlugin(self.getPluginConfSection(), option)
def getConfig(self, option):
""" Returns config value for current plugin
@@ -121,29 +113,24 @@ class Base(object):
"""
return self.core.config.getPlugin(self.getPluginConfSection(), option)
-
def setStorage(self, key, value):
""" Saves a value persistently to the database """
self.core.db.setStorage(self.getPluginConfSection(), key, value)
-
def store(self, key, value):
""" same as `setStorage` """
self.core.db.setStorage(self.getPluginConfSection(), key, value)
-
def getStorage(self, key=None, default=None):
""" Retrieves saved value or dict of all saved entries if key is None """
if key:
return self.core.db.getStorage(self.getPluginConfSection(), key) or default
return self.core.db.getStorage(self.getPluginConfSection(), key)
-
def retrieve(self, *args, **kwargs):
""" same as `getStorage` """
return self.getStorage(*args, **kwargs)
-
def delStorage(self, key):
""" Delete entry in db """
self.core.db.delStorage(self.__class__.__name__, key)
@@ -164,13 +151,11 @@ class Plugin(Base):
__description = """Base plugin"""
__license = "GPLv3"
__authors = [("RaNaN", "RaNaN@pyload.org"),
- ("spoob", "spoob@pyload.org"),
- ("mkaay", "mkaay@mkaay.de")]
-
+ ("spoob", "spoob@pyload.org"),
+ ("mkaay", "mkaay@mkaay.de")]
info = {} #: file info dict
-
def __init__(self, pyfile):
Base.__init__(self, pyfile.m.core)
@@ -207,10 +192,10 @@ class Plugin(Base):
self.user, data = self.account.selectAccount()
#: Browser instance, see `network.Browser`
self.req = self.account.getAccountRequest(self.user)
- self.chunkLimit = -1 # chunk limit, -1 for unlimited
+ self.chunkLimit = -1 # chunk limit, -1 for unlimited
#: enables resume (will be ignored if server dont accept chunks)
self.resumeDownload = True
- self.multiDL = True #every hoster with account should provide multiple downloads
+ self.multiDL = True # every hoster with account should provide multiple downloads
#: premium status
self.premium = self.account.isPremium(self.user)
else:
@@ -219,7 +204,7 @@ class Plugin(Base):
#: associated pyfile instance, see `PyFile`
self.pyfile = pyfile
- self.thread = None # holds thread in future
+ self.thread = None # holds thread in future
#: location where the last call to download was saved
self.lastDownload = ""
@@ -232,32 +217,27 @@ class Plugin(Base):
#: captcha task
self.cTask = None
- self.html = None #@TODO: Move to hoster class in 0.4.10
+ self.html = None # @TODO: Move to hoster class in 0.4.10
self.retries = 0
self.init()
-
def getChunkCount(self):
if self.chunkLimit <= 0:
return self.core.config['download']['chunks']
return min(self.core.config['download']['chunks'], self.chunkLimit)
-
def __call__(self):
return self.__class__.__name__
-
def init(self):
"""initialize the plugin (in addition to `__init__`)"""
pass
-
def setup(self):
""" setup for enviroment and other things, called before downloading (possibly more than one time)"""
pass
-
def preprocessing(self, thread):
""" handles important things to do before starting """
self.thread = thread
@@ -273,19 +253,16 @@ class Plugin(Base):
return self.process(self.pyfile)
-
def process(self, pyfile):
"""the 'main' method of every plugin, you **have to** overwrite it"""
raise NotImplementedError
-
def resetAccount(self):
""" dont use account and retry download """
self.account = None
self.req = self.core.requestFactory.getRequest(self.__class__.__name__)
self.retry()
-
def checksum(self, local_file=None):
"""
return codes:
@@ -299,13 +276,11 @@ class Plugin(Base):
return True, 10
-
def setReconnect(self, reconnect):
reconnect = bool(reconnect)
self.logDebug("Set wantReconnect to: %s (previous: %s)" % (reconnect, self.wantReconnect))
self.wantReconnect = reconnect
-
def setWait(self, seconds, reconnect=None):
"""Set a specific wait time later used with `wait`
@@ -323,7 +298,6 @@ class Plugin(Base):
if reconnect is not None:
self.setReconnect(reconnect)
-
def wait(self, seconds=None, reconnect=None):
""" waits the time previously set """
@@ -369,19 +343,16 @@ class Plugin(Base):
pyfile.status = status
-
def fail(self, reason):
""" fail and give reason """
raise Fail(reason)
-
def abort(self, reason=""):
""" abort and give reason """
if reason:
self.pyfile.error = str(reason)
raise Abort
-
def error(self, reason="", type=""):
if not reason and not type:
type = "unknown"
@@ -392,21 +363,18 @@ class Plugin(Base):
raise Fail(msg)
-
def offline(self, reason=""):
""" fail and indicate file is offline """
if reason:
self.pyfile.error = str(reason)
raise Fail("offline")
-
def tempOffline(self, reason=""):
""" fail and indicates file ist temporary offline, the core may take consequences """
if reason:
self.pyfile.error = str(reason)
raise Fail("temp. offline")
-
def retry(self, max_tries=5, wait_time=1, reason=""):
"""Retries and begin again from the beginning
@@ -422,19 +390,16 @@ class Plugin(Base):
self.retries += 1
raise Retry(reason)
-
def invalidCaptcha(self):
self.logError(_("Invalid captcha"))
if self.cTask:
self.cTask.invalid()
-
def correctCaptcha(self):
self.logInfo(_("Correct captcha"))
if self.cTask:
self.cTask.correct()
-
def decryptCaptcha(self, url, get={}, post={}, cookies=False, forceUser=False, imgtype='jpg',
result_type='textual', timeout=290):
""" Loads a captcha and decrypts it with ocr, plugin, user input
@@ -487,7 +452,7 @@ class Plugin(Base):
captchaManager.removeTask(task)
- if task.error and has_plugin: #ignore default error message since the user could use OCR
+ if task.error and has_plugin: # ignore default error message since the user could use OCR
self.fail(_("Pil and tesseract not installed and no Client connected for captcha decrypting"))
elif task.error:
self.fail(task.error)
@@ -505,7 +470,6 @@ class Plugin(Base):
return result
-
def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=False, follow_location=True, save_cookies=True):
"""Load content at url and returns it
@@ -526,7 +490,7 @@ class Plugin(Base):
if not url:
self.fail(_("No url given"))
- url = encode(url).strip() #@NOTE: utf8 vs decode -> please use decode attribute in all future plugins
+ url = encode(url).strip() # @NOTE: utf8 vs decode -> please use decode attribute in all future plugins
if self.core.debug:
self.logDebug("Load url: " + url, *["%s=%s" % (key, val) for key, val in locals().iteritems() if key not in ("self", "url")])
@@ -552,7 +516,7 @@ class Plugin(Base):
self.logError(e)
if just_header:
- #parse header
+ # parse header
header = {"code": self.req.code}
for line in res.splitlines():
line = line.strip()
@@ -573,7 +537,6 @@ class Plugin(Base):
return res
-
def download(self, url, get={}, post={}, ref=True, cookies=True, disposition=False):
"""Downloads the content at url to download folder
@@ -660,7 +623,6 @@ class Plugin(Base):
self.lastDownload = filename
return self.lastDownload
-
def checkDownload(self, rules, api_size=0, max_size=50000, delete=True, read_size=0):
""" checks the content of the last downloaded file, re match is saved to `lastCheck`
@@ -685,7 +647,7 @@ class Plugin(Base):
with open(lastDownload, "rb") as f:
content = f.read(read_size if read_size else -1)
- #produces encoding errors, better log to other file in the future?
+ # produces encoding errors, better log to other file in the future?
#self.logDebug("Content: %s" % content)
for name, rule in rules.iteritems():
if isinstance(rule, basestring):
@@ -701,14 +663,12 @@ class Plugin(Base):
self.lastCheck = m
return name
-
def getPassword(self):
""" get the password the user provided in the package"""
password = self.pyfile.package().password
if not password: return ""
return password
-
def checkForSameFiles(self, starting=False):
""" checks if same file was/is downloaded within same package
@@ -720,10 +680,9 @@ class Plugin(Base):
for pyfile in self.core.files.cache.values():
if pyfile != self.pyfile and pyfile.name == self.pyfile.name and pyfile.package().folder == pack.folder:
- if pyfile.status in (0, 12): #finished or downloading
+ if pyfile.status in (0, 12): # finished or downloading
raise SkipDownload(pyfile.pluginname)
- elif pyfile.status in (
- 5, 7) and starting: #a download is waiting/starting and was appenrently started before
+ elif pyfile.status in (5, 7) and starting: # a download is waiting/starting and was appenrently started before
raise SkipDownload(pyfile.pluginname)
download_folder = self.core.config['general']['download_folder']
@@ -741,7 +700,6 @@ class Plugin(Base):
self.logDebug("File %s not skipped, because it does not exists." % self.pyfile.name)
-
def clean(self):
""" clean everything and remove references """
if hasattr(self, "pyfile"):
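
The Plugin.py hunks above read the class-private __type attribute through Python's name mangling ("_ClassName__type"). A standalone sketch of that lookup, independent of pyLoad (the class name and attribute value here are illustrative):

    class FilerNet(object):
        __type = "hoster"    # stored by Python as _FilerNet__type

        def getPluginType(self):
            # rebuild the mangled attribute name, as the accessor added above does
            return getattr(self, "_%s__type" % self.__class__.__name__)

    print FilerNet().getPluginType()    # -> hoster
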
diff --git a/pyload/plugin/addon/UpdateManager.py b/pyload/plugin/addon/UpdateManager.py
index 41a1d7f2c..34ef771c2 100644
--- a/pyload/plugin/addon/UpdateManager.py
+++ b/pyload/plugin/addon/UpdateManager.py
@@ -12,6 +12,7 @@ from operator import itemgetter
from pyload.network.RequestFactory import getURL
from pyload.plugin.Addon import Expose, Addon, threaded
from pyload.utils import fs_join
+from pyload import __status_code__ as release_status
# Case-sensitive os.path.exists
@@ -31,29 +32,30 @@ class UpdateManager(Addon):
__type = "addon"
__version = "0.50"
- __config = [("activated" , "bool", "Activated" , True ),
- ("checkinterval", "int" , "Check interval in hours" , 8 ),
- ("autorestart" , "bool", "Auto-restart pyLoad when required" , True ),
- ("checkonstart" , "bool", "Check for updates on startup" , True ),
- ("checkperiod" , "bool", "Check for updates periodically" , True ),
- ("reloadplugins", "bool", "Monitor plugin code changes in debug mode", True ),
- ("nodebugupdate", "bool", "Don't update plugins in debug mode" , False)]
+ __config = [("activated", "bool", "Activated", True),
+ ("checkinterval", "int", "Check interval in hours", 8),
+ ("autorestart", "bool",
+ "Auto-restart pyLoad when required", True),
+ ("checkonstart", "bool", "Check for updates on startup", True),
+ ("checkperiod", "bool",
+ "Check for updates periodically", True),
+ ("reloadplugins", "bool",
+ "Monitor plugin code changes in debug mode", True),
+ ("nodebugupdate", "bool", "Don't update plugins in debug mode", False)]
__description = """ Check for updates """
__license = "GPLv3"
__authors = [("Walter Purcaro", "vuolter@gmail.com")]
- SERVER_URL = "http://updatemanager.pyload.org"
+ SERVER_URL = "http://updatemanager.pyload.org" if release_status == 5 else None
MIN_CHECK_INTERVAL = 3 * 60 * 60 #: 3 hours
-
def activate(self):
if self.checkonstart:
self.update()
self.initPeriodical()
-
def setup(self):
self.interval = 10
self.info = {'pyload': False, 'version': None, 'plugins': False, 'last_check': time.time()}
@@ -65,7 +67,6 @@ class UpdateManager(Addon):
else:
self.checkonstart = False
-
def periodical(self):
if self.core.debug:
if self.getConfig('reloadplugins'):
@@ -78,14 +79,13 @@ class UpdateManager(Addon):
and time.time() - max(self.MIN_CHECK_INTERVAL, self.getConfig('checkinterval') * 60 * 60) > self.info['last_check']:
self.update()
-
@Expose
def autoreloadPlugins(self):
""" reload and reindex all modified plugins """
modules = filter(
lambda m: m and (m.__name__.startswith("module.plugins.") or
m.__name__.startswith("userplugins.")) and
- m.__name__.count(".") >= 2, sys.modules.itervalues()
+ m.__name__.count(".") >= 2, sys.modules.itervalues()
)
reloads = []
@@ -108,7 +108,6 @@ class UpdateManager(Addon):
return True if self.core.pluginManager.reloadPlugins(reloads) else False
-
def server_response(self):
try:
return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()
@@ -116,7 +115,6 @@ class UpdateManager(Addon):
except Exception:
self.logWarning(_("Unable to contact server to get updates"))
-
@Expose
@threaded
def update(self):
@@ -129,7 +127,6 @@ class UpdateManager(Addon):
else:
self.core.api.unpauseServer()
-
def _update(self):
data = self.server_response()
@@ -159,7 +156,6 @@ class UpdateManager(Addon):
# 2 = Plugins updated, but restart required
return exitcode
-
def _updatePlugins(self, data):
""" check for plugin updates """
@@ -220,7 +216,7 @@ class UpdateManager(Addon):
plugins = getattr(self.core.pluginManager, "%sPlugins" % type)
- oldver = float(plugins[name]['v']) if name in plugins else None
+ oldver = float(plugins[name]['version']) if name in plugins else None
newver = float(version)
if not oldver:
@@ -230,8 +226,8 @@ class UpdateManager(Addon):
else:
continue
- self.logInfo(_(msg) % {'type' : type,
- 'name' : name,
+ self.logInfo(_(msg) % {'type': type,
+ 'name': name,
'oldver': oldver,
'newver': newver})
try:
@@ -239,10 +235,10 @@ class UpdateManager(Addon):
m = VERSION.search(content)
if m and m.group(2) == version:
- with open(fs_join("userplugins", prefix, filename), "wb") as f:
+ with open(fs_join("userplugins", type, filename), "wb") as f:
f.write(content)
- updated.append((prefix, name))
+ updated.append((type, name))
else:
raise Exception, _("Version mismatch")
@@ -269,7 +265,6 @@ class UpdateManager(Addon):
# 2 = Plugins updated, but restart required
return exitcode
-
@Expose
def removePlugins(self, type_plugins):
""" delete plugins from disk """
@@ -309,4 +304,5 @@ class UpdateManager(Addon):
id = (type, name)
removed.add(id)
- return list(removed) #: return a list of the plugins successfully removed
+ #: return a list of the plugins successfully removed
+ return list(removed)
diff --git a/pyload/plugin/captcha/AdYouLike.py b/pyload/plugin/captcha/AdYouLike.py
index 42441ee86..83fc4e1a3 100644
--- a/pyload/plugin/captcha/AdYouLike.py
+++ b/pyload/plugin/captcha/AdYouLike.py
@@ -15,11 +15,9 @@ class AdYouLike(Captcha):
__license = "GPLv3"
__authors = [("Walter Purcaro", "vuolter@gmail.com")]
-
AYL_PATTERN = r'Adyoulike\.create\s*\((.+?)\)'
CALLBACK_PATTERN = r'(Adyoulike\.g\._jsonp_\d+)'
-
def detect_key(self, html=None):
if not html:
if hasattr(self.plugin, "html") and self.plugin.html:
@@ -39,7 +37,6 @@ class AdYouLike(Captcha):
self.logDebug("Ayl or callback not found")
return None
-
def challenge(self, key=None, html=None):
if not key:
if self.detect_key(html):
@@ -56,8 +53,8 @@ class AdYouLike(Captcha):
ayl = json_loads(ayl)
html = self.plugin.req.load("http://api-ayl.appspot.com/challenge",
- get={'key' : ayl['adyoulike']['key'],
- 'env' : ayl['all']['env'],
+ get={'key': ayl['adyoulike']['key'],
+ 'env': ayl['all']['env'],
'callback': callback})
try:
challenge = json_loads(re.search(callback + r'\s*\((.+?)\)', html).group(1))
@@ -71,7 +68,6 @@ class AdYouLike(Captcha):
return self.result(ayl, challenge), challenge
-
def result(self, server, challenge):
# Adyoulike.g._jsonp_5579316662423138
# ({"translations":{"fr":{"instructions_visual":"Recopiez « Soonnight » ci-dessous :"}},
@@ -98,11 +94,11 @@ class AdYouLike(Captcha):
self.plugin.fail(errmsg)
raise AttributeError(errmsg)
- result = {'_ayl_captcha_engine' : "adyoulike",
- '_ayl_env' : server['all']['env'],
- '_ayl_tid' : challenge['tid'],
+ result = {'_ayl_captcha_engine': "adyoulike",
+ '_ayl_env': server['all']['env'],
+ '_ayl_tid': challenge['tid'],
'_ayl_token_challenge': challenge['token'],
- '_ayl_response' : response}
+ '_ayl_response': response}
self.logDebug("Result: %s" % result)
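
AdYouLike.challenge() receives the challenge as a JSONP reply and unwraps it with the callback name before handing it to json_loads, as in the `re.search(callback + r'\s*\((.+?)\)', html)` line above. A self-contained sketch of that unwrapping step, using a made-up callback reply (re.escape is added here for safety; the plugin builds the regex from the raw callback name):

import json
import re

# Made-up JSONP reply in the shape returned by api-ayl.appspot.com
callback = "Adyoulike.g._jsonp_5579316662423138"
html = callback + '({"tid": "abc123", "token": "tok456"})'

m = re.search(re.escape(callback) + r'\s*\((.+?)\)', html)
challenge = json.loads(m.group(1))
print(challenge["tid"], challenge["token"])   # abc123 tok456
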
diff --git a/pyload/plugin/captcha/ReCaptcha.py b/pyload/plugin/captcha/ReCaptcha.py
index 4f35ed30b..b1dfd813c 100644
--- a/pyload/plugin/captcha/ReCaptcha.py
+++ b/pyload/plugin/captcha/ReCaptcha.py
@@ -18,14 +18,12 @@ class ReCaptcha(Captcha):
__description = """ReCaptcha captcha service plugin"""
__license = "GPLv3"
__authors = [("pyLoad Team", "admin@pyload.org"),
- ("Walter Purcaro", "vuolter@gmail.com"),
- ("zapp-brannigan", "fuerst.reinje@web.de")]
-
+ ("Walter Purcaro", "vuolter@gmail.com"),
+ ("zapp-brannigan", "fuerst.reinje@web.de")]
KEY_V2_PATTERN = r'(?:data-sitekey=["\']|["\']sitekey["\']:\s*["\'])([\w-]+)'
KEY_V1_PATTERN = r'(?:recaptcha(?:/api|\.net)/(?:challenge|noscript)\?k=|Recaptcha\.create\s*\(\s*["\'])([\w-]+)'
-
def detect_key(self, html=None):
if not html:
if hasattr(self.plugin, "html") and self.plugin.html:
@@ -44,7 +42,6 @@ class ReCaptcha(Captcha):
self.logDebug("Key not found")
return None
-
def challenge(self, key=None, html=None, version=None):
if not key:
if self.detect_key(html):
@@ -66,7 +63,6 @@ class ReCaptcha(Captcha):
self.plugin.fail(errmsg)
raise TypeError(errmsg)
-
def _challenge_v1(self, key):
html = self.plugin.req.load("http://www.google.com/recaptcha/api/challenge",
get={'k': key})
@@ -83,7 +79,6 @@ class ReCaptcha(Captcha):
return self.result(server, challenge), challenge
-
def result(self, server, challenge):
result = self.plugin.decryptCaptcha("%simage" % server,
get={'c': challenge},
@@ -95,13 +90,12 @@ class ReCaptcha(Captcha):
return result
-
def _collectApiInfo(self):
html = self.plugin.req.load("http://www.google.com/recaptcha/api.js")
a = re.search(r'po.src = \'(.*?)\';', html).group(1)
vers = a.split("/")[5]
- self.logDebug("API version: %s" %vers)
+ self.logDebug("API version: %s" % vers)
language = a.split("__")[1].split(".")[0]
@@ -115,7 +109,6 @@ class ReCaptcha(Captcha):
return vers, language, jsh
-
def _prepareTimeAndRpc(self):
self.plugin.req.load("http://www.google.com/recaptcha/api2/demo")
@@ -131,7 +124,6 @@ class ReCaptcha(Captcha):
return millis, rpc
-
def _challenge_v2(self, key, parent=None):
if parent is None:
try:
@@ -145,26 +137,26 @@ class ReCaptcha(Captcha):
millis, rpc = self._prepareTimeAndRpc()
html = self.plugin.req.load("https://www.google.com/recaptcha/api2/anchor",
- get={'k' : key,
- 'hl' : language,
- 'v' : vers,
- 'usegapi' : "1",
- 'jsh' : "%s#id=IO_%s" % (jsh, millis),
- 'parent' : parent,
- 'pfname' : "",
+ get={'k': key,
+ 'hl': language,
+ 'v': vers,
+ 'usegapi': "1",
+ 'jsh': "%s#id=IO_%s" % (jsh, millis),
+ 'parent': parent,
+ 'pfname': "",
'rpctoken': rpc})
token1 = re.search(r'id="recaptcha-token" value="(.*?)">', html)
self.logDebug("Token #1: %s" % token1.group(1))
html = self.plugin.req.load("https://www.google.com/recaptcha/api2/frame",
- get={'c' : token1.group(1),
- 'hl' : language,
- 'v' : vers,
- 'bg' : botguardstring,
- 'k' : key,
+ get={'c': token1.group(1),
+ 'hl': language,
+ 'v': vers,
+ 'bg': botguardstring,
+ 'k': key,
'usegapi': "1",
- 'jsh' : jsh}).decode('unicode-escape')
+ 'jsh': jsh}).decode('unicode-escape')
token2 = re.search(r'"finput","(.*?)",', html)
self.logDebug("Token #2: %s" % token2.group(1))
@@ -173,17 +165,17 @@ class ReCaptcha(Captcha):
self.logDebug("Token #3: %s" % token3.group(1))
html = self.plugin.req.load("https://www.google.com/recaptcha/api2/reload",
- post={'k' : key,
- 'c' : token2.group(1),
+ post={'k': key,
+ 'c': token2.group(1),
'reason': "fi",
- 'fbg' : token3.group(1)})
+ 'fbg': token3.group(1)})
token4 = re.search(r'"rresp","(.*?)",', html)
self.logDebug("Token #4: %s" % token4.group(1))
millis_captcha_loading = int(round(time.time() * 1000))
captcha_response = self.plugin.decryptCaptcha("https://www.google.com/recaptcha/api2/payload",
- get={'c':token4.group(1), 'k':key},
+ get={'c': token4.group(1), 'k': key},
cookies=True,
forceUser=True)
response = b64encode('{"response":"%s"}' % captcha_response)
@@ -194,12 +186,12 @@ class ReCaptcha(Captcha):
timeToSolveMore = timeToSolve + int(float("0." + str(randint(1, 99999999))) * 500)
html = self.plugin.req.load("https://www.google.com/recaptcha/api2/userverify",
- post={'k' : key,
- 'c' : token4.group(1),
+ post={'k': key,
+ 'c': token4.group(1),
'response': response,
- 't' : timeToSolve,
- 'ct' : timeToSolveMore,
- 'bg' : botguardstring})
+ 't': timeToSolve,
+ 'ct': timeToSolveMore,
+ 'bg': botguardstring})
token5 = re.search(r'"uvresp","(.*?)",', html)
self.logDebug("Token #5: %s" % token5.group(1))
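
The two KEY_*_PATTERN constants near the top of the class are what detect_key() runs against the hoster page. A standalone sketch of that lookup, with a made-up HTML snippet and sitekey; the patterns are the ones shown in the hunk above:

import re

KEY_V2_PATTERN = r'(?:data-sitekey=["\']|["\']sitekey["\']:\s*["\'])([\w-]+)'
KEY_V1_PATTERN = r'(?:recaptcha(?:/api|\.net)/(?:challenge|noscript)\?k=|Recaptcha\.create\s*\(\s*["\'])([\w-]+)'

html = '<div class="g-recaptcha" data-sitekey="6LfExampleSiteKey-1234567890abcdefghijkl"></div>'

m = re.search(KEY_V2_PATTERN, html) or re.search(KEY_V1_PATTERN, html)
print(m.group(1) if m else "key not found")
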
diff --git a/pyload/plugin/crypter/NCryptIn.py b/pyload/plugin/crypter/NCryptIn.py
index 94808db3b..a7f1b0bb9 100644
--- a/pyload/plugin/crypter/NCryptIn.py
+++ b/pyload/plugin/crypter/NCryptIn.py
@@ -6,7 +6,7 @@ import re
from Crypto.Cipher import AES
from pyload.plugin.Crypter import Crypter
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
class NCryptIn(Crypter):
diff --git a/pyload/plugin/crypter/SafelinkingNet.py b/pyload/plugin/crypter/SafelinkingNet.py
index 0c93d6919..720766d22 100644
--- a/pyload/plugin/crypter/SafelinkingNet.py
+++ b/pyload/plugin/crypter/SafelinkingNet.py
@@ -6,7 +6,7 @@ from BeautifulSoup import BeautifulSoup
from pyload.utils import json_loads
from pyload.plugin.Crypter import Crypter
-from pyload.plugin.internal.captcha import SolveMedia
+from pyload.plugin.captcha import SolveMedia
class SafelinkingNet(Crypter):
diff --git a/pyload/plugin/hoster/BitshareCom.py b/pyload/plugin/hoster/BitshareCom.py
index f4be88401..ef65b1b80 100644
--- a/pyload/plugin/hoster/BitshareCom.py
+++ b/pyload/plugin/hoster/BitshareCom.py
@@ -4,7 +4,7 @@ from __future__ import with_statement
import re
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/CatShareNet.py b/pyload/plugin/hoster/CatShareNet.py
index 0aca297b8..08666e573 100644
--- a/pyload/plugin/hoster/CatShareNet.py
+++ b/pyload/plugin/hoster/CatShareNet.py
@@ -2,7 +2,7 @@
import re
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/CrockoCom.py b/pyload/plugin/hoster/CrockoCom.py
index 55bb126cd..b2748f6b1 100644
--- a/pyload/plugin/hoster/CrockoCom.py
+++ b/pyload/plugin/hoster/CrockoCom.py
@@ -2,7 +2,7 @@
import re
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/DateiTo.py b/pyload/plugin/hoster/DateiTo.py
index 6c160dfb9..c91557747 100644
--- a/pyload/plugin/hoster/DateiTo.py
+++ b/pyload/plugin/hoster/DateiTo.py
@@ -2,7 +2,7 @@
import re
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/DepositfilesCom.py b/pyload/plugin/hoster/DepositfilesCom.py
index 736350f0c..7dfd9dd5e 100644
--- a/pyload/plugin/hoster/DepositfilesCom.py
+++ b/pyload/plugin/hoster/DepositfilesCom.py
@@ -4,7 +4,7 @@ import re
from urllib import unquote
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/DlFreeFr.py b/pyload/plugin/hoster/DlFreeFr.py
index 83e7c5d77..127462d61 100644
--- a/pyload/plugin/hoster/DlFreeFr.py
+++ b/pyload/plugin/hoster/DlFreeFr.py
@@ -5,7 +5,7 @@ import re
from pyload.network.Browser import Browser
from pyload.network.CookieJar import CookieJar
-from pyload.plugin.internal.captcha import AdYouLike
+from pyload.plugin.captcha import AdYouLike
from pyload.plugin.internal.SimpleHoster import SimpleHoster, replace_patterns
from pyload.utils import json_loads
diff --git a/pyload/plugin/hoster/FilecloudIo.py b/pyload/plugin/hoster/FilecloudIo.py
index 07a743292..33256b6a8 100644
--- a/pyload/plugin/hoster/FilecloudIo.py
+++ b/pyload/plugin/hoster/FilecloudIo.py
@@ -3,7 +3,7 @@
import re
from pyload.utils import json_loads
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/FilepostCom.py b/pyload/plugin/hoster/FilepostCom.py
index 883c001ab..a32b46fbb 100644
--- a/pyload/plugin/hoster/FilepostCom.py
+++ b/pyload/plugin/hoster/FilepostCom.py
@@ -4,7 +4,7 @@ import re
import time
from pyload.utils import json_loads
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/FilerNet.py b/pyload/plugin/hoster/FilerNet.py
index 3138780e4..7cefa6d9f 100644
--- a/pyload/plugin/hoster/FilerNet.py
+++ b/pyload/plugin/hoster/FilerNet.py
@@ -9,7 +9,7 @@ import re
from urlparse import urljoin
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha.ReCaptcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
@@ -24,8 +24,7 @@ class FilerNet(SimpleHoster):
__description = """Filer.net hoster plugin"""
__license = "GPLv3"
__authors = [("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
+ ("Walter Purcaro", "vuolter@gmail.com")]
INFO_PATTERN = r'<h1 class="page-header">Free Download (?P<N>\S+) <small>(?P<S>[\w.]+) (?P<U>[\w^_]+)</small></h1>'
OFFLINE_PATTERN = r'Nicht gefunden'
@@ -34,7 +33,6 @@ class FilerNet(SimpleHoster):
LINK_FREE_PATTERN = LINK_PREMIUM_PATTERN = r'href="([^"]+)">Get download</a>'
-
def handleFree(self, pyfile):
inputs = self.parseHtmlForm(input_names={'token': re.compile(r'.+')})[1]
if 'token' not in inputs:
@@ -53,8 +51,8 @@ class FilerNet(SimpleHoster):
#@TODO: Check for v0.4.10
self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
self.load(pyfile.url, post={'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field' : response,
- 'hash' : inputs['hash']})
+ 'recaptcha_response_field': response,
+ 'hash': inputs['hash']})
self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
if 'location' in self.req.http.header.lower():
diff --git a/pyload/plugin/hoster/KingfilesNet.py b/pyload/plugin/hoster/KingfilesNet.py
index a47defebc..92942fbeb 100644
--- a/pyload/plugin/hoster/KingfilesNet.py
+++ b/pyload/plugin/hoster/KingfilesNet.py
@@ -2,7 +2,7 @@
import re
-from pyload.plugin.internal.captcha import SolveMedia
+from pyload.plugin.captcha import SolveMedia
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/LoadTo.py b/pyload/plugin/hoster/LoadTo.py
index 6616114e3..2c34b7c03 100644
--- a/pyload/plugin/hoster/LoadTo.py
+++ b/pyload/plugin/hoster/LoadTo.py
@@ -6,7 +6,7 @@
import re
-from pyload.plugin.internal.captcha import SolveMedia
+from pyload.plugin.captcha import SolveMedia
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/LuckyShareNet.py b/pyload/plugin/hoster/LuckyShareNet.py
index 3300f989c..b23428938 100644
--- a/pyload/plugin/hoster/LuckyShareNet.py
+++ b/pyload/plugin/hoster/LuckyShareNet.py
@@ -4,7 +4,7 @@ import re
from bottle import json_loads
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/OboomCom.py b/pyload/plugin/hoster/OboomCom.py
index f8a9fb8bb..c24e4c9ab 100644
--- a/pyload/plugin/hoster/OboomCom.py
+++ b/pyload/plugin/hoster/OboomCom.py
@@ -7,7 +7,7 @@ import re
from pyload.utils import json_loads
from pyload.plugin.Hoster import Hoster
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
class OboomCom(Hoster):
diff --git a/pyload/plugin/hoster/TurbobitNet.py b/pyload/plugin/hoster/TurbobitNet.py
index d8daf79f7..af28f7d74 100644
--- a/pyload/plugin/hoster/TurbobitNet.py
+++ b/pyload/plugin/hoster/TurbobitNet.py
@@ -10,7 +10,7 @@ from pycurl import HTTPHEADER
from urllib import quote
from pyload.network.RequestFactory import getURL
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster, timestamp
diff --git a/pyload/plugin/hoster/UpstoreNet.py b/pyload/plugin/hoster/UpstoreNet.py
index f022ef2dd..dcbf7ea9e 100644
--- a/pyload/plugin/hoster/UpstoreNet.py
+++ b/pyload/plugin/hoster/UpstoreNet.py
@@ -2,7 +2,7 @@
import re
-from pyload.plugin.internal.captcha import ReCaptcha
+from pyload.plugin.captcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/internal/SimpleHoster.py b/pyload/plugin/internal/SimpleHoster.py
index 60f13324f..08fef5a70 100644
--- a/pyload/plugin/internal/SimpleHoster.py
+++ b/pyload/plugin/internal/SimpleHoster.py
@@ -25,14 +25,14 @@ statusMap = dict((v, k) for k, v in _statusMap.iteritems())
#@TODO: Remove in 0.4.10 and redirect to self.error instead
def _error(self, reason, type):
- if not reason and not type:
- type = "unknown"
+ if not reason and not type:
+ type = "unknown"
- msg = _("%s error") % type.strip().capitalize() if type else _("Error")
- msg += ": %s" % reason.strip() if reason else ""
- msg += _(" | Plugin may be out of date")
+ msg = _("%s error") % type.strip().capitalize() if type else _("Error")
+ msg += ": %s" % reason.strip() if reason else ""
+ msg += _(" | Plugin may be out of date")
- raise Fail(msg)
+ raise Fail(msg)
#@TODO: Remove in 0.4.10
@@ -125,13 +125,13 @@ def parseFileInfo(plugin, url="", html=""):
# def create_getInfo(plugin):
# def generator(list):
- # for x in list:
- # yield x
+ # for x in list:
+ # yield x
# if hasattr(plugin, "parseInfos"):
- # fn = lambda urls: generator((info['name'], info['size'], info['status'], info['url']) for info in plugin.parseInfos(urls))
+ # fn = lambda urls: generator((info['name'], info['size'], info['status'], info['url']) for info in plugin.parseInfos(urls))
# else:
- # fn = lambda urls: generator(parseFileInfo(url) for url in urls)
+ # fn = lambda urls: generator(parseFileInfo(url) for url in urls)
# return fn
@@ -238,7 +238,7 @@ def secondsToMidnight(gmt=0):
if hasattr(td, 'total_seconds'):
res = td.total_seconds()
else: #: work-around for python 2.5 and 2.6 missing datetime.timedelta.total_seconds
- res = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
+ res = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / 10 ** 6
return int(res)
@@ -253,8 +253,7 @@ class SimpleHoster(Hoster):
__description = """Simple hoster plugin"""
__license = "GPLv3"
- __authors = [("Walter Purcaro", "vuolter@gmail.com" )]
-
+ __authors = [("Walter Purcaro", "vuolter@gmail.com")]
"""
Info patterns should be defined by each hoster:
@@ -310,27 +309,24 @@ class SimpleHoster(Hoster):
LOGIN_ACCOUNT = False #: Set to True to require account login
DISPOSITION = True #: Work-around to `filename*=UTF-8` bug; remove in 0.4.10
- directLink = getFileURL #@TODO: Remove in 0.4.10
-
+ directLink = getFileURL # @TODO: Remove in 0.4.10
@classmethod
- def parseInfos(cls, urls): #@TODO: Built-in in 0.4.10 core, then remove from plugins
+ def parseInfos(cls, urls): # @TODO: Built-in in 0.4.10 core, then remove from plugins
for url in urls:
url = replace_patterns(url, cls.URL_REPLACEMENTS)
yield cls.getInfo(url)
-
@classmethod
def apiInfo(cls, url="", get={}, post={}):
url = unquote(url)
url_p = urlparse(url)
- return {'name' : (url_p.path.split('/')[-1]
- or url_p.query.split('=', 1)[::-1][0].split('&', 1)[0]
- or url_p.netloc.split('.', 1)[0]),
- 'size' : 0,
+ return {'name': (url_p.path.split('/')[-1]
+ or url_p.query.split('=', 1)[::-1][0].split('&', 1)[0]
+ or url_p.netloc.split('.', 1)[0]),
+ 'size': 0,
'status': 3 if url else 8,
- 'url' : url}
-
+ 'url': url}
@classmethod
def getInfo(cls, url="", html=""):
@@ -411,19 +407,17 @@ class SimpleHoster(Hoster):
return info
-
def setup(self):
self.resumeDownload = self.multiDL = self.premium
-
def prepare(self):
- self.pyfile.error = "" #@TODO: Remove in 0.4.10
+ self.pyfile.error = "" # @TODO: Remove in 0.4.10
self.info = {}
self.html = ""
- self.link = "" #@TODO: Move to hoster class in 0.4.10
- self.directDL = False #@TODO: Move to hoster class in 0.4.10
- self.multihost = False #@TODO: Move to hoster class in 0.4.10
+ self.link = "" # @TODO: Move to hoster class in 0.4.10
+ self.directDL = False # @TODO: Move to hoster class in 0.4.10
+ self.multihost = False # @TODO: Move to hoster class in 0.4.10
if not self.getConfig('use_premium', True):
self.retryFree()
@@ -449,14 +443,12 @@ class SimpleHoster(Hoster):
self.pyfile.url = replace_patterns(self.pyfile.url, self.URL_REPLACEMENTS)
-
def preload(self):
self.html = self.load(self.pyfile.url, cookies=bool(self.COOKIES), decode=not self.TEXT_ENCODING)
if isinstance(self.TEXT_ENCODING, basestring):
self.html = unicode(self.html, self.TEXT_ENCODING)
-
def process(self, pyfile):
try:
self.prepare()
@@ -489,14 +481,13 @@ class SimpleHoster(Hoster):
self.downloadLink(self.link, self.DISPOSITION) #: Remove `self.DISPOSITION` in 0.4.10
self.checkFile()
- except Fail, e: #@TODO: Move to PluginThread in 0.4.10
+ except Fail, e: # @TODO: Move to PluginThread in 0.4.10
if self.premium:
self.logWarning(_("Premium download failed"))
self.retryFree()
else:
raise Fail(e)
-
def downloadLink(self, link, disposition=True):
if link and isinstance(link, basestring):
self.correctCaptcha()
@@ -508,7 +499,6 @@ class SimpleHoster(Hoster):
self.download(link, ref=False, disposition=disposition)
-
def checkFile(self, rules={}):
if self.cTask and not self.lastDownload:
self.invalidCaptcha()
@@ -523,14 +513,14 @@ class SimpleHoster(Hoster):
'Html error': re.compile(r'\A(?:\s*<.+>)?((?:[\w\s]*(?:[Ee]rror|ERROR)\s*\:?)?\s*\d{3})(?:\Z|\s+)')})
if not errmsg:
- for r, p in [('Html file' , re.compile(r'\A\s*<!DOCTYPE html') ),
+ for r, p in [('Html file', re.compile(r'\A\s*<!DOCTYPE html')),
('Request error', re.compile(r'([Aa]n error occured while processing your request)'))]:
if r not in rules:
rules[r] = p
- for r, a in [('Error' , "ERROR_PATTERN" ),
+ for r, a in [('Error', "ERROR_PATTERN"),
('Premium only', "PREMIUM_ONLY_PATTERN"),
- ('Wait error' , "WAIT_PATTERN" )]:
+ ('Wait error', "WAIT_PATTERN")]:
if r not in rules and hasattr(self, a):
rules[r] = getattr(self, a)
@@ -549,7 +539,6 @@ class SimpleHoster(Hoster):
self.logWarning("Check result: " + errmsg, "Waiting 1 minute and retry")
self.retry(3, 60, errmsg)
-
def checkErrors(self):
if not self.html:
self.logWarning(_("No html code to check"))
@@ -594,7 +583,6 @@ class SimpleHoster(Hoster):
self.info.pop('error', None)
-
def checkStatus(self, getinfo=True):
if not self.info or getinfo:
self.logDebug("Update file info...")
@@ -617,7 +605,6 @@ class SimpleHoster(Hoster):
finally:
self.logDebug("File status: %s" % statusMap[status])
-
def checkNameSize(self, getinfo=True):
if not self.info or getinfo:
self.logDebug("Update file info...")
@@ -645,7 +632,6 @@ class SimpleHoster(Hoster):
self.logDebug("File name: %s" % self.pyfile.name,
"File size: %s byte" % self.pyfile.size if self.pyfile.size > 0 else "File size: Unknown")
-
def checkInfo(self):
self.checkNameSize()
@@ -655,14 +641,12 @@ class SimpleHoster(Hoster):
self.checkStatus(getinfo=False)
-
#: Deprecated
def getFileInfo(self):
self.info = {}
self.checkInfo()
return self.info
-
def handleDirect(self, pyfile):
link = self.directLink(pyfile.url, self.resumeDownload)
@@ -673,11 +657,9 @@ class SimpleHoster(Hoster):
else:
self.logDebug("Direct download link not found")
-
def handleMulti(self, pyfile): #: Multi-hoster handler
pass
-
def handleFree(self, pyfile):
if not hasattr(self, 'LINK_FREE_PATTERN'):
self.logError(_("Free download not implemented"))
@@ -688,7 +670,6 @@ class SimpleHoster(Hoster):
else:
self.link = m.group(1)
-
def handlePremium(self, pyfile):
if not hasattr(self, 'LINK_PREMIUM_PATTERN'):
self.logError(_("Premium download not implemented"))
@@ -701,7 +682,6 @@ class SimpleHoster(Hoster):
else:
self.link = m.group(1)
-
def longWait(self, wait_time=None, max_tries=3):
if wait_time and isinstance(wait_time, (int, long, float)):
time_str = "%dh %dm" % divmod(wait_time / 60, 60)
@@ -715,11 +695,9 @@ class SimpleHoster(Hoster):
self.wait(wait_time, True)
self.retry(max_tries=max_tries, reason=_("Download limit reached"))
-
def parseHtmlForm(self, attr_str="", input_names={}):
return parseHtmlForm(attr_str, self.html, input_names)
-
def checkTrafficLeft(self):
if not self.account:
return True
@@ -735,8 +713,7 @@ class SimpleHoster(Hoster):
self.logInfo(_("Filesize: %i KiB, Traffic left for user %s: %i KiB") % (size, self.user, traffic))
return size <= traffic
-
- def getConfig(self, option, default=''): #@TODO: Remove in 0.4.10
+ def getConfig(self, option, default=''): # @TODO: Remove in 0.4.10
"""getConfig with default value - sublass may not implements all config options"""
try:
return self.getConf(option)
@@ -744,7 +721,6 @@ class SimpleHoster(Hoster):
except KeyError:
return default
-
def retryFree(self):
if not self.premium:
return
@@ -754,11 +730,9 @@ class SimpleHoster(Hoster):
self.retries = 0
raise Retry(_("Fallback to free download"))
-
#@TODO: Remove in 0.4.10
def wait(self, seconds=0, reconnect=None):
return _wait(self, seconds, reconnect)
-
def error(self, reason="", type="parse"):
return _error(self, reason, type)
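
apiInfo() in the SimpleHoster diff above derives a provisional file name purely from the URL: last path segment, else the first query value, else the host label. A self-contained sketch of that fallback chain (Python 2 imports with a Python 3 fallback so it can be run either way; the URLs are sample data):

try:                                        # Python 2, as in the plugin code
    from urllib import unquote
    from urlparse import urlparse
except ImportError:                         # Python 3 equivalent
    from urllib.parse import unquote, urlparse

def guess_name(url):
    url_p = urlparse(unquote(url))
    return (url_p.path.split('/')[-1]
            or url_p.query.split('=', 1)[::-1][0].split('&', 1)[0]
            or url_p.netloc.split('.', 1)[0])

print(guess_name("http://example.com/dl/archive%20v2.rar"))   # archive v2.rar
print(guess_name("http://example.com/?file=movie.mkv&x=1"))   # movie.mkv
print(guess_name("http://example.com/"))                      # example
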
diff --git a/pyload/plugin/internal/XFSHoster.py b/pyload/plugin/internal/XFSHoster.py
index e87b6b0ee..b1370eb93 100644
--- a/pyload/plugin/internal/XFSHoster.py
+++ b/pyload/plugin/internal/XFSHoster.py
@@ -6,7 +6,7 @@ import time
from random import random
from urlparse import urljoin, urlparse
-from pyload.plugin.internal.captcha import ReCaptcha, SolveMedia
+from pyload.plugin.captcha import ReCaptcha, SolveMedia
from pyload.plugin.internal.SimpleHoster import SimpleHoster, secondsToMidnight
from pyload.utils import html_unescape
diff --git a/pyload/utils/__init__.py b/pyload/utils/__init__.py
index 44bff909c..13567f1a2 100644
--- a/pyload/utils/__init__.py
+++ b/pyload/utils/__init__.py
@@ -70,6 +70,7 @@ def safe_filename(name):
else:
return remove_chars(name, u'\0\\"')
+
#: Deprecated method
def save_path(name):
return safe_filename(name)
@@ -79,6 +80,7 @@ def fs_join(*args):
""" joins a path, encoding aware """
return fs_encode(join(*[x if type(x) == unicode else decode(x) for x in args]))
+
#: Deprecated method
def save_join(*args):
return fs_join(*args)
@@ -91,7 +93,7 @@ if sys.getfilesystemencoding().startswith('ANSI'):
def fs_encode(string):
return safe_filename(encode(string))
- fs_decode = decode #decode utf8
+ fs_decode = decode # decode utf8
else:
fs_encode = fs_decode = lambda x: x # do nothing
@@ -99,7 +101,7 @@ else:
def get_console_encoding(enc):
if os.name == "nt":
- if enc == "cp65001": # aka UTF-8
+ if enc == "cp65001": # aka UTF-8
print "WARNING: Windows codepage 65001 is not supported."
enc = "cp850"
else:
@@ -170,7 +172,7 @@ def uniqify(seq): #: Originally by Dave Kirby
return [x for x in seq if x not in seen and not seen_add(x)]
-def parseFileSize(string, unit=None): #returns bytes
+def parseFileSize(string, unit=None): # returns bytes
if not unit:
m = re.match(r"([\d.,]+) *([a-zA-Z]*)", string.strip().lower())
if m:
@@ -184,7 +186,7 @@ def parseFileSize(string, unit=None): #returns bytes
else:
traffic = string
- #ignore case
+ # ignore case
unit = unit.lower().strip()
if unit in ("eb", "ebyte", "exabyte", "eib", "e"):
@@ -205,7 +207,7 @@ def parseFileSize(string, unit=None): #returns bytes
def lock(func):
def new(*args):
- #print "Handler: %s args: %s" % (func, args[1:])
+ # print "Handler: %s args: %s" % (func, args[1:])
args[0].lock.acquire()
try:
return func(*args)
@@ -234,7 +236,7 @@ def fixup(m):
except KeyError:
pass
- return text # leave as is
+ return text # leave as is
def has_method(obj, name):
@@ -265,4 +267,3 @@ def load_translation(name, locale, default="en"):
else:
translation.install(True)
return translation
-
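
parseFileSize(), touched in the hunk above, splits a human-readable size into number and unit before scaling to bytes. A simplified standalone sketch of that parsing (only the kB/MB/GB branches are reproduced; the original handles every unit from byte up to exabyte the same way):

import re

def parse_file_size(string, unit=None):     # returns bytes
    if not unit:
        m = re.match(r"([\d.,]+) *([a-zA-Z]*)", string.strip().lower())
        if not m:
            return 0
        traffic = float(m.group(1).replace(",", "."))
        unit = m.group(2)
    else:
        traffic = float(string)
    unit = unit.lower().strip()             # ignore case
    if unit in ("gb", "gbyte", "gigabyte", "gib", "g"):
        traffic *= 1 << 30
    elif unit in ("mb", "mbyte", "megabyte", "mib", "m"):
        traffic *= 1 << 20
    elif unit in ("kb", "kbyte", "kilobyte", "kib", "k"):
        traffic *= 1 << 10
    return int(traffic)

print(parse_file_size("1.5 GB"))    # 1610612736
print(parse_file_size("700 kB"))    # 716800
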
diff --git a/pyload/webui/app/pyloadweb.py b/pyload/webui/app/pyloadweb.py
index 8bf8060d4..3f83dd33a 100644
--- a/pyload/webui/app/pyloadweb.py
+++ b/pyload/webui/app/pyloadweb.py
@@ -25,6 +25,7 @@ from pyload.utils import formatSize, fs_join, fs_encode, fs_decode
# Helper
+
def pre_processor():
s = request.environ.get('beaker.session')
user = parse_userdata(s)
@@ -45,13 +46,12 @@ def pre_processor():
if info["plugins"] == "True":
plugins = True
-
- return {"user" : user,
- 'status' : status,
+ return {"user": user,
+ 'status': status,
'captcha': captcha,
- 'perms' : perms,
- 'url' : request.url,
- 'update' : update,
+ 'perms': perms,
+ 'url': request.url,
+ 'update': update,
'plugins': plugins}
@@ -59,7 +59,7 @@ def base(messages):
return render_to_response('base.html', {'messages': messages}, [pre_processor])
-## Views
+# Views
@error(403)
def error403(code):
return "The parameter you passed has the wrong format"
@@ -240,7 +240,8 @@ def get_download(path):
__TYPES = ("account", "addon", "container", "crypter", "extractor", "hook", "hoster", "internal", "ocr")
-__TYPE_REPLACES = ( ('_account', ' (Account)'), ('_addon', ' (Addon)'), ('_container', ''), ('_crypter', ' (Crypter)'), ('_extractor', ''), ('_hook', ' (Hook)'), ('_hoster', ' (Hoster)'))
+__TYPE_REPLACES = (('_account', ' (Account)'), ('_addon', ' (Addon)'), ('_container', ''), ('_crypter', ' (Crypter)'), ('_extractor', ''), ('_hook', ' (Hook)'), ('_hoster', ' (Hoster)'))
+
@route('/settings')
@login_required('SETTINGS')
@@ -258,9 +259,9 @@ def config():
desc = plugin[entry].description
name, none, type = desc.partition("_")
if type in __TYPES:
- if len([a for a,b in plugin.iteritems() if b.description.startswith(name+"_")]) > 1:
+ if len([a for a, b in plugin.iteritems() if b.description.startswith(name + "_")]) > 1:
for search, repl in __TYPE_REPLACES:
- if desc.endswith(search):
+ if desc.endswith(search):
desc = desc.replace(search, repl)
break
else:
@@ -279,7 +280,7 @@ def config():
if data.validuntil == -1:
data.validuntil = _("unlimited")
- elif not data.validuntil :
+ elif not data.validuntil:
data.validuntil = _("not available")
else:
t = time.localtime(data.validuntil)
@@ -296,8 +297,8 @@ def config():
data.options["limitdl"] = "0"
return render_to_response('settings.html',
- {'conf': {'plugin': plugin_menu, 'general': conf_menu, 'accs': accs}, 'types': PYLOAD.getAccountTypes()},
- [pre_processor])
+ {'conf': {'plugin': plugin_menu, 'general': conf_menu, 'accs': accs}, 'types': PYLOAD.getAccountTypes()},
+ [pre_processor])
@route('/filechooser')
@@ -386,8 +387,8 @@ def path(file="", path=""):
files = sorted(files, key=itemgetter('type', 'sort'))
return render_to_response('pathchooser.html',
- {'cwd': cwd, 'files': files, 'parentdir': parentdir, 'type': type, 'oldfile': oldfile,
- 'absolute': abs}, [])
+ {'cwd': cwd, 'files': files, 'parentdir': parentdir, 'type': type, 'oldfile': oldfile,
+ 'absolute': abs}, [])
@route('/logs')
@@ -437,7 +438,7 @@ def logs(item=-1):
if item < 1 or type(item) is not int:
item = 1 if len(log) - perpage + 1 < 1 else len(log) - perpage + 1
- if type(fro) is datetime: # we will search for datetime
+ if type(fro) is datetime: # we will search for datetime
item = -1
data = []
@@ -457,16 +458,16 @@ def logs(item=-1):
level = '?'
message = l
if item == -1 and dtime is not None and fro <= dtime:
- item = counter #found our datetime
+ item = counter # found our datetime
if item >= 0:
data.append({'line': counter, 'date': date + " " + time, 'level': level, 'message': message})
perpagecheck += 1
- if fro is None and dtime is not None: #if fro is not set, set it to the first shown line
+ if fro is None and dtime is not None: # if fro is not set, set it to the first shown line
fro = dtime
if perpagecheck >= perpage > 0:
break
- if fro is None: #still not set, empty log?
+ if fro is None: # still not set, empty log?
fro = datetime.now()
if reversed:
data.reverse()
@@ -490,7 +491,6 @@ def admin():
get_permission(data["perms"], data["permission"])
data["perms"]["admin"] = True if data["role"] is 0 else False
-
s = request.environ.get('beaker.session')
if request.environ.get('REQUEST_METHOD', "GET") == "POST":
for name in user:
@@ -536,7 +536,7 @@ def info():
"download": abspath(conf["general"]["download_folder"]["value"]),
"freespace": formatSize(PYLOAD.freeSpace()),
"remote": conf["remote"]["port"]["value"],
- "webif": conf["webinterface"]["port"]["value"],
+ "webif": conf["webui"]["port"]["value"],
"language": conf["general"]["language"]["value"]}
return render_to_response("info.html", data, [pre_processor])