author    | RaNaN <Mast3rRaNaN@hotmail.de> | 2013-10-08 18:47:27 +0200
committer | RaNaN <Mast3rRaNaN@hotmail.de> | 2013-10-08 18:47:27 +0200
commit    | c0f373528c55fc11d3ef592b3b1f7207306852c0 (patch)
tree      | 822f0e717d167043887be058a56540f47d9ab4e2 /pyload
parent    | gzip compression for api (diff)
download  | pyload-c0f373528c55fc11d3ef592b3b1f7207306852c0.tar.xz
adapter simplecrypter plugin
Diffstat (limited to 'pyload')
-rw-r--r-- | pyload/api/DownloadPreparingApi.py       |  4
-rw-r--r-- | pyload/plugins/Crypter.py                | 57
-rw-r--r-- | pyload/plugins/internal/SimpleCrypter.py | 30
-rw-r--r-- | pyload/threads/DecrypterThread.py        | 10
-rw-r--r-- | pyload/threads/InfoThread.py             |  2
-rw-r--r-- | pyload/threads/ThreadManager.py          |  8
-rw-r--r-- | pyload/utils/__init__.py                 |  2
-rw-r--r-- | pyload/web/api_app.py                    |  2
8 files changed, 51 insertions, 64 deletions
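Note: the core of this commit is adapting SimpleCrypter to the new Crypter API: the deprecated decrypt(pyfile) hook that filled self.packages is replaced by decryptURL(url) returning a Package, and getPackageNameAndFolder() becomes getPackageName(). A minimal subclass built on the adapted base could look roughly like the following sketch (plugin name, URL pattern and regexes are invented for illustration):

# Hypothetical plugin on top of the adapted SimpleCrypter (illustrative only).
from pyload.plugins.internal.SimpleCrypter import SimpleCrypter


class ExampleFolder(SimpleCrypter):
    __name__ = "ExampleFolder"
    __pattern__ = r"http://(www\.)?example\.com/folder/\w+"

    # used by getPackageName(); it now returns None when this does not match
    TITLE_PATTERN = r"<h1>(?P<title>.+?)</h1>"
    # used by getLinks(); decryptURL() wraps the matches in a Package
    LINK_PATTERN = r'<a class="link" href="([^"]+)"'

decryptURL() loads the page, collects the LINK_PATTERN matches (optionally across PAGES_PATTERN pages) and returns Package(name, links); it fails with "Could not extract any links" when nothing matches.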
diff --git a/pyload/api/DownloadPreparingApi.py b/pyload/api/DownloadPreparingApi.py
index 131f73b1d..a7e32c4eb 100644
--- a/pyload/api/DownloadPreparingApi.py
+++ b/pyload/api/DownloadPreparingApi.py
@@ -44,9 +44,9 @@ class DownloadPreparingApi(ApiComponent):
 
         #: TODO: withhold crypter, derypt or add later
         # initial result does not contain the crypter links
-        tmp = [(url, LinkStatus(url, url, -1, DS.Queued, pluginname)) for url, pluginname in hoster + crypter]
+        tmp = [(url, LinkStatus(url, url, -1, DS.Queued, pluginname)) for url, pluginname in hoster]
         data = parseNames(tmp)
-        rid = self.core.threadManager.createResultThread(self.primaryUID, data)
+        rid = self.core.threadManager.createResultThread(self.primaryUID, hoster + crypter)
 
         return OnlineCheck(rid, data)
diff --git a/pyload/plugins/Crypter.py b/pyload/plugins/Crypter.py
index 2a65a9da2..af3d5aba7 100644
--- a/pyload/plugins/Crypter.py
+++ b/pyload/plugins/Crypter.py
@@ -3,10 +3,9 @@
 from pyload.Api import LinkStatus, DownloadStatus as DS
 from pyload.utils import to_list, has_method, uniqify
 from pyload.utils.fs import exists, remove, fs_encode
-from pyload.utils.packagetools import parseNames
-
 from Base import Base, Retry
 
+
 class Package:
     """ Container that indicates that a new package should be created """
@@ -186,15 +185,6 @@ class Crypter(Base):
         """
         raise NotImplementedError
 
-    def generatePackages(self, urls):
-        """Generates :class:`Package` instances and names from urls. Useful for many different links and no\
-        given package name.
-
-        :param urls: list of urls
-        :return: list of `Package`
-        """
-        return [Package(name, purls) for name, purls in parseNames([(url, url) for url in urls]).iteritems()]
-
     def _decrypt(self, urls):
         """Internal method to select decrypting method
@@ -205,16 +195,9 @@
         # separate local and remote files
         content, urls = self.getLocalContent(urls)
+        result = []
 
-        if has_method(cls, "decryptURLs"):
-            self.setup()
-            result = to_list(self.decryptURLs(urls))
-        elif has_method(cls, "decryptURL"):
-            result = []
-            for url in urls:
-                self.setup()
-                result.extend(to_list(self.decryptURL(url)))
-        elif has_method(cls, "decrypt"):
+        if urls and has_method(cls, "decrypt"):
             self.logDebug("Deprecated .decrypt() method in Crypter plugin")
             result = []
             for url in urls:
@@ -222,20 +205,28 @@
                 self.setup()
                 self.decrypt(self.pyfile)
                 result.extend(self.convertPackages())
-        else:
-            if not has_method(cls, "decryptFile") or urls:
-                self.logDebug("No suited decrypting method was overwritten in plugin")
-                result = []
-
-            if has_method(cls, "decryptFile"):
-                for f, c in content:
+        elif urls:
+            method = True
+            try:
                 self.setup()
-                    result.extend(to_list(self.decryptFile(c)))
+                result = to_list(self.decryptURLs(urls))
+            except NotImplementedError:
+                method = False
+
+            # this will raise error if not implemented
+            if not method:
+                for url in urls:
+                    self.setup()
+                    result.extend(to_list(self.decryptURL(url)))
+
+        for f, c in content:
+            self.setup()
+            result.extend(to_list(self.decryptFile(c)))
+            try:
+                if f.startswith("tmp_"): remove(f)
+            except IOError:
+                self.logWarning(_("Could not remove file '%s'") % f)
+                self.core.print_exc()
 
         return result
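The reworked _decrypt() above picks the decryption hook in a fixed order: a deprecated decrypt() still wins if the plugin defines it, otherwise decryptURLs() is tried and a NotImplementedError from the base class falls back to per-URL decryptURL(); local content always goes through decryptFile(). A plugin therefore only overrides the granularity it needs, roughly as in this sketch (both plugin classes are made up):

# Sketch of the two remote hooks the new dispatch in _decrypt() supports.
from pyload.plugins.Crypter import Crypter, Package


class BatchExample(Crypter):
    def decryptURLs(self, urls):
        # batch hook: tried first by _decrypt()
        return [Package("batch", list(urls))]


class SingleExample(Crypter):
    def decryptURL(self, url):
        # per-URL hook: used when decryptURLs() raises NotImplementedError
        return Package("single", [url])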
diff --git a/pyload/plugins/internal/SimpleCrypter.py b/pyload/plugins/internal/SimpleCrypter.py
index f0fe0b764..e26bf6644 100644
--- a/pyload/plugins/internal/SimpleCrypter.py
+++ b/pyload/plugins/internal/SimpleCrypter.py
@@ -19,9 +19,8 @@
 import re
 
-from module.plugins.Crypter import Crypter
-from module.utils import html_unescape
-
+from pyload.plugins.Crypter import Crypter, Package
+from pyload.utils import html_unescape
 
 class SimpleCrypter(Crypter):
     __name__ = "SimpleCrypter"
@@ -52,11 +51,10 @@ class SimpleCrypter(Crypter):
     must return the html of the page number 'page_n'
     """
 
-    def decrypt(self, pyfile):
-        self.html = self.load(pyfile.url, decode=True)
-
-        package_name, folder_name = self.getPackageNameAndFolder()
+    def decryptURL(self, url):
+        self.html = self.load(url, decode=True)
+        package_name = self.getPackageName()
         self.package_links = self.getLinks()
 
         if hasattr(self, 'PAGES_PATTERN') and hasattr(self, 'loadPage'):
@@ -65,10 +63,11 @@
         self.logDebug('Package has %d links' % len(self.package_links))
 
         if self.package_links:
-            self.packages = [(package_name, self.package_links, folder_name)]
+            return Package(package_name, self.package_links)
         else:
             self.fail('Could not extract any links')
 
+
     def getLinks(self):
         """
         Returns the links extracted from self.html
@@ -76,18 +75,15 @@
         """
         return re.findall(self.LINK_PATTERN, self.html)
 
-    def getPackageNameAndFolder(self):
+    def getPackageName(self):
         if hasattr(self, 'TITLE_PATTERN'):
             m = re.search(self.TITLE_PATTERN, self.html)
             if m:
-                name = folder = html_unescape(m.group('title').strip())
-                self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
-                return name, folder
-
-        name = self.pyfile.package().name
-        folder = self.pyfile.package().folder
-        self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
-        return name, folder
+                name = html_unescape(m.group('title').strip())
+                self.logDebug("Found name [%s] in package info" % (name))
+                return name
+
+        return None
 
     def handleMultiPages(self):
         pages = re.search(self.PAGES_PATTERN, self.html)
diff --git a/pyload/threads/DecrypterThread.py b/pyload/threads/DecrypterThread.py
index e8b889ac8..22a2d0037 100644
--- a/pyload/threads/DecrypterThread.py
+++ b/pyload/threads/DecrypterThread.py
@@ -3,6 +3,7 @@
 from time import sleep
 
+from pyload.Api import LinkStatus, DownloadStatus as DS
 from pyload.utils import uniqify, accumulate
 from pyload.plugins.Base import Abort, Retry
 from pyload.plugins.Crypter import Package
@@ -34,7 +35,7 @@
         for p in packages:
             self.m.core.api.addPackage(p.name, p.getURLs(), pack.password)
 
-    def decrypt(self, plugin_map, password=None):
+    def decrypt(self, plugin_map, password=None, err=None):
         result = []
         # TODO QUEUE_DECRYPT
@@ -54,6 +55,11 @@
                 plugin.logInfo(_("Decrypting aborted"))
             except Exception, e:
                 plugin.logError(_("Decrypting failed"), e)
+
+                # generate error linkStatus
+                if err:
+                    plugin_result.extend(LinkStatus(url, url, -1, DS.Failed, name) for url in urls)
+
                 if self.core.debug:
                     self.core.print_exc()
                     self.writeDebugReport(plugin.__name__, plugin=plugin)
@@ -75,7 +81,7 @@
                     pack_names[p.name].urls.extend(p.urls)
                 else:
                     if not p.name:
-                        urls.append(p)
+                        urls.extend(p.links)
                     else:
                         pack_names[p.name] = p
             else:
diff --git a/pyload/threads/InfoThread.py b/pyload/threads/InfoThread.py
index 8aa5e2d24..f516d2cca 100644
--- a/pyload/threads/InfoThread.py
+++ b/pyload/threads/InfoThread.py
@@ -37,7 +37,7 @@ class InfoThread(DecrypterThread):
 
         if crypter:
             # decrypt them
-            links, packages = self.decrypt(crypter)
+            links, packages = self.decrypt(crypter, err=True)
             # push these as initial result and save package names
             self.updateResult(links)
             for pack in packages:
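The err flag added above changes what DecrypterThread.decrypt() does when a crypter plugin raises: instead of dropping the links, it now reports a failed LinkStatus per URL, so InfoThread (which passes err=True) can surface them in the online check started by DownloadPreparingApi. The generated entries mirror the queued ones, roughly as below (standalone illustration, not the actual thread code):

from pyload.Api import LinkStatus, DownloadStatus as DS

def failed_links(urls, plugin_name):
    # one DS.Failed entry per URL of the plugin that raised during decryption
    return [LinkStatus(url, url, -1, DS.Failed, plugin_name) for url in urls]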
diff --git a/pyload/threads/ThreadManager.py b/pyload/threads/ThreadManager.py
index 07b0cd6e9..55cfcbfd2 100644
--- a/pyload/threads/ThreadManager.py
+++ b/pyload/threads/ThreadManager.py
@@ -94,13 +94,7 @@ class ThreadManager:
         oc = OnlineCheck(rid, user)
         self.infoResults[rid] = oc
 
-        # maps url to plugin
-        urls = []
-        for links in data.itervalues():
-            for link in links:
-                urls.append((link.url, link.plugin))
-
-        InfoThread(self, user, urls, oc=oc)
+        InfoThread(self, user, data, oc=oc)
 
         return rid
diff --git a/pyload/utils/__init__.py b/pyload/utils/__init__.py
index 1655be857..577213dd1 100644
--- a/pyload/utils/__init__.py
+++ b/pyload/utils/__init__.py
@@ -193,7 +193,7 @@ def fixup(m):
 
 def has_method(obj, name):
     """ checks if 'name' was defined in obj, (false if it was inhereted) """
-    return name in obj.__dict__
+    return hasattr(obj, '__dict__') and name in obj.__dict__
 
 def accumulate(it, inv_map=None):
     """ accumulate (key, value) data to {value : [keylist]} dictionary """
diff --git a/pyload/web/api_app.py b/pyload/web/api_app.py
index 56e901b06..39747d5ea 100644
--- a/pyload/web/api_app.py
+++ b/pyload/web/api_app.py
@@ -25,7 +25,7 @@ except ImportError:
 def json_response(obj):
     accept = 'gzip' in request.headers.get('Accept-Encoding', '')
     result = dumps(obj)
-    # don't compress small files
+    # don't compress small string
    if gzip and accept and len(result) > 500:
         response.headers['Vary'] = 'Accept-Encoding'
         response.headers['Content-Encoding'] = 'gzip'
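The has_method() change is a small robustness fix: for objects without a __dict__ (for example instances of classes that use __slots__), the old "name in obj.__dict__" raised AttributeError, while the guarded version simply answers False. A quick sanity check of that behaviour (assuming Python 2, as used by the rest of the codebase; the sample classes are invented):

def has_method(obj, name):
    """ checks if 'name' was defined in obj, (false if it was inhereted) """
    return hasattr(obj, '__dict__') and name in obj.__dict__


class Slotted(object):
    __slots__ = ("x",)      # instances carry no __dict__


class Plugin(object):
    def decryptURL(self, url):
        return [url]


print has_method(Slotted(), "x")          # False instead of AttributeError
print has_method(Plugin, "decryptURL")    # True: defined directly on the class
print has_method(Plugin(), "decryptURL")  # False: lives on the class, not the instance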