| field | value |
|---|---|
| author | 2014-07-16 00:46:26 +0200 |
| committer | 2014-07-16 00:46:26 +0200 |
| commit | a1e78f33dc2b0b6777fdcbc415673f3965b25542 (patch) |
| tree | 4f2f0c9d7592482409740be8647f8f6849d30681 /module/plugins |
| parent | [StealthTo] Mark dead (diff) |
| download | pyload-a1e78f33dc2b0b6777fdcbc415673f3965b25542.tar.xz |
Prefer self.urls and self.packages for adding links
Diffstat (limited to 'module/plugins')
27 files changed, 60 insertions, 104 deletions
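In practical terms, the commit replaces direct calls such as `self.core.files.addLinks(...)` with the declarative `self.urls` and `self.packages` attributes, which the plugin framework is expected to collect after `decrypt()` returns. Below is a minimal sketch of the preferred pattern; the `ExampleFolder` plugin, its `__pattern__`, and its `LINK_PATTERN` are hypothetical, and only the `Crypter` base class, `self.urls`, `self.packages`, and `self.fail` come from the diff that follows.

```python
# -*- coding: utf-8 -*-
# Hypothetical plugin illustrating the preferred pattern; not part of this commit.

import re

from module.plugins.Crypter import Crypter


class ExampleFolder(Crypter):
    __name__ = "ExampleFolder"
    __pattern__ = r"http://(?:www\.)?example\.com/folder/\w+"  # assumed URL scheme

    LINK_PATTERN = r'<a class="file" href="([^"]+)">'  # assumed page markup

    def decrypt(self, pyfile):
        html = self.load(pyfile.url)

        # Preferred: collect plain links in self.urls ...
        self.urls.extend(re.findall(self.LINK_PATTERN, html))

        # ... or queue a named package as a (name, links, folder) tuple:
        #self.packages.append((pyfile.package().name, self.urls, pyfile.package().folder))

        if not self.urls:
            self.fail('Could not extract any links')
```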
```diff
diff --git a/module/plugins/container/CCF.py b/module/plugins/container/CCF.py
index a55357af4..a5a071820 100644
--- a/module/plugins/container/CCF.py
+++ b/module/plugins/container/CCF.py
@@ -41,4 +41,4 @@ class CCF(Container):
         tempdlc.write(re.search(r'<dlc>(.*)</dlc>', tempdlc_content, re.DOTALL).group(1))
         tempdlc.close()
 
-        self.packages.append((tempdlc_name, [tempdlc_name], tempdlc_name))
+        self.urls = [tempdlc_name]
diff --git a/module/plugins/container/RSDF.py b/module/plugins/container/RSDF.py
index 2444ec752..c35efacc6 100644
--- a/module/plugins/container/RSDF.py
+++ b/module/plugins/container/RSDF.py
@@ -40,14 +40,12 @@ class RSDF(Container):
             data = binascii.unhexlify(''.join(data.split()))
             data = data.splitlines()
 
-            links = []
             for link in data:
                 if not link:
                     continue
                 link = base64.b64decode(link)
                 link = obj.decrypt(link)
                 decryptedUrl = link.replace('CCF: ', '')
-                links.append(decryptedUrl)
+                self.urls.append(decryptedUrl)
 
-            self.logDebug("%s: adding package %s with %d links" % (self.__name__, pyfile.package().name, len(links)))
-            self.packages.append((pyfile.package().name, links))
+            self.log.debug("%s: adding package %s with %d links" % (self.__name__,pyfile.package().name,len(links)))
diff --git a/module/plugins/crypter/ChipDe.py b/module/plugins/crypter/ChipDe.py
index 8e48b1085..f59903170 100644
--- a/module/plugins/crypter/ChipDe.py
+++ b/module/plugins/crypter/ChipDe.py
@@ -19,9 +19,9 @@ class ChipDe(Crypter):
     def decrypt(self, pyfile):
         self.html = self.load(pyfile.url)
         try:
-            url = re.search(r'"(http://video.chip.de/\d+?/.*)"', self.html).group(1)
-            self.logDebug('The file URL is %s' % url)
+            f = re.search(r'"(http://video.chip.de/\d+?/.*)"', self.html)
         except:
             self.fail('Failed to find the URL')
-
-        self.packages.append((pyfile.package().name, [url], pyfile.package().folder))
+        else:
+            self.urls = [f.group(1)]
+            self.logDebug('The file URL is %s' % self.urls[0])
diff --git a/module/plugins/crypter/CzshareComFolder.py b/module/plugins/crypter/CzshareComFolder.py
index c60d5a87d..e0c311ba8 100644
--- a/module/plugins/crypter/CzshareComFolder.py
+++ b/module/plugins/crypter/CzshareComFolder.py
@@ -17,19 +17,15 @@ class CzshareComFolder(Crypter):
 
     FOLDER_PATTERN = r'<tr class="subdirectory">\s*<td>\s*<table>(.*?)</table>'
     LINK_PATTERN = r'<td class="col2"><a href="([^"]+)">info</a></td>'
-    #NEXT_PAGE_PATTERN = r'<a class="next " href="/([^"]+)"> </a>'
 
     def decrypt(self, pyfile):
         html = self.load(pyfile.url)
-        new_links = []
 
         found = re.search(self.FOLDER_PATTERN, html, re.DOTALL)
         if not found:
             self.fail("Parse error (FOLDER)")
 
-        new_links.extend(re.findall(self.LINK_PATTERN, found.group(1)))
-        if new_links:
-            self.core.files.addLinks(new_links, pyfile.package().id)
-        else:
+        self.urls.extend(re.findall(self.LINK_PATTERN, found.group(1)))
+        if not self.urls:
             self.fail('Could not extract any links')
diff --git a/module/plugins/crypter/DDLMusicOrg.py b/module/plugins/crypter/DDLMusicOrg.py
index fcdd507a7..c80ec471e 100644
--- a/module/plugins/crypter/DDLMusicOrg.py
+++ b/module/plugins/crypter/DDLMusicOrg.py
@@ -43,6 +43,6 @@ class DDLMusicOrg(Crypter):
                                            "linknr": linknr})
         m = re.search(r"<form id=\"ff\" action=\"(.*?)\" method=\"post\">", htmlwithlink)
         if m:
-            self.packages.append((pyfile.package().name, [m.group(1)], pyfile.package().folder))
+            self.urls = [m.group(1)]
         else:
             self.retry()
diff --git a/module/plugins/crypter/Dereferer.py b/module/plugins/crypter/Dereferer.py
index 6870f2f24..405893560 100644
--- a/module/plugins/crypter/Dereferer.py
+++ b/module/plugins/crypter/Dereferer.py
@@ -35,4 +35,4 @@ class Dereferer(Crypter):
     def decrypt(self, pyfile):
         link = re.match(self.__pattern__, pyfile.url).group('url')
 
-        self.core.files.addLinks([urllib.unquote(link).rstrip('+')], pyfile.package().id)
+        self.urls = [urllib.unquote(link).rstrip('+')]
diff --git a/module/plugins/crypter/DontKnowMe.py b/module/plugins/crypter/DontKnowMe.py
index d05355c2e..dac385e7c 100644
--- a/module/plugins/crypter/DontKnowMe.py
+++ b/module/plugins/crypter/DontKnowMe.py
@@ -22,4 +22,4 @@ class DontKnowMe(Crypter):
     def decrypt(self, pyfile):
         link = re.findall(self.LINK_PATTERN, pyfile.url)[0]
 
-        self.core.files.addLinks([urllib.unquote(link)], pyfile.package().id)
+        self.urls = [urllib.unquote(link)]
diff --git a/module/plugins/crypter/DuckCryptInfo.py b/module/plugins/crypter/DuckCryptInfo.py
index 6f5efdd1b..aa3a6d2a1 100644
--- a/module/plugins/crypter/DuckCryptInfo.py
+++ b/module/plugins/crypter/DuckCryptInfo.py
@@ -52,8 +52,6 @@ class DuckCryptInfo(Crypter):
     def handleLink(self, url):
         src = self.load(url)
         soup = BeautifulSoup(src)
-        link = soup.find("iframe")['src']
-        if not link:
+        self.urls = [soup.find("iframe")["src"]]
+        if not self.urls:
             self.logDebug('no links found - (Plugin out of date?)')
-        else:
-            self.core.files.addLinks([link], self.pyfile.package().id)
diff --git a/module/plugins/crypter/EmbeduploadCom.py b/module/plugins/crypter/EmbeduploadCom.py
index 6126527a9..823e3f25f 100644
--- a/module/plugins/crypter/EmbeduploadCom.py
+++ b/module/plugins/crypter/EmbeduploadCom.py
@@ -24,7 +24,6 @@ class EmbeduploadCom(Crypter):
     def decrypt(self, pyfile):
         self.html = self.load(pyfile.url, decode=True)
         tmp_links = []
-        new_links = []
 
         found = re.findall(self.LINK_PATTERN, self.html)
         if found:
@@ -32,21 +31,20 @@ class EmbeduploadCom(Crypter):
             prefered_set = map(lambda s: s.lower().split('.')[0], prefered_set)
             print "PF", prefered_set
             tmp_links.extend([x[1] for x in found if x[0] in prefered_set])
-            self.getLocation(tmp_links, new_links)
+            self.urls = self.getLocation(tmp_links)
 
-            if not new_links:
+            if not self.urls:
                 ignored_set = set(self.getConfig("ignoredHoster").split('|'))
                 ignored_set = map(lambda s: s.lower().split('.')[0], ignored_set)
                 print "IG", ignored_set
                 tmp_links.extend([x[1] for x in found if x[0] not in ignored_set])
-                self.getLocation(tmp_links, new_links)
+                self.urls = self.getLocation(tmp_links)
 
-        if new_links:
-            self.core.files.addLinks(new_links, pyfile.package().id)
-        else:
+        if not self.urls:
             self.fail('Could not extract any links')
 
-    def getLocation(self, tmp_links, new_links):
+    def getLocation(self, tmp_links):
+        new_links = []
         for link in tmp_links:
             try:
                 header = self.load(link, just_header=True)
@@ -54,3 +52,4 @@
                     new_links.append(header['location'])
             except BadHeader:
                 pass
+        return new_links
diff --git a/module/plugins/crypter/FileserveComFolder.py b/module/plugins/crypter/FileserveComFolder.py
index 30662f02d..28d89c75d 100644
--- a/module/plugins/crypter/FileserveComFolder.py
+++ b/module/plugins/crypter/FileserveComFolder.py
@@ -32,6 +32,6 @@ class FileserveComFolder(Crypter):
         new_links.extend(re.findall(self.LINK_PATTERN, folder.group(1)))
 
         if new_links:
-            self.core.files.addLinks(map(lambda s: "http://fileserve.com%s" % s, new_links), pyfile.package().id)
+            self.urls = [map(lambda s: "http://fileserve.com%s" % s, new_links)]
         else:
             self.fail('Could not extract any links')
diff --git a/module/plugins/crypter/FourChanOrg.py b/module/plugins/crypter/FourChanOrg.py
index dc0cdcb6c..a911c64d6 100644
--- a/module/plugins/crypter/FourChanOrg.py
+++ b/module/plugins/crypter/FourChanOrg.py
@@ -21,10 +21,5 @@ class FourChanOrg(Crypter):
     def decrypt(self, pyfile):
         pagehtml = self.load(pyfile.url)
-
         images = set(re.findall(r'(images\.4chan\.org/[^/]*/src/[^"<]*)', pagehtml))
-        urls = []
-        for image in images:
-            urls.append("http://" + image)
-
-        self.core.files.addLinks(urls, pyfile.package().id)
+        self.urls = ["http://" + image for image in images]
diff --git a/module/plugins/crypter/GooGl.py b/module/plugins/crypter/GooGl.py
index 37531c11e..52bf93ed3 100644
--- a/module/plugins/crypter/GooGl.py
+++ b/module/plugins/crypter/GooGl.py
@@ -38,6 +38,6 @@ class GooGl(Crypter):
         rep = json_loads(rep)
 
         if 'longUrl' in rep:
-            self.core.files.addLinks([rep['longUrl']], pyfile.package().id)
+            self.urls = [rep['longUrl']]
         else:
             self.fail('Unable to expand shortened link')
diff --git a/module/plugins/crypter/HoerbuchIn.py b/module/plugins/crypter/HoerbuchIn.py
index df81cc207..b46293bc7 100644
--- a/module/plugins/crypter/HoerbuchIn.py
+++ b/module/plugins/crypter/HoerbuchIn.py
@@ -33,11 +33,9 @@ class HoerbuchIn(Crypter):
                 package = "%s (%s)" % (abookname, a.previousSibling.previousSibling.text[:-1])
                 links = self.decryptFolder(a['href'])
 
-                self.packages.append((package, links, pyfile.package().folder))
+                self.packages.append((package, links, package))
         else:
-            links = self.decryptFolder(pyfile.url)
-
-            self.packages.append((pyfile.package().name, links, pyfile.package().folder))
+            self.urls = self.decryptFolder(pyfile.url)
 
     def decryptFolder(self, url):
         m = self.protection.search(url)
diff --git a/module/plugins/crypter/HotfileFolderCom.py b/module/plugins/crypter/HotfileFolderCom.py
index 02c937553..433dc9755 100644
--- a/module/plugins/crypter/HotfileFolderCom.py
+++ b/module/plugins/crypter/HotfileFolderCom.py
@@ -27,4 +27,4 @@ class HotfileFolderCom(Crypter):
         new_links = [x[0] for x in new_links]
 
-        self.packages.append((name, new_links, name))
+        self.packages = [(name, new_links, name)]
diff --git a/module/plugins/crypter/LetitbitNetFolder.py b/module/plugins/crypter/LetitbitNetFolder.py
index 6ea27813c..19e780690 100644
--- a/module/plugins/crypter/LetitbitNetFolder.py
+++ b/module/plugins/crypter/LetitbitNetFolder.py
@@ -22,15 +22,11 @@ class LetitbitNetFolder(Crypter):
     def decrypt(self, pyfile):
         html = self.load(pyfile.url)
 
-        new_links = []
-
         folder = re.search(self.FOLDER_PATTERN, html, re.DOTALL)
         if not folder:
             self.fail("Parse error (FOLDER)")
 
-        new_links.extend(re.findall(self.LINK_PATTERN, folder.group(0)))
+        self.urls.extend(re.findall(self.LINK_PATTERN, folder.group(0)))
 
-        if new_links:
-            self.core.files.addLinks(new_links, pyfile.package().id)
-        else:
+        if not self.urls:
             self.fail('Could not extract any links')
diff --git a/module/plugins/crypter/LinkdecrypterCom.py b/module/plugins/crypter/LinkdecrypterCom.py
index 96fe11951..31c4d36b7 100644
--- a/module/plugins/crypter/LinkdecrypterCom.py
+++ b/module/plugins/crypter/LinkdecrypterCom.py
@@ -41,10 +41,8 @@ class LinkdecrypterCom(Crypter):
         self.passwords = self.getPassword().splitlines()
 
         # API not working anymore
-        new_links = self.decryptHTML()
-        if new_links:
-            self.core.files.addLinks(new_links, pyfile.package().id)
-        else:
+        self.urls = self.decryptHTML()
+        if not self.urls:
             self.fail('Could not extract any links')
 
     def decryptAPI(self):
diff --git a/module/plugins/crypter/LixIn.py b/module/plugins/crypter/LixIn.py
index d3c41bd4c..e609c9da4 100644
--- a/module/plugins/crypter/LixIn.py
+++ b/module/plugins/crypter/LixIn.py
@@ -54,8 +54,6 @@ class LixIn(Crypter):
         matches = re.search(self.LINK_PATTERN, self.html)
         if not matches:
             self.fail("can't find destination url")
-
-        new_link = matches.group("link")
-        self.logDebug("Found link %s, adding to package" % new_link)
-
-        self.packages.append((pyfile.package().name, [new_link], pyfile.package().name))
+        else:
+            self.urls = [matches.group("link")]
+            self.logDebug("Found link %s, adding to package" % self.urls[0])
diff --git a/module/plugins/crypter/MediafireComFolder.py b/module/plugins/crypter/MediafireComFolder.py
index 728bd6bbb..5f1b9ad1b 100644
--- a/module/plugins/crypter/MediafireComFolder.py
+++ b/module/plugins/crypter/MediafireComFolder.py
@@ -22,8 +22,6 @@ class MediafireComFolder(Crypter):
 
     def decrypt(self, pyfile):
-        new_links = []
-
         url, result = checkHTMLHeader(pyfile.url)
         self.logDebug('Location (%d): %s' % (result, url))
@@ -33,7 +31,7 @@ class MediafireComFolder(Crypter):
             found = re.search(self.FILE_URL_PATTERN, html)
             if found:
                 # file page
-                new_links.append("http://www.mediafire.com/file/%s" % found.group(1))
+                self.urls.append("http://www.mediafire.com/file/%s" % found.group(1))
             else:
                 # folder page
                 found = re.search(self.FOLDER_KEY_PATTERN, html)
@@ -46,15 +44,13 @@ class MediafireComFolder(Crypter):
                     #self.logInfo(json_resp)
                     if json_resp['response']['result'] == "Success":
                         for link in json_resp['response']['folder_info']['files']:
-                            new_links.append("http://www.mediafire.com/file/%s" % link['quickkey'])
+                            self.urls.append("http://www.mediafire.com/file/%s" % link['quickkey'])
                     else:
                         self.fail(json_resp['response']['message'])
         elif result == 1:
             self.offline()
         else:
-            new_links.append(url)
+            self.urls.append(url)
 
-        if new_links:
-            self.core.files.addLinks(new_links, pyfile.package().id)
-        else:
+        if not self.urls:
             self.fail('Could not extract any links')
diff --git a/module/plugins/crypter/MultiloadCz.py b/module/plugins/crypter/MultiloadCz.py
index e6ffbe9b0..0d5fa4c94 100644
--- a/module/plugins/crypter/MultiloadCz.py
+++ b/module/plugins/crypter/MultiloadCz.py
@@ -23,23 +23,20 @@ class MultiloadCz(Crypter):
     def decrypt(self, pyfile):
         self.html = self.load(pyfile.url, decode=True)
-        new_links = []
 
         if re.match(self.__pattern__, pyfile.url).group(1) == "slozka":
             found = re.search(self.FOLDER_PATTERN, self.html)
             if found is not None:
-                new_links.extend(found.group(1).split())
+                self.urls.extend(found.group(1).split())
         else:
             found = re.findall(self.LINK_PATTERN, self.html)
             if found:
                 prefered_set = set(self.getConfig("usedHoster").split('|'))
-                new_links.extend([x[1] for x in found if x[0] in prefered_set])
+                self.urls.extend([x[1] for x in found if x[0] in prefered_set])
 
-                if not new_links:
+                if not self.urls:
                     ignored_set = set(self.getConfig("ignoredHoster").split('|'))
-                    new_links.extend([x[1] for x in found if x[0] not in ignored_set])
+                    self.urls.extend([x[1] for x in found if x[0] not in ignored_set])
 
-        if new_links:
-            self.core.files.addLinks(new_links, pyfile.package().id)
-        else:
+        if not self.urls:
             self.fail('Could not extract any links')
diff --git a/module/plugins/crypter/MultiuploadCom.py b/module/plugins/crypter/MultiuploadCom.py
index cdc685fab..84d8d0830 100644
--- a/module/plugins/crypter/MultiuploadCom.py
+++ b/module/plugins/crypter/MultiuploadCom.py
@@ -32,34 +32,31 @@ class MultiuploadCom(Crypter):
             "d": re.match(self.__pattern__, pyfile.url).group(1),
             "r": str(int(time() * 1000))
         }))
-        new_links = []
 
         prefered_set = map(lambda s: s.lower().split('.')[0], set(self.getConfig("preferedHoster").split('|')))
 
         if ml_url and 'multiupload' in prefered_set:
-            new_links.append(ml_url)
+            self.urls.append(ml_url)
 
         for link in json_list:
             if link['service'].lower() in prefered_set and int(link['status']) and not int(link['deleted']):
                 url = self.getLocation(link['url'])
                 if url:
-                    new_links.append(url)
+                    self.urls.append(url)
 
-        if not new_links:
+        if not self.urls:
             ignored_set = map(lambda s: s.lower().split('.')[0], set(self.getConfig("ignoredHoster").split('|')))
 
             if 'multiupload' not in ignored_set:
-                new_links.append(ml_url)
+                self.urls.append(ml_url)
 
             for link in json_list:
                 if link['service'].lower() not in ignored_set and int(link['status']) and not int(link['deleted']):
                     url = self.getLocation(link['url'])
                     if url:
-                        new_links.append(url)
+                        self.urls.append(url)
 
-        if new_links:
-            self.core.files.addLinks(new_links, pyfile.package().id)
-        else:
+        if not self.urls:
             self.fail('Could not extract any links')
 
     def getLocation(self, url):
diff --git a/module/plugins/crypter/OneKhDe.py b/module/plugins/crypter/OneKhDe.py
index b370df1f7..91c591ec1 100644
--- a/module/plugins/crypter/OneKhDe.py
+++ b/module/plugins/crypter/OneKhDe.py
@@ -31,10 +31,8 @@ class OneKhDe(Crypter):
     def proceed(self, url, location):
         url = self.parent.url
         self.html = self.req.load(url)
-        temp_links = []
         link_ids = re.findall(r"<a id=\"DownloadLink_(\d*)\" href=\"http://1kh.de/", self.html)
         for id in link_ids:
             new_link = unescape(
                 re.search("width=\"100%\" src=\"(.*)\"></iframe>", self.req.load("http://1kh.de/l/" + id)).group(1))
-            temp_links.append(new_link)
-        self.links = temp_links
+            self.urls.append(new_link)
diff --git a/module/plugins/crypter/QuickshareCzFolder.py b/module/plugins/crypter/QuickshareCzFolder.py
index a8f97cf77..b8db5d88c 100644
--- a/module/plugins/crypter/QuickshareCzFolder.py
+++ b/module/plugins/crypter/QuickshareCzFolder.py
@@ -22,13 +22,10 @@ class QuickshareCzFolder(Crypter):
     def decrypt(self, pyfile):
         html = self.load(pyfile.url)
-        new_links = []
 
         found = re.search(self.FOLDER_PATTERN, html, re.DOTALL)
         if not found:
             self.fail("Parse error (FOLDER)")
-        new_links.extend(re.findall(self.LINK_PATTERN, found.group(1)))
+        self.urls.extend(re.findall(self.LINK_PATTERN, found.group(1)))
 
-        if new_links:
-            self.core.files.addLinks(new_links, pyfile.package().id)
-        else:
+        if not self.urls:
             self.fail('Could not extract any links')
diff --git a/module/plugins/crypter/RelinkUs.py b/module/plugins/crypter/RelinkUs.py
index 5474ef3b6..160900144 100644
--- a/module/plugins/crypter/RelinkUs.py
+++ b/module/plugins/crypter/RelinkUs.py
@@ -94,10 +94,9 @@ class RelinkUs(Crypter):
         self.fileid = re.match(self.__pattern__, pyfile.url).group('id')
         self.package = pyfile.package()
         self.password = self.getPassword()
-        self.url = pyfile.url
 
     def requestPackage(self):
-        self.html = self.load(self.url, decode=True)
+        self.html = self.load(self.pyfile.url, decode=True)
 
     def isOnline(self):
         if self.OFFLINE_TOKEN in self.html:
diff --git a/module/plugins/crypter/SafelinkingNet.py b/module/plugins/crypter/SafelinkingNet.py
index 55f4c0ff6..2687ed2f2 100644
--- a/module/plugins/crypter/SafelinkingNet.py
+++ b/module/plugins/crypter/SafelinkingNet.py
@@ -30,13 +30,12 @@ class SafelinkingNet(Crypter):
             self.load(url)
             m = re.search("^Location: (.+)$", self.req.http.header, re.MULTILINE)
             if m:
-                self.core.files.addLinks([m.group(1)], pyfile.package().id)
+                self.urls = [m.group(1)]
             else:
                 self.fail("Couldn't find forwarded Link")
 
         else:
             password = ""
-            packageLinks = []
             postData = {"post-protect": "1"}
 
             self.html = self.load(url)
@@ -76,8 +75,6 @@ class SafelinkingNet(Crypter):
                 linkDict = json_loads(m.group(1))
                 for link in linkDict:
                     if not "http://" in link['full']:
-                        packageLinks.append("https://safelinking.net/d/" + link['full'])
+                        self.urls.append("https://safelinking.net/d/" + link['full'])
                     else:
-                        packageLinks.append(link['full'])
-
-            self.core.files.addLinks(packageLinks, pyfile.package().id)
+                        self.urls.append(link['full'])
diff --git a/module/plugins/crypter/UlozToFolder.py b/module/plugins/crypter/UlozToFolder.py
index 6ba4b0819..f9ddd3c84 100644
--- a/module/plugins/crypter/UlozToFolder.py
+++ b/module/plugins/crypter/UlozToFolder.py
@@ -40,6 +40,6 @@ class UlozToFolder(Crypter):
             self.logInfo("Limit of 99 pages reached, aborting")
 
         if new_links:
-            self.core.files.addLinks(map(lambda s: "http://ulozto.net/%s" % s, new_links), pyfile.package().id)
+            self.urls = [map(lambda s: "http://ulozto.net/%s" % s, new_links)]
         else:
             self.fail('Could not extract any links')
diff --git a/module/plugins/crypter/XupPl.py b/module/plugins/crypter/XupPl.py
index 09c3084ea..e89f4ccb0 100644
--- a/module/plugins/crypter/XupPl.py
+++ b/module/plugins/crypter/XupPl.py
@@ -18,6 +18,6 @@ class XupPl(Crypter):
     def decrypt(self, pyfile):
         header = self.load(pyfile.url, just_header=True)
         if 'location' in header:
-            self.core.files.addLinks([header['location']], pyfile.package().id)
+            self.urls = [header['location']]
         else:
             self.fail('Unable to find link')
diff --git a/module/plugins/hoster/Ftp.py b/module/plugins/hoster/Ftp.py
index 725126d17..f448b99d1 100644
--- a/module/plugins/hoster/Ftp.py
+++ b/module/plugins/hoster/Ftp.py
@@ -75,13 +75,12 @@ class Ftp(Hoster):
             #Naive ftp directory listing
             if re.search(r'^25\d.*?"', self.req.http.header, re.M):
                 pyfile.url = pyfile.url.rstrip('/')
-                pkgname = "/".join((pyfile.package().name, urlparse(pyfile.url).path.rpartition('/')[2]))
+                pkgname = "/".join(pyfile.package().name, urlparse(pyfile.url).path.rpartition('/')[2])
                 pyfile.url += '/'
                 self.req.http.c.setopt(48, 1)  # CURLOPT_DIRLISTONLY
                 response = self.load(pyfile.url, decode=False)
                 links = [pyfile.url + quote(x) for x in response.splitlines()]
                 self.logDebug("LINKS", links)
-                self.core.api.addPackage(pkgname, links, 1)
-                #self.core.files.addLinks(links, pyfile.package().id)
+                self.core.api.addPackage(pkgname, links)
             else:
                 self.fail("Unexpected server response")
```
