author     Walter Purcaro <vuolter@gmail.com>   2014-04-11 12:26:05 +0200
committer  Stefano <l.stickell@yahoo.it>        2014-04-21 17:22:19 +0200
commit     41559c6d7c17862cc5640631e8a1bb7a00f923da (patch)
tree       d268d8d659b3099f561f03a18e5efe4fbf29ef8a
parent     Fix __pattern__ www (diff)
download   pyload-41559c6d7c17862cc5640631e8a1bb7a00f923da.tar.xz
Use pyfile instead of self.pyfile
Merges vuolter/pyload@b7f6e2e
(cherry picked from commit 4d978dedd16418a4e7d8c81bd819a6a5fa432057)
41 files changed, 78 insertions, 78 deletions
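
The pattern is the same throughout: each plugin callback touched below already receives the active file as the pyfile argument (decrypt(self, pyfile) or process(self, pyfile)), so the body now reads that local argument directly instead of going through self.pyfile. A minimal sketch of the idea, using a hypothetical ExampleFolder plugin rather than any file from this commit:

    import re

    from module.plugins.Crypter import Crypter  # base-class import as in the module/ tree; adjust for the pyload/ package layout


    class ExampleFolder(Crypter):
        LINK_PATTERN = r'<a href="([^"]+)">'

        def decrypt(self, pyfile):
            # The pyfile passed into decrypt() is the same object the old code
            # reached via self.pyfile; the parameter is used directly instead.
            html = self.load(pyfile.url)
            new_links = re.findall(self.LINK_PATTERN, html)

            if new_links:
                self.core.files.addLinks(new_links, pyfile.package().id)
            else:
                self.fail('Could not extract any links')
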
diff --git a/module/plugins/hoster/OverLoadMe.py b/module/plugins/hoster/OverLoadMe.py
index 33c041973..5edbb2c57 100644
--- a/module/plugins/hoster/OverLoadMe.py
+++ b/module/plugins/hoster/OverLoadMe.py
@@ -51,9 +51,9 @@ class OverLoadMe(Hoster):
             self.logWarning(data["msg"])
             self.tempOffline()
         else:
-            if self.pyfile.name is not None and self.pyfile.name.endswith('.tmp') and data["filename"]:
-                self.pyfile.name = data["filename"]
-                self.pyfile.size = parseFileSize(data["filesize"])
+            if pyfile.name is not None and pyfile.name.endswith('.tmp') and data["filename"]:
+                pyfile.name = data["filename"]
+                pyfile.size = parseFileSize(data["filesize"])

             new_url = data["downloadlink"]
             if self.getConfig("https"):
diff --git a/pyload/plugins/crypter/CCF.py b/pyload/plugins/crypter/CCF.py
index 8448c80ed..093a30a84 100644
--- a/pyload/plugins/crypter/CCF.py
+++ b/pyload/plugins/crypter/CCF.py
@@ -28,7 +28,7 @@ class CCF(Crypter):
         tempdlc_content = opener.open('http://service.jdownloader.net/dlcrypt/getDLC.php', params).read()

         download_folder = self.config['general']['download_folder']
-        location = download_folder #join(download_folder, self.pyfile.package().folder.decode(sys.getfilesystemencoding()))
+        location = download_folder #join(download_folder, pyfile.package().folder.decode(sys.getfilesystemencoding()))
         if not exists(location):
             makedirs(location)
diff --git a/pyload/plugins/crypter/CzshareComFolder.py b/pyload/plugins/crypter/CzshareComFolder.py
index 500af0bb3..761c9e2bc 100644
--- a/pyload/plugins/crypter/CzshareComFolder.py
+++ b/pyload/plugins/crypter/CzshareComFolder.py
@@ -18,7 +18,7 @@ class CzshareComFolder(Crypter):
     #NEXT_PAGE_PATTERN = r'<a class="next " href="/([^"]+)"> </a>'

     def decrypt(self, pyfile):
-        html = self.load(self.pyfile.url)
+        html = self.load(pyfile.url)
         new_links = []

         found = re.search(self.FOLDER_PATTERN, html, re.DOTALL)
@@ -26,6 +26,6 @@ class CzshareComFolder(Crypter):
             new_links.extend(re.findall(self.LINK_PATTERN, found.group(1)))

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/DDLMusicOrg.py b/pyload/plugins/crypter/DDLMusicOrg.py
index 16ab63594..1e8503a5a 100644
--- a/pyload/plugins/crypter/DDLMusicOrg.py
+++ b/pyload/plugins/crypter/DDLMusicOrg.py
@@ -19,7 +19,7 @@ class DDLMusicOrg(Crypter):
         self.multiDL = False

     def decrypt(self, pyfile):
-        html = self.req.load(self.pyfile.url, cookies=True)
+        html = self.req.load(pyfile.url, cookies=True)

         if re.search(r"Wer dies nicht rechnen kann", html) is not None:
             self.offline()
@@ -34,11 +34,11 @@ class DDLMusicOrg(Crypter):
         else:
             solve = int(math.group(1)) - int(math.group(3))
         sleep(3)
-        htmlwithlink = self.req.load(self.pyfile.url, cookies=True,
+        htmlwithlink = self.req.load(pyfile.url, cookies=True,
                                      post={"calc%s" % linknr: solve, "send%s" % linknr: "Send", "id": id, "linknr": linknr})
         m = re.search(r"<form id=\"ff\" action=\"(.*?)\" method=\"post\">", htmlwithlink)
         if m:
-            self.packages.append((self.pyfile.package().name, [m.group(1)], self.pyfile.package().folder))
+            self.packages.append((pyfile.package().name, [m.group(1)], pyfile.package().folder))
         else:
             self.retry()
diff --git a/pyload/plugins/crypter/Dereferer.py b/pyload/plugins/crypter/Dereferer.py
index 46001918f..4b6309c17 100644
--- a/pyload/plugins/crypter/Dereferer.py
+++ b/pyload/plugins/crypter/Dereferer.py
@@ -31,5 +31,5 @@ class Dereferer(Crypter):
     __author_mail__ = "zoidberg@mujmail.cz"

     def decrypt(self, pyfile):
-        link = re.match(self.__pattern__, self.pyfile.url).group('url')
-        self.core.files.addLinks([urllib.unquote(link).rstrip('+')], self.pyfile.package().id)
+        link = re.match(self.__pattern__, pyfile.url).group('url')
+        self.core.files.addLinks([urllib.unquote(link).rstrip('+')], pyfile.package().id)
diff --git a/pyload/plugins/crypter/DontKnowMe.py b/pyload/plugins/crypter/DontKnowMe.py
index 1a7cf57bb..0df14f9ba 100644
--- a/pyload/plugins/crypter/DontKnowMe.py
+++ b/pyload/plugins/crypter/DontKnowMe.py
@@ -18,5 +18,5 @@ class DontKnowMe(Crypter):
     LINK_PATTERN = r"http://dontknow.me/at/\?(.+)$"

     def decrypt(self, pyfile):
-        link = re.findall(self.LINK_PATTERN, self.pyfile.url)[0]
-        self.core.files.addLinks([urllib.unquote(link)], self.pyfile.package().id)
+        link = re.findall(self.LINK_PATTERN, pyfile.url)[0]
+        self.core.files.addLinks([urllib.unquote(link)], pyfile.package().id)
diff --git a/pyload/plugins/crypter/EmbeduploadCom.py b/pyload/plugins/crypter/EmbeduploadCom.py
index dd0fcb533..2beabcefd 100644
--- a/pyload/plugins/crypter/EmbeduploadCom.py
+++ b/pyload/plugins/crypter/EmbeduploadCom.py
@@ -19,7 +19,7 @@ class EmbeduploadCom(Crypter):
     LINK_PATTERN = r'<div id="([^"]+)"[^>]*>\s*<a href="([^"]+)" target="_blank" (?:class="DownloadNow"|style="color:red")>'

     def decrypt(self, pyfile):
-        self.html = self.load(self.pyfile.url, decode=True)
+        self.html = self.load(pyfile.url, decode=True)
         tmp_links = []
         new_links = []
@@ -39,7 +39,7 @@ class EmbeduploadCom(Crypter):
                 self.getLocation(tmp_links, new_links)

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/FilefactoryComFolder.py b/pyload/plugins/crypter/FilefactoryComFolder.py
index dce14cf04..07842c898 100644
--- a/pyload/plugins/crypter/FilefactoryComFolder.py
+++ b/pyload/plugins/crypter/FilefactoryComFolder.py
@@ -19,7 +19,7 @@ class FilefactoryComFolder(Crypter):
     NEXT_PAGE_PATTERN = r'<li class="current">.*?</li>\s*<li class=""><a href="([^"]+)">'

     def decrypt(self, pyfile):
-        url_base = re.match(self.__pattern__, self.pyfile.url).group(1)
+        url_base = re.match(self.__pattern__, pyfile.url).group(1)
         html = self.load(url_base)

         new_links = []
@@ -40,6 +40,6 @@ class FilefactoryComFolder(Crypter):
             self.logInfo("Limit of 99 pages reached, aborting")

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/FileserveComFolder.py b/pyload/plugins/crypter/FileserveComFolder.py
index e6fbcdf9d..3c4048f79 100644
--- a/pyload/plugins/crypter/FileserveComFolder.py
+++ b/pyload/plugins/crypter/FileserveComFolder.py
@@ -18,7 +18,7 @@ class FileserveComFolder(Crypter):
     LINK_PATTERN = r'<a href="([^"]+)" class="sheet_icon wbold">'

     def decrypt(self, pyfile):
-        html = self.load(self.pyfile.url)
+        html = self.load(pyfile.url)

         new_links = []
@@ -28,6 +28,6 @@ class FileserveComFolder(Crypter):
             new_links.extend(re.findall(self.LINK_PATTERN, folder.group(1)))

         if new_links:
-            self.core.files.addLinks(map(lambda s: "http://fileserve.com%s" % s, new_links), self.pyfile.package().id)
+            self.core.files.addLinks(map(lambda s: "http://fileserve.com%s" % s, new_links), pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/FourChanOrg.py b/pyload/plugins/crypter/FourChanOrg.py
index 9d4c9fd76..b109d5060 100644
--- a/pyload/plugins/crypter/FourChanOrg.py
+++ b/pyload/plugins/crypter/FourChanOrg.py
@@ -22,4 +22,4 @@ class FourChanOrg(Crypter):
         for image in images:
             urls.append("http://" + image)

-        self.core.files.addLinks(urls, self.pyfile.package().id)
+        self.core.files.addLinks(urls, pyfile.package().id)
diff --git a/pyload/plugins/crypter/GooGl.py b/pyload/plugins/crypter/GooGl.py
index b05009c58..49b8e1efd 100644
--- a/pyload/plugins/crypter/GooGl.py
+++ b/pyload/plugins/crypter/GooGl.py
@@ -35,6 +35,6 @@ class GooGl(Crypter):
         rep = json_loads(rep)

         if 'longUrl' in rep:
-            self.core.files.addLinks([rep['longUrl']], self.pyfile.package().id)
+            self.core.files.addLinks([rep['longUrl']], pyfile.package().id)
         else:
             self.fail('Unable to expand shortened link')
diff --git a/pyload/plugins/crypter/HoerbuchIn.py b/pyload/plugins/crypter/HoerbuchIn.py
index d087c2152..06897730e 100644
--- a/pyload/plugins/crypter/HoerbuchIn.py
+++ b/pyload/plugins/crypter/HoerbuchIn.py
@@ -21,8 +21,8 @@ class HoerbuchIn(Crypter):
     def decrypt(self, pyfile):
         self.pyfile = pyfile

-        if self.article.match(self.pyfile.url):
-            src = self.load(self.pyfile.url)
+        if self.article.match(pyfile.url):
+            src = self.load(pyfile.url)
             soup = BeautifulSoup(src, convertEntities=BeautifulStoneSoup.HTML_ENTITIES)

             abookname = soup.find("a", attrs={"rel": "bookmark"}).text
@@ -30,11 +30,11 @@ class HoerbuchIn(Crypter):
                 package = "%s (%s)" % (abookname, a.previousSibling.previousSibling.text[:-1])
                 links = self.decryptFolder(a["href"])

-                self.packages.append((package, links, self.pyfile.package().folder))
+                self.packages.append((package, links, pyfile.package().folder))
         else:
-            links = self.decryptFolder(self.pyfile.url)
+            links = self.decryptFolder(pyfile.url)

-            self.packages.append((self.pyfile.package().name, links, self.pyfile.package().folder))
+            self.packages.append((pyfile.package().name, links, pyfile.package().folder))

     def decryptFolder(self, url):
         m = self.protection.search(url)
diff --git a/pyload/plugins/crypter/LetitbitNetFolder.py b/pyload/plugins/crypter/LetitbitNetFolder.py
index 96499a666..0135bd891 100644
--- a/pyload/plugins/crypter/LetitbitNetFolder.py
+++ b/pyload/plugins/crypter/LetitbitNetFolder.py
@@ -17,7 +17,7 @@ class LetitbitNetFolder(Crypter):
     LINK_PATTERN = r'<a href="([^"]+)" target="_blank">'

     def decrypt(self, pyfile):
-        html = self.load(self.pyfile.url)
+        html = self.load(pyfile.url)

         new_links = []
@@ -28,6 +28,6 @@ class LetitbitNetFolder(Crypter):
             new_links.extend(re.findall(self.LINK_PATTERN, folder.group(0)))

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/LinkSaveIn.py b/pyload/plugins/crypter/LinkSaveIn.py
index 25290af2d..aaa654ec6 100644
--- a/pyload/plugins/crypter/LinkSaveIn.py
+++ b/pyload/plugins/crypter/LinkSaveIn.py
@@ -44,7 +44,7 @@ class LinkSaveIn(Crypter):
         self.req.cj.setCookie(self.HOSTER_DOMAIN, "Linksave_Language", "english")

         # Request package
-        self.html = self.load(self.pyfile.url)
+        self.html = self.load(pyfile.url)
         if not self.isOnline():
             self.offline()
diff --git a/pyload/plugins/crypter/LinkdecrypterCom.py b/pyload/plugins/crypter/LinkdecrypterCom.py
index 7d2547010..a6e6faa40 100644
--- a/pyload/plugins/crypter/LinkdecrypterCom.py
+++ b/pyload/plugins/crypter/LinkdecrypterCom.py
@@ -41,7 +41,7 @@ class LinkdecrypterCom(Crypter):
         # API not working anymore
         new_links = self.decryptHTML()

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/LixIn.py b/pyload/plugins/crypter/LixIn.py
index 6ba463459..4b6c3afcc 100644
--- a/pyload/plugins/crypter/LixIn.py
+++ b/pyload/plugins/crypter/LixIn.py
@@ -55,4 +55,4 @@ class LixIn(Crypter):
             new_link = matches.group("link")

         self.logDebug("Found link %s, adding to package" % new_link)
-        self.packages.append((self.pyfile.package().name, [new_link], self.pyfile.package().name))
+        self.packages.append((pyfile.package().name, [new_link], pyfile.package().name))
diff --git a/pyload/plugins/crypter/MediafireComFolder.py b/pyload/plugins/crypter/MediafireComFolder.py
index 917d93964..9702d80eb 100644
--- a/pyload/plugins/crypter/MediafireComFolder.py
+++ b/pyload/plugins/crypter/MediafireComFolder.py
@@ -52,6 +52,6 @@ class MediafireComFolder(Crypter):
                 new_links.append(url)

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/MultiloadCz.py b/pyload/plugins/crypter/MultiloadCz.py
index 6cc9a1171..d51a7821b 100644
--- a/pyload/plugins/crypter/MultiloadCz.py
+++ b/pyload/plugins/crypter/MultiloadCz.py
@@ -19,10 +19,10 @@ class MultiloadCz(Crypter):
     LINK_PATTERN = r'<p class="manager-server"><strong>([^<]+)</strong></p><p class="manager-linky"><a href="([^"]+)">'

     def decrypt(self, pyfile):
-        self.html = self.load(self.pyfile.url, decode=True)
+        self.html = self.load(pyfile.url, decode=True)
         new_links = []

-        if re.match(self.__pattern__, self.pyfile.url).group(1) == "slozka":
+        if re.match(self.__pattern__, pyfile.url).group(1) == "slozka":
             found = re.search(self.FOLDER_PATTERN, self.html)
             if found is not None:
                 new_links.extend(found.group(1).split())
@@ -37,6 +37,6 @@ class MultiloadCz(Crypter):
                 new_links.extend([x[1] for x in found if x[0] not in ignored_set])

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/MultiuploadCom.py b/pyload/plugins/crypter/MultiuploadCom.py
index e3013fe0d..9a74f0e38 100644
--- a/pyload/plugins/crypter/MultiuploadCom.py
+++ b/pyload/plugins/crypter/MultiuploadCom.py
@@ -55,7 +55,7 @@ class MultiuploadCom(Crypter):
                 new_links.append(url)

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/QuickshareCzFolder.py b/pyload/plugins/crypter/QuickshareCzFolder.py
index c3a7e0ec6..6a4f7d5b4 100644
--- a/pyload/plugins/crypter/QuickshareCzFolder.py
+++ b/pyload/plugins/crypter/QuickshareCzFolder.py
@@ -17,7 +17,7 @@ class QuickshareCzFolder(Crypter):
     LINK_PATTERN = r'(http://www.quickshare.cz/\S+)'

     def decrypt(self, pyfile):
-        html = self.load(self.pyfile.url)
+        html = self.load(pyfile.url)

         new_links = []
         found = re.search(self.FOLDER_PATTERN, html, re.DOTALL)
@@ -26,6 +26,6 @@ class QuickshareCzFolder(Crypter):
             new_links.extend(re.findall(self.LINK_PATTERN, found.group(1)))

         if new_links:
-            self.core.files.addLinks(new_links, self.pyfile.package().id)
+            self.core.files.addLinks(new_links, pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/SafelinkingNet.py b/pyload/plugins/crypter/SafelinkingNet.py
index 8a11d13a1..4a907c28d 100644
--- a/pyload/plugins/crypter/SafelinkingNet.py
+++ b/pyload/plugins/crypter/SafelinkingNet.py
@@ -27,7 +27,7 @@ class SafelinkingNet(Crypter):
             self.load(url)
             m = re.search("^Location: (.+)$", self.req.http.header, re.MULTILINE)
             if m:
-                self.core.files.addLinks([m.group(1)], self.pyfile.package().id)
+                self.core.files.addLinks([m.group(1)], pyfile.package().id)
             else:
                 self.fail("Couldn't find forwarded Link")
@@ -77,4 +77,4 @@ class SafelinkingNet(Crypter):
                 else:
                     packageLinks.append(link["full"])

-        self.core.files.addLinks(packageLinks, self.pyfile.package().id)
+        self.core.files.addLinks(packageLinks, pyfile.package().id)
diff --git a/pyload/plugins/crypter/SerienjunkiesOrg.py b/pyload/plugins/crypter/SerienjunkiesOrg.py
index 0b698bf94..2dfa852de 100644
--- a/pyload/plugins/crypter/SerienjunkiesOrg.py
+++ b/pyload/plugins/crypter/SerienjunkiesOrg.py
@@ -270,7 +270,7 @@ class SerienjunkiesOrg(Crypter):
         framePattern = re.compile("^http://download.(serienjunkies.org|dokujunkies.org)/frame/go-.*?/$")
         url = pyfile.url
         if framePattern.match(url):
-            self.packages.append((self.pyfile.package().name, [self.handleFrame(url)], self.pyfile.package().name))
+            self.packages.append((pyfile.package().name, [self.handleFrame(url)], pyfile.package().name))
         elif episodePattern.match(url):
             self.handleEpisode(url)
         elif oldStyleLink.match(url):
diff --git a/pyload/plugins/crypter/UlozToFolder.py b/pyload/plugins/crypter/UlozToFolder.py
index 0526ed761..062882da9 100644
--- a/pyload/plugins/crypter/UlozToFolder.py
+++ b/pyload/plugins/crypter/UlozToFolder.py
@@ -18,7 +18,7 @@ class UlozToFolder(Crypter):
     NEXT_PAGE_PATTERN = r'<a class="next " href="/([^"]+)"> </a>'

     def decrypt(self, pyfile):
-        html = self.load(self.pyfile.url)
+        html = self.load(pyfile.url)

         new_links = []
         for i in xrange(1, 100):
@@ -37,6 +37,6 @@ class UlozToFolder(Crypter):
             self.logInfo("Limit of 99 pages reached, aborting")

         if new_links:
-            self.core.files.addLinks(map(lambda s: "http://ulozto.net/%s" % s, new_links), self.pyfile.package().id)
+            self.core.files.addLinks(map(lambda s: "http://ulozto.net/%s" % s, new_links), pyfile.package().id)
         else:
             self.fail('Could not extract any links')
diff --git a/pyload/plugins/crypter/XupPl.py b/pyload/plugins/crypter/XupPl.py
index ea675b2d2..a95dca3c8 100644
--- a/pyload/plugins/crypter/XupPl.py
+++ b/pyload/plugins/crypter/XupPl.py
@@ -13,8 +13,8 @@ class XupPl(Crypter):
     __author_mail__ = "z00nx0@gmail.com"

     def decrypt(self, pyfile):
-        header = self.load(self.pyfile.url, just_header=True)
+        header = self.load(pyfile.url, just_header=True)
         if 'location' in header:
-            self.core.files.addLinks([header['location']], self.pyfile.package().id)
+            self.core.files.addLinks([header['location']], pyfile.package().id)
         else:
             self.fail('Unable to find link')
diff --git a/pyload/plugins/hoster/AlldebridCom.py b/pyload/plugins/hoster/AlldebridCom.py
index b094c805b..a3386ea02 100644
--- a/pyload/plugins/hoster/AlldebridCom.py
+++ b/pyload/plugins/hoster/AlldebridCom.py
@@ -55,9 +55,9 @@ class AlldebridCom(Hoster):
             self.logWarning(data["error"])
             self.tempOffline()
         else:
-            if self.pyfile.name and not self.pyfile.name.endswith('.tmp'):
-                self.pyfile.name = data["filename"]
-                self.pyfile.size = parseFileSize(data["filesize"])
+            if pyfile.name and not pyfile.name.endswith('.tmp'):
+                pyfile.name = data["filename"]
+                pyfile.size = parseFileSize(data["filesize"])

             new_url = data["link"]
             if self.getConfig("https"):
diff --git a/pyload/plugins/hoster/BitshareCom.py b/pyload/plugins/hoster/BitshareCom.py
index 82a1dcfab..6c9d3a6ef 100644
--- a/pyload/plugins/hoster/BitshareCom.py
+++ b/pyload/plugins/hoster/BitshareCom.py
@@ -36,12 +36,12 @@ class BitshareCom(SimpleHoster):
         self.pyfile = pyfile

         # File id
-        m = re.match(self.__pattern__, self.pyfile.url)
+        m = re.match(self.__pattern__, pyfile.url)
         self.file_id = max(m.group('id1'), m.group('id2'))
         self.logDebug("File id is [%s]" % self.file_id)

         # Load main page
-        self.html = self.load(self.pyfile.url, ref=False, decode=True)
+        self.html = self.load(pyfile.url, ref=False, decode=True)

         # Check offline
         if re.search(self.FILE_OFFLINE_PATTERN, self.html):
@@ -55,11 +55,11 @@ class BitshareCom(SimpleHoster):
             self.retry()

         # File name
-        m = re.match(self.__pattern__, self.pyfile.url)
+        m = re.match(self.__pattern__, pyfile.url)
         name1 = m.group('name') if m else None
         m = re.search(self.FILE_INFO_PATTERN, self.html)
         name2 = m.group('N') if m else None
-        self.pyfile.name = max(name1, name2)
+        pyfile.name = max(name1, name2)

         # Ajax file id
         self.ajaxid = re.search(self.FILE_AJAXID_PATTERN, self.html).group(1)
diff --git a/pyload/plugins/hoster/ChipDe.py b/pyload/plugins/hoster/ChipDe.py
index 7fce22a61..6703fb638 100644
--- a/pyload/plugins/hoster/ChipDe.py
+++ b/pyload/plugins/hoster/ChipDe.py
@@ -21,4 +21,4 @@ class ChipDe(Crypter):
         except:
             self.fail('Failed to find the URL')

-        self.packages.append((self.pyfile.package().name, [url], self.pyfile.package().folder))
+        self.packages.append((pyfile.package().name, [url], pyfile.package().folder))
diff --git a/pyload/plugins/hoster/FileshareInUa.py b/pyload/plugins/hoster/FileshareInUa.py
index 95068e66b..48113ca95 100644
--- a/pyload/plugins/hoster/FileshareInUa.py
+++ b/pyload/plugins/hoster/FileshareInUa.py
@@ -29,7 +29,7 @@ class FileshareInUa(Hoster):
         if not self._checkOnline():
             self.offline()

-        self.pyfile.name = self._getName()
+        pyfile.name = self._getName()

         self.link = self._getLink()
diff --git a/pyload/plugins/hoster/FreakshareCom.py b/pyload/plugins/hoster/FreakshareCom.py
index 86e48e849..005f06a90 100644
--- a/pyload/plugins/hoster/FreakshareCom.py
+++ b/pyload/plugins/hoster/FreakshareCom.py
@@ -32,7 +32,7 @@ class FreakshareCom(Hoster):
             self.prepare()
             self.get_file_url()

-            self.download(self.pyfile.url, post=self.req_opts)
+            self.download(pyfile.url, post=self.req_opts)

             check = self.checkDownload({"bad": "bad try",
                                         "paralell": "> Sorry, you cant download more then 1 files at time. <",
diff --git a/pyload/plugins/hoster/GamefrontCom.py b/pyload/plugins/hoster/GamefrontCom.py
index e130ca5c0..18908b5f7 100644
--- a/pyload/plugins/hoster/GamefrontCom.py
+++ b/pyload/plugins/hoster/GamefrontCom.py
@@ -30,7 +30,7 @@ class GamefrontCom(Hoster):
         if not self._checkOnline():
             self.offline()

-        self.pyfile.name = self._getName()
+        pyfile.name = self._getName()

         self.link = self._getLink()
diff --git a/pyload/plugins/hoster/NetloadIn.py b/pyload/plugins/hoster/NetloadIn.py
index 2f75e5ac0..df3a27bb7 100644
--- a/pyload/plugins/hoster/NetloadIn.py
+++ b/pyload/plugins/hoster/NetloadIn.py
@@ -63,7 +63,7 @@ class NetloadIn(Hoster):
     def process(self, pyfile):
         self.url = pyfile.url
         self.prepare()
-        self.pyfile.setStatus("downloading")
+        pyfile.setStatus("downloading")
         self.proceed(self.url)

     def prepare(self):
diff --git a/pyload/plugins/hoster/PremiumizeMe.py b/pyload/plugins/hoster/PremiumizeMe.py
index 79b6e3fad..7e646fdf9 100644
--- a/pyload/plugins/hoster/PremiumizeMe.py
+++ b/pyload/plugins/hoster/PremiumizeMe.py
@@ -26,13 +26,13 @@ class PremiumizeMe(Hoster):
         # In some cases hostsers do not supply us with a filename at download, so we
         # are going to set a fall back filename (e.g. for freakshare or xfileshare)
-        self.pyfile.name = self.pyfile.name.split('/').pop()  # Remove everthing before last slash
+        pyfile.name = pyfile.name.split('/').pop()  # Remove everthing before last slash

         # Correction for automatic assigned filename: Removing html at end if needed
         suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
-        temp = self.pyfile.name.split('.')
+        temp = pyfile.name.split('.')
         if temp.pop() in suffix_to_remove:
-            self.pyfile.name = ".".join(temp)
+            pyfile.name = ".".join(temp)

         # Get account data
         (user, data) = self.account.selectAccount()
@@ -40,7 +40,7 @@ class PremiumizeMe(Hoster):
         # Get rewritten link using the premiumize.me api v1 (see https://secure.premiumize.me/?show=api)
         answer = self.load(
             "https://api.premiumize.me/pm-api/v1.php?method=directdownloadlink&params[login]=%s&params[pass]=%s&params[link]=%s" % (
-                user, data['password'], self.pyfile.url))
+                user, data['password'], pyfile.url))
         data = json_loads(answer)

         # Check status and decide what to do
diff --git a/pyload/plugins/hoster/RPNetBiz.py b/pyload/plugins/hoster/RPNetBiz.py
index 38045b452..380f84c53 100644
--- a/pyload/plugins/hoster/RPNetBiz.py
+++ b/pyload/plugins/hoster/RPNetBiz.py
@@ -34,7 +34,7 @@ class RPNetBiz(Hoster):
         # Get the download link
         response = self.load("https://premium.rpnet.biz/client_api.php",
                              get={"username": user, "password": data['password'],
-                                  "action": "generate", "links": self.pyfile.url})
+                                  "action": "generate", "links": pyfile.url})
         self.logDebug("JSON data: %s" % response)

         link_status = json_loads(response)['links'][0]  # get the first link... since we only queried one
diff --git a/pyload/plugins/hoster/RapidshareCom.py b/pyload/plugins/hoster/RapidshareCom.py
index 8ecea6886..47e3653cc 100644
--- a/pyload/plugins/hoster/RapidshareCom.py
+++ b/pyload/plugins/hoster/RapidshareCom.py
@@ -73,7 +73,7 @@ class RapidshareCom(Hoster):
         self.multiDL = self.resumeDownload = self.premium

     def process(self, pyfile):
-        self.url = self.pyfile.url
+        self.url = pyfile.url
         self.prepare()

     def prepare(self):
diff --git a/pyload/plugins/hoster/RealdebridCom.py b/pyload/plugins/hoster/RealdebridCom.py
index fc863863e..04ba80d0f 100644
--- a/pyload/plugins/hoster/RealdebridCom.py
+++ b/pyload/plugins/hoster/RealdebridCom.py
@@ -61,9 +61,9 @@ class RealdebridCom(Hoster):
             self.logWarning(data["message"])
             self.tempOffline()
         else:
-            if self.pyfile.name is not None and self.pyfile.name.endswith('.tmp') and data["file_name"]:
-                self.pyfile.name = data["file_name"]
-                self.pyfile.size = parseFileSize(data["file_size"])
+            if pyfile.name is not None and pyfile.name.endswith('.tmp') and data["file_name"]:
+                pyfile.name = data["file_name"]
+                pyfile.size = parseFileSize(data["file_size"])

             new_url = data['generated_links'][0][-1]
             if self.getConfig("https"):
diff --git a/pyload/plugins/hoster/ReloadCc.py b/pyload/plugins/hoster/ReloadCc.py
index 5ad525b78..ed1b21aa3 100644
--- a/pyload/plugins/hoster/ReloadCc.py
+++ b/pyload/plugins/hoster/ReloadCc.py
@@ -28,13 +28,13 @@ class ReloadCc(Hoster):
         # In some cases hostsers do not supply us with a filename at download, so we
         # are going to set a fall back filename (e.g. for freakshare or xfileshare)
-        self.pyfile.name = self.pyfile.name.split('/').pop()  # Remove everthing before last slash
+        pyfile.name = pyfile.name.split('/').pop()  # Remove everthing before last slash

         # Correction for automatic assigned filename: Removing html at end if needed
         suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
-        temp = self.pyfile.name.split('.')
+        temp = pyfile.name.split('.')
         if temp.pop() in suffix_to_remove:
-            self.pyfile.name = ".".join(temp)
+            pyfile.name = ".".join(temp)

         # Get account data
         (user, data) = self.account.selectAccount()
@@ -43,7 +43,7 @@ class ReloadCc(Hoster):
             via='pyload',
             v=1,
             user=user,
-            uri=self.pyfile.url
+            uri=pyfile.url
         )

         try:
@@ -63,7 +63,7 @@ class ReloadCc(Hoster):
             elif e.code == 403:
                 self.fail("Your account is disabled. Please contact the Reload.cc support!")
             elif e.code == 409:
-                self.logWarning("The hoster seems to be a limited hoster and you've used your daily traffic for this hoster: %s" % self.pyfile.url)
+                self.logWarning("The hoster seems to be a limited hoster and you've used your daily traffic for this hoster: %s" % pyfile.url)
                 # Wait for 6 hours and retry up to 4 times => one day
                 self.retry(4, 6 * 60 * 60, "Limited hoster traffic limit exceeded")
             elif e.code == 429:
diff --git a/pyload/plugins/hoster/UploadedTo.py b/pyload/plugins/hoster/UploadedTo.py
index 6e6f0aeaf..2ab7d89b6 100644
--- a/pyload/plugins/hoster/UploadedTo.py
+++ b/pyload/plugins/hoster/UploadedTo.py
@@ -138,7 +138,7 @@ class UploadedTo(Hoster):

         pyfile.name = html_unescape(self.data[2])

-        # self.pyfile.name = self.get_file_name()
+        # pyfile.name = self.get_file_name()

         if self.premium:
             self.handlePremium()
diff --git a/pyload/plugins/hoster/UploadingCom.py b/pyload/plugins/hoster/UploadingCom.py
index 9bb526506..c6a8a1231 100644
--- a/pyload/plugins/hoster/UploadingCom.py
+++ b/pyload/plugins/hoster/UploadingCom.py
@@ -44,8 +44,8 @@ class UploadingCom(SimpleHoster):
         self.req.cj.setCookie("uploading.com", "setlang", "en")
         self.req.cj.setCookie("uploading.com", "_lang", "en")

-        if not "/get/" in self.pyfile.url:
-            self.pyfile.url = self.pyfile.url.replace("/files", "/files/get")
+        if not "/get/" in pyfile.url:
+            pyfile.url = pyfile.url.replace("/files", "/files/get")

         self.html = self.load(pyfile.url, decode=True)
         self.file_info = self.getFileInfo()
diff --git a/pyload/plugins/hoster/XHamsterCom.py b/pyload/plugins/hoster/XHamsterCom.py
index 7d33909f7..254707faf 100644
--- a/pyload/plugins/hoster/XHamsterCom.py
+++ b/pyload/plugins/hoster/XHamsterCom.py
@@ -32,7 +32,7 @@ class XHamsterCom(Hoster):
         if self.getConfig("type"):
             self.desired_fmt = self.getConfig("type")

-        self.pyfile.name = self.get_file_name() + self.desired_fmt
+        pyfile.name = self.get_file_name() + self.desired_fmt
         self.download(self.get_file_url())

     def download_html(self):
diff --git a/pyload/plugins/hoster/YoupornCom.py b/pyload/plugins/hoster/YoupornCom.py
index 65968ba13..c6f2d07ad 100644
--- a/pyload/plugins/hoster/YoupornCom.py
+++ b/pyload/plugins/hoster/YoupornCom.py
@@ -19,7 +19,7 @@ class YoupornCom(Hoster):
         if not self.file_exists():
             self.offline()

-        self.pyfile.name = self.get_file_name()
+        pyfile.name = self.get_file_name()
         self.download(self.get_file_url())

     def download_html(self):
diff --git a/pyload/plugins/hoster/ZeveraCom.py b/pyload/plugins/hoster/ZeveraCom.py
index f910852c8..7305ad90b 100644
--- a/pyload/plugins/hoster/ZeveraCom.py
+++ b/pyload/plugins/hoster/ZeveraCom.py
@@ -48,7 +48,7 @@ class ZeveraCom(Hoster):
    #         self.logDebug("zevera.com: Old URL: %s" % pyfile.url)
    #
    #         last_size = retries = 0
-   #         olink = self.pyfile.url #quote(self.pyfile.url.encode('utf_8'))
+   #         olink = pyfile.url #quote(pyfile.url.encode('utf_8'))
    #
    #         for _ in xrange(100):
    #             self.retData = self.account.loadAPIRequest(self.req, cmd = 'download_request', olink = olink)
@@ -95,7 +95,7 @@ class ZeveraCom(Hoster):
    #             self.logError(retData['ErrorCode'], retData['ErrorMessage'])
    #             #self.fail('ERROR: ' + retData['ErrorMessage'])
    #
-   #         if self.pyfile.size / 1024000 > retData['AccountInfo']['AvailableTODAYTrafficForUseInMBytes']:
+   #         if pyfile.size / 1024000 > retData['AccountInfo']['AvailableTODAYTrafficForUseInMBytes']:
    #             self.logWarning("Not enough data left to download the file")
    #
    #     def crazyDecode(self, ustring):