author  Walter Purcaro <vuolter@gmail.com>  2014-09-28 21:19:03 +0200
committer  Walter Purcaro <vuolter@gmail.com>  2014-09-28 21:19:03 +0200
commit  46c6fc74a4e423927554f024b78dbbbf33e982cd (patch)
tree  e7ff9580bb5e80b91e2bd9609409116a19c5941e
parent  Api : Add brackets and pipe to urlmatcher (diff)
parent  [XFileSharingPro] Fixed typo (diff)
download  pyload-46c6fc74a4e423927554f024b78dbbbf33e982cd.tar.xz
Merge branch 'stable' into 0.4.10
Conflicts:
	pyload/plugins/crypter/MultiuploadCom.py
	pyload/plugins/crypter/SerienjunkiesOrg.py
	pyload/plugins/hooks/ExternalScripts.py
	pyload/plugins/hooks/ExtractArchive.py
	pyload/plugins/hooks/MergeFiles.py
	pyload/plugins/hoster/CatShareNet.py
	pyload/plugins/hoster/FilezyNet.py
	pyload/plugins/hoster/IFileWs.py
	pyload/plugins/hoster/PremiumTo.py
	pyload/plugins/hoster/SpeedyshareCom.py
	pyload/plugins/hoster/UptoboxCom.py
	pyload/plugins/hoster/XFileSharingPro.py
	pyload/plugins/hoster/ZippyshareCom.py
-rw-r--r--  module/plugins/accounts/TusfilesNet.py | 69
-rw-r--r--  module/plugins/hoster/DevhostSt.py | 49
-rw-r--r--  module/plugins/hoster/DropboxCom.py | 42
-rw-r--r--  module/plugins/hoster/KingfilesNet.py | 94
-rw-r--r--  pyload/plugins/Account.py | 2
-rw-r--r--  pyload/plugins/Crypter.py | 11
-rw-r--r--  pyload/plugins/Plugin.py | 3
-rw-r--r--  pyload/plugins/accounts/DebridItaliaCom.py | 2
-rw-r--r--  pyload/plugins/accounts/EasybytezCom.py | 36
-rw-r--r--  pyload/plugins/accounts/EgoFilesCom.py | 2
-rw-r--r--  pyload/plugins/accounts/FilerNet.py | 2
-rw-r--r--  pyload/plugins/accounts/FreeWayMe.py | 2
-rw-r--r--  pyload/plugins/accounts/HellshareCz.py | 8
-rw-r--r--  pyload/plugins/accounts/LetitbitNet.py | 4
-rw-r--r--  pyload/plugins/accounts/MyfastfileCom.py | 4
-rw-r--r--  pyload/plugins/accounts/RealdebridCom.py | 2
-rw-r--r--  pyload/plugins/crypter/ChipDe.py | 2
-rw-r--r--  pyload/plugins/crypter/DataHuFolder.py | 4
-rw-r--r--  pyload/plugins/crypter/DuckCryptInfo.py | 2
-rw-r--r--  pyload/plugins/crypter/EasybytezComFolder.py | 5
-rw-r--r--  pyload/plugins/crypter/FilefactoryComFolder.py | 2
-rw-r--r--  pyload/plugins/crypter/GooGl.py | 2
-rw-r--r--  pyload/plugins/crypter/LinkSaveIn.py | 6
-rw-r--r--  pyload/plugins/crypter/LinkdecrypterCom.py | 2
-rw-r--r--  pyload/plugins/crypter/LixIn.py | 4
-rw-r--r--  pyload/plugins/crypter/MediafireComFolder.py | 2
-rw-r--r--  pyload/plugins/crypter/MultiuploadCom.py | 59
-rw-r--r--  pyload/plugins/crypter/NCryptIn.py | 2
-rw-r--r--  pyload/plugins/crypter/RelinkUs.py | 4
-rw-r--r--  pyload/plugins/crypter/SerienjunkiesOrg.py | 324
-rw-r--r--  pyload/plugins/crypter/ShareLinksBiz.py | 2
-rw-r--r--  pyload/plugins/crypter/UploadedToFolder.py | 2
-rw-r--r--  pyload/plugins/hooks/BypassCaptcha.py | 6
-rw-r--r--  pyload/plugins/hooks/Captcha9kw.py | 18
-rw-r--r--  pyload/plugins/hooks/CaptchaBrotherhood.py | 2
-rw-r--r--  pyload/plugins/hooks/Checksum.py | 19
-rw-r--r--  pyload/plugins/hooks/ClickAndLoad.py | 2
-rw-r--r--  pyload/plugins/hooks/DeathByCaptcha.py | 7
-rw-r--r--  pyload/plugins/hooks/DeleteFinished.py | 6
-rw-r--r--  pyload/plugins/hooks/DownloadScheduler.py | 8
-rw-r--r--  pyload/plugins/hooks/EasybytezCom.py | 2
-rw-r--r--  pyload/plugins/hooks/Ev0InFetcher.py | 5
-rw-r--r--  pyload/plugins/hooks/ExpertDecoders.py | 7
-rw-r--r--  pyload/plugins/hooks/ExternalScripts.py | 75
-rw-r--r--  pyload/plugins/hooks/ExtractArchive.py | 137
-rw-r--r--  pyload/plugins/hooks/FreeWayMe.py | 2
-rw-r--r--  pyload/plugins/hooks/IRCInterface.py | 6
-rw-r--r--  pyload/plugins/hooks/ImageTyperz.py | 8
-rw-r--r--  pyload/plugins/hooks/LinkdecrypterCom.py | 2
-rw-r--r--  pyload/plugins/hooks/MegaDebridEu.py | 2
-rw-r--r--  pyload/plugins/hooks/MergeFiles.py | 8
-rw-r--r--  pyload/plugins/hooks/MultiHome.py | 2
-rw-r--r--  pyload/plugins/hooks/MyfastfileCom.py | 2
-rw-r--r--  pyload/plugins/hooks/OverLoadMe.py | 2
-rw-r--r--  pyload/plugins/hooks/RehostTo.py | 2
-rw-r--r--  pyload/plugins/hooks/RestartFailed.py | 2
-rw-r--r--  pyload/plugins/hooks/UnSkipOnFail.py | 4
-rw-r--r--  pyload/plugins/hooks/UpdateManager.py | 8
-rw-r--r--  pyload/plugins/hooks/XFileSharingPro.py | 8
-rw-r--r--  pyload/plugins/hooks/XMPPInterface.py | 18
-rw-r--r--  pyload/plugins/hoster/AlldebridCom.py | 2
-rw-r--r--  pyload/plugins/hoster/BasePlugin.py | 11
-rw-r--r--  pyload/plugins/hoster/BillionuploadsCom.py | 3
-rw-r--r--  pyload/plugins/hoster/CatShareNet.py | 46
-rw-r--r--  pyload/plugins/hoster/CramitIn.py | 7
-rw-r--r--  pyload/plugins/hoster/CzshareCom.py | 4
-rw-r--r--  pyload/plugins/hoster/DataHu.py | 2
-rw-r--r--  pyload/plugins/hoster/DateiTo.py | 2
-rw-r--r--  pyload/plugins/hoster/DepositfilesCom.py | 4
-rw-r--r--  pyload/plugins/hoster/EasybytezCom.py | 7
-rw-r--r--  pyload/plugins/hoster/EdiskCz.py | 2
-rw-r--r--  pyload/plugins/hoster/EgoFilesCom.py | 8
-rw-r--r--  pyload/plugins/hoster/FastixRu.py | 2
-rw-r--r--  pyload/plugins/hoster/FastshareCz.py | 2
-rw-r--r--  pyload/plugins/hoster/File4safeCom.py | 3
-rw-r--r--  pyload/plugins/hoster/FileParadoxIn.py | 3
-rw-r--r--  pyload/plugins/hoster/FilefactoryCom.py | 6
-rw-r--r--  pyload/plugins/hoster/FileomCom.py | 6
-rw-r--r--  pyload/plugins/hoster/FilepostCom.py | 2
-rw-r--r--  pyload/plugins/hoster/FilerNet.py | 12
-rw-r--r--  pyload/plugins/hoster/FilerioCom.py | 5
-rw-r--r--  pyload/plugins/hoster/FileserveCom.py | 4
-rw-r--r--  pyload/plugins/hoster/FilezyNet.py | 32
-rw-r--r--  pyload/plugins/hoster/FiredriveCom.py | 4
-rw-r--r--  pyload/plugins/hoster/FshareVn.py | 2
-rw-r--r--  pyload/plugins/hoster/GigapetaCom.py | 2
-rw-r--r--  pyload/plugins/hoster/HundredEightyUploadCom.py | 3
-rw-r--r--  pyload/plugins/hoster/IFileWs.py | 13
-rw-r--r--  pyload/plugins/hoster/Keep2shareCC.py | 8
-rw-r--r--  pyload/plugins/hoster/LemUploadsCom.py | 3
-rw-r--r--  pyload/plugins/hoster/LetitbitNet.py | 4
-rw-r--r--  pyload/plugins/hoster/LinksnappyCom.py | 2
-rw-r--r--  pyload/plugins/hoster/LomafileCom.py | 2
-rw-r--r--  pyload/plugins/hoster/LuckyShareNet.py | 12
-rw-r--r--  pyload/plugins/hoster/MediafireCom.py | 2
-rw-r--r--  pyload/plugins/hoster/MegareleaseOrg.py | 3
-rw-r--r--  pyload/plugins/hoster/MovReelCom.py | 3
-rw-r--r--  pyload/plugins/hoster/NarodRu.py | 2
-rw-r--r--  pyload/plugins/hoster/NosuploadCom.py | 2
-rw-r--r--  pyload/plugins/hoster/NovafileCom.py | 5
-rw-r--r--  pyload/plugins/hoster/NowDownloadEu.py | 2
-rw-r--r--  pyload/plugins/hoster/OneFichierCom.py | 2
-rw-r--r--  pyload/plugins/hoster/PremiumTo.py | 9
-rw-r--r--  pyload/plugins/hoster/PromptfileCom.py | 4
-rw-r--r--  pyload/plugins/hoster/QuickshareCz.py | 4
-rw-r--r--  pyload/plugins/hoster/RapidgatorNet.py | 4
-rw-r--r--  pyload/plugins/hoster/RarefileNet.py | 7
-rw-r--r--  pyload/plugins/hoster/SecureUploadEu.py | 5
-rw-r--r--  pyload/plugins/hoster/SendmywayCom.py | 3
-rw-r--r--  pyload/plugins/hoster/ShareRapidCom.py | 2
-rw-r--r--  pyload/plugins/hoster/SpeedyshareCom.py | 46
-rw-r--r--  pyload/plugins/hoster/StreamcloudEu.py | 8
-rw-r--r--  pyload/plugins/hoster/TurbobitNet.py | 53
-rw-r--r--  pyload/plugins/hoster/TusfilesNet.py | 15
-rw-r--r--  pyload/plugins/hoster/UlozTo.py | 8
-rw-r--r--  pyload/plugins/hoster/UloziskoSk.py | 4
-rw-r--r--  pyload/plugins/hoster/UnibytesCom.py | 2
-rw-r--r--  pyload/plugins/hoster/UploadedTo.py | 4
-rw-r--r--  pyload/plugins/hoster/UploadheroCom.py | 2
-rw-r--r--  pyload/plugins/hoster/UpstoreNet.py | 6
-rw-r--r--  pyload/plugins/hoster/UptoboxCom.py | 49
-rw-r--r--  pyload/plugins/hoster/VeehdCom.py | 2
-rw-r--r--  pyload/plugins/hoster/VeohCom.py | 2
-rw-r--r--  pyload/plugins/hoster/VimeoCom.py | 2
-rw-r--r--  pyload/plugins/hoster/WrzucTo.py | 2
-rw-r--r--  pyload/plugins/hoster/XFileSharingPro.py | 64
-rw-r--r--  pyload/plugins/hoster/ZeveraCom.py | 72
-rw-r--r--  pyload/plugins/hoster/ZippyshareCom.py | 90
-rw-r--r--  pyload/plugins/internal/CaptchaService.py | 16
-rw-r--r--  pyload/plugins/internal/DeadCrypter.py | 7
-rw-r--r--  pyload/plugins/internal/DeadHoster.py | 9
-rw-r--r--  pyload/plugins/internal/MultiHoster.py | 16
-rw-r--r--  pyload/plugins/internal/SimpleCrypter.py | 48
-rw-r--r--  pyload/plugins/internal/SimpleHoster.py | 71
-rw-r--r--  pyload/plugins/internal/UnRar.py | 31
135 files changed, 990 insertions, 1054 deletions
diff --git a/module/plugins/accounts/TusfilesNet.py b/module/plugins/accounts/TusfilesNet.py
new file mode 100644
index 000000000..d7cdbaebb
--- /dev/null
+++ b/module/plugins/accounts/TusfilesNet.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import mktime, strptime, gmtime
+
+from module.plugins.Account import Account
+from module.plugins.internal.SimpleHoster import parseHtmlForm
+from module.utils import parseFileSize
+
+
+class TusfilesNet(Account):
+ __name__ = "TusfilesNet"
+ __type__ = "account"
+ __version__ = "0.01"
+
+ __description__ = """ Tusfile.net account plugin """
+ __author_name__ = "guidobelix"
+ __author_mail__ = "guidobelix@hotmail.it"
+
+ VALID_UNTIL_PATTERN = r'<span class="label label-default">([^<]+)</span>'
+ TRAFFIC_LEFT_PATTERN = r'<td><img src="//www.tusfiles.net/i/icon/meter.png" alt=""/></td>\n<td>&nbsp;(?P<S>[^<]+)</td>'
+
+
+ def loadAccountInfo(self, user, req):
+ html = req.load("http://www.tusfiles.net/?op=my_account", decode=True)
+
+ validuntil = None
+ trafficleft = None
+ premium = False
+
+ m = re.search(self.VALID_UNTIL_PATTERN, html)
+ if m:
+ expiredate = m.group(1)
+ self.logDebug("Expire date: " + expiredate)
+
+ try:
+ validuntil = mktime(strptime(expiredate, "%d %B %Y"))
+ except Exception, e:
+ self.logError(e)
+
+ if validuntil > mktime(gmtime()):
+ premium = True
+ else:
+ premium = False
+ validuntil = None
+
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ if m:
+ trafficleft = m.group(1)
+ if "Unlimited" in trafficleft:
+ trafficleft = -1
+ else:
+ trafficleft = parseFileSize(trafficleft) * 1024
+
+ return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
+
+
+ def login(self, user, data, req):
+ html = req.load("http://www.tusfiles.net/login.html", decode=True)
+ action, inputs = parseHtmlForm('name="FL"', html)
+ inputs.update({'login': user,
+ 'password': data['password'],
+ 'redirect': "http://www.tusfiles.net/"})
+
+ html = req.load("http://www.tusfiles.net/", post=inputs, decode=True)
+
+ if 'Incorrect Login or Password' in html or '>Error<' in html:
+ self.wrongPassword()
diff --git a/module/plugins/hoster/DevhostSt.py b/module/plugins/hoster/DevhostSt.py
new file mode 100644
index 000000000..088ace93a
--- /dev/null
+++ b/module/plugins/hoster/DevhostSt.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+#
+# Test links:
+# http://d-h.st/mM8
+
+import re
+
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class DevhostSt(SimpleHoster):
+ __name__ = "Devhost"
+ __type__ = "hoster"
+ __version__ = "0.01"
+
+ __pattern__ = r'http://(?:www\.)?d-h\.st/\w+'
+
+ __description__ = """d-h.st hoster plugin"""
+ __author_name__ = "zapp-brannigan"
+ __author_mail__ = "fuerst.reinje@web.de"
+
+
+ FILE_NAME_PATTERN = r'>Filename:</span> <div title="(?P<N>.+?)"'
+ FILE_SIZE_PATTERN = r'>Size:</span> (?P<S>[\d.]+) (?P<U>\w+)'
+
+ OFFLINE_PATTERN = r'>File Not Found<'
+ LINK_PATTERN = r'id="downloadfile" href="(.+?)"'
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.parseError("Download link not found")
+
+ dl_url = m.group(1)
+ self.logDebug("Download URL = " + dl_url)
+ self.download(dl_url, disposition=True)
+
+ check = self.checkDownload({'is_html': re.compile("html")})
+ if check == "is_html":
+ self.parseError("Downloaded file is an html file")
+
+
+getInfo = create_getInfo(DevhostSt)
diff --git a/module/plugins/hoster/DropboxCom.py b/module/plugins/hoster/DropboxCom.py
new file mode 100644
index 000000000..ab63fc801
--- /dev/null
+++ b/module/plugins/hoster/DropboxCom.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class DropboxCom(SimpleHoster):
+ __name__ = "DropboxCom"
+ __type__ = "hoster"
+ __version__ = "0.01"
+
+ __pattern__ = r'https?://(?:www\.)?dropbox\.com/.+'
+
+ __description__ = """Dropbox.com hoster plugin"""
+ __author_name__ = "zapp-brannigan"
+ __author_mail__ = "fuerst.reinje@web.de"
+
+
+ FILE_NAME_PATTERN = r'<title>Dropbox - (?P<N>.+?)<'
+ FILE_SIZE_PATTERN = r'&nbsp;&middot;&nbsp; (?P<S>[\d,]+) (?P<U>\w+)'
+
+ OFFLINE_PATTERN = r'<title>Dropbox - (404|Shared link error)<'
+
+ SH_COOKIES = [(".dropbox.com", "lang", "en")]
+
+
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+ self.resumeDownload = True
+
+
+ def handleFree(self):
+ self.download(self.pyfile.url, get={'dl': "1"})
+
+ check = self.checkDownload({'is_html': re.compile("html")})
+ if check == "is_html":
+ self.parseError("Downloaded file is an html file")
+
+
+getInfo = create_getInfo(DropboxCom)
diff --git a/module/plugins/hoster/KingfilesNet.py b/module/plugins/hoster/KingfilesNet.py
new file mode 100644
index 000000000..4d87a5933
--- /dev/null
+++ b/module/plugins/hoster/KingfilesNet.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from module.plugins.internal.CaptchaService import SolveMedia
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class KingfilesNet(SimpleHoster):
+ __name__ = "KingfilesNet"
+ __type__ = "hoster"
+ __version__ = "0.01"
+
+ __pattern__ = r'http://(?:www\.)?kingfiles\.net/\w{12}'
+
+ __description__ = """Kingfiles.net hoster plugin"""
+ __author_name__ = ("zapp-brannigan", "Walter Purcaro")
+ __author_mail__ = ("fuerst.reinje@web.de", "vuolter@gmail.com")
+
+
+ FILE_NAME_PATTERN = r'name="fname" value="(?P<N>.+?)">'
+ FILE_SIZE_PATTERN = r'>Size: .+?">(?P<S>[\d.]+) (?P<U>\w+)'
+
+ OFFLINE_PATTERN = r'>(File Not Found</b><br><br>|File Not Found</h2>)'
+
+ FILE_ID_PATTERN = r'<input type=\"hidden\" name=\"id\" value=\"(.+)\">'
+ RAND_ID_PATTERN = r'type=\"hidden\" name=\"rand\" value=\"(.+)\">'
+
+ LINK_PATTERN = r'var download_url = \'(.+)\';'
+ SOLVEMEDIA_PATTERN = r'http://api\.solvemedia\.com/papi/challenge\.script\?k=(.+)\">'
+
+
+ def setup(self):
+ self.multiDL = True
+ self.resumeDownload = True
+
+
+ def handleFree(self):
+ # Load main page and find file-id
+ a = self.load(self.pyfile.url, cookies=True, decode=True)
+ file_id = re.search(self.FILE_ID_PATTERN, a).group(1)
+ self.logDebug("file_id", file_id)
+
+ # Click the free user button
+ post_data = {'op': "download1",
+ 'usr_login': "",
+ 'id': file_id,
+ 'fname': self.pyfile.name,
+ 'referer': "",
+ 'method_free': "+"}
+ b = self.load(self.pyfile.url, post=post_data, cookies=True, decode=True)
+
+ # Do the captcha stuff
+ m = re.search(self.SOLVEMEDIA_PATTERN, b)
+ if m is None:
+ self.parseError("Captcha key not found")
+
+ solvemedia = SolveMedia(self)
+ captcha_key = m.group(1)
+ self.logDebug("captcha_key", captcha_key)
+ captcha_challenge, captcha_response = solvemedia.challenge(captcha_key)
+
+ # Make the downloadlink appear and load the file
+ m = re.search(self.RAND_ID_PATTERN, b)
+ if m is None:
+ self.parseError("Random key not found")
+
+ rand = m.group(1)
+ self.logDebug("rand", rand)
+
+ post_data = {'op': "download2",
+ 'id': file_id,
+ 'rand': rand,
+ 'referer': self.pyfile.url,
+ 'method_free': "+",
+ 'method_premium': "",
+ 'adcopy_response': captcha_response,
+ 'adcopy_challenge': captcha_challenge,
+ 'down_direct': "1"}
+ c = self.load(self.pyfile.url, post=post_data, cookies=True, decode=True)
+
+ m = re.search(self.LINK_PATTERN, c)
+ if m is None:
+ self.parseError("Download url not found")
+
+ dl_url = m.group(1)
+ self.download(dl_url, cookies=True, disposition=True)
+
+ check = self.checkDownload({'is_html': re.compile("<html>")})
+ if check == "is_html":
+ self.parseError("Downloaded file is an html file")
+
+
+getInfo = create_getInfo(KingfilesNet)
diff --git a/pyload/plugins/Account.py b/pyload/plugins/Account.py
index 12ea494a0..e338f6b26 100644
--- a/pyload/plugins/Account.py
+++ b/pyload/plugins/Account.py
@@ -150,7 +150,7 @@ class Account(Base):
if req: req.close()
- self.logDebug("Account Info: %s" % str(infos))
+ self.logDebug("Account Info: %s" % infos)
infos['timestamp'] = time()
self.infos[name] = infos
diff --git a/pyload/plugins/Crypter.py b/pyload/plugins/Crypter.py
index ed72c57c1..7bb48d607 100644
--- a/pyload/plugins/Crypter.py
+++ b/pyload/plugins/Crypter.py
@@ -28,19 +28,16 @@ class Crypter(Plugin):
self.limitDL = 0
- def preprocessing(self, thread):
- """prepare"""
- self.setup()
- self.thread = thread
-
- self.decrypt(self.pyfile)
-
+ def process(self, pyfile):
+ """ main method """
+ self.decrypt(pyfile)
self.createPackages()
def decrypt(self, pyfile):
raise NotImplementedError
+
def createPackages(self):
""" create new packages from self.packages """
for pack in self.packages:
diff --git a/pyload/plugins/Plugin.py b/pyload/plugins/Plugin.py
index 31cbfca57..6bb325760 100644
--- a/pyload/plugins/Plugin.py
+++ b/pyload/plugins/Plugin.py
@@ -422,7 +422,8 @@ class Plugin(Base):
"""
if self.pyfile.abort: raise Abort
#utf8 vs decode -> please use decode attribute in all future plugins
- if type(url) == unicode: url = str(url)
+ if type(url) == unicode:
+ url = str(url) # encode('utf8')
res = self.req.load(url, get, post, ref, cookies, just_header, decode=decode)
diff --git a/pyload/plugins/accounts/DebridItaliaCom.py b/pyload/plugins/accounts/DebridItaliaCom.py
index cff0be018..34eb51ea6 100644
--- a/pyload/plugins/accounts/DebridItaliaCom.py
+++ b/pyload/plugins/accounts/DebridItaliaCom.py
@@ -27,7 +27,7 @@ class DebridItaliaCom(Account):
validuntil = int(time.mktime(time.strptime(m.group('D'), "%d/%m/%Y %H:%M")))
return {"premium": True, "validuntil": validuntil, "trafficleft": -1}
else:
- self.logError('Unable to retrieve account information - Plugin may be out of date')
+ self.logError("Unable to retrieve account information - Plugin may be out of date")
def login(self, user, data, req):
self.html = req.load("http://debriditalia.com/login.php",
diff --git a/pyload/plugins/accounts/EasybytezCom.py b/pyload/plugins/accounts/EasybytezCom.py
index 595e95ec4..ef5b44e46 100644
--- a/pyload/plugins/accounts/EasybytezCom.py
+++ b/pyload/plugins/accounts/EasybytezCom.py
@@ -11,11 +11,11 @@ from pyload.utils import parseFileSize
class EasybytezCom(Account):
__name__ = "EasybytezCom"
__type__ = "account"
- __version__ = "0.04"
+ __version__ = "0.06"
__description__ = """EasyBytez.com account plugin"""
- __author_name__ = "zoidberg"
- __author_mail__ = "zoidberg@mujmail.cz"
+ __author_name__ = ("zoidberg", "guidobelix")
+ __author_mail__ = ("zoidberg@mujmail.cz", "guidobelix@hotmail.it")
VALID_UNTIL_PATTERN = r'Premium account expire:</TD><TD><b>([^<]+)</b>'
TRAFFIC_LEFT_PATTERN = r'<TR><TD>Traffic available today:</TD><TD><b>(?P<S>[^<]+)</b>'
@@ -24,30 +24,38 @@ class EasybytezCom(Account):
def loadAccountInfo(self, user, req):
html = req.load("http://www.easybytez.com/?op=my_account", decode=True)
- validuntil = trafficleft = None
+ validuntil = None
+ trafficleft = None
premium = False
m = re.search(self.VALID_UNTIL_PATTERN, html)
if m:
+ expiredate = m.group(1)
+ self.logDebug("Expire date: " + expiredate)
+
try:
- self.logDebug("Expire date: " + m.group(1))
- validuntil = mktime(strptime(m.group(1), "%d %B %Y"))
+ validuntil = mktime(strptime(expiredate, "%d %B %Y"))
except Exception, e:
self.logError(e)
+
if validuntil > mktime(gmtime()):
premium = True
trafficleft = -1
- else:
- m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- if m:
- trafficleft = m.group(1)
- if "Unlimited" in trafficleft:
- trafficleft = -1
- else:
- trafficleft = parseFileSize(trafficleft) / 1024
+ else:
+ premium = False
+ validuntil = -1
+
+ m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
+ if m:
+ trafficleft = m.group(1)
+ if "Unlimited" in trafficleft:
+ trafficleft = -1
+ else:
+ trafficleft = parseFileSize(trafficleft) / 1024
return {"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}
+
def login(self, user, data, req):
html = req.load('http://www.easybytez.com/login.html', decode=True)
action, inputs = parseHtmlForm('name="FL"', html)
diff --git a/pyload/plugins/accounts/EgoFilesCom.py b/pyload/plugins/accounts/EgoFilesCom.py
index 3886d053a..620817a45 100644
--- a/pyload/plugins/accounts/EgoFilesCom.py
+++ b/pyload/plugins/accounts/EgoFilesCom.py
@@ -30,7 +30,7 @@ class EgoFilesCom(Account):
trafficleft = parseFileSize(m.group('T'), m.group('U')) / 1024
return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
else:
- self.logError('Unable to retrieve account information - Plugin may be out of date')
+ self.logError("Unable to retrieve account information - Plugin may be out of date")
def login(self, user, data, req):
# Set English language
diff --git a/pyload/plugins/accounts/FilerNet.py b/pyload/plugins/accounts/FilerNet.py
index 51c2e5d75..cc2e3fd6d 100644
--- a/pyload/plugins/accounts/FilerNet.py
+++ b/pyload/plugins/accounts/FilerNet.py
@@ -36,7 +36,7 @@ class FilerNet(Account):
trafficleft = parseFileSize(traffic.group(1)) / 1024
return {"premium": True, "validuntil": validuntil, "trafficleft": trafficleft}
else:
- self.logError('Unable to retrieve account information - Plugin may be out of date')
+ self.logError("Unable to retrieve account information - Plugin may be out of date")
return {"premium": False, "validuntil": None, "trafficleft": None}
def login(self, user, data, req):
diff --git a/pyload/plugins/accounts/FreeWayMe.py b/pyload/plugins/accounts/FreeWayMe.py
index 5106067a9..92f99972a 100644
--- a/pyload/plugins/accounts/FreeWayMe.py
+++ b/pyload/plugins/accounts/FreeWayMe.py
@@ -45,7 +45,7 @@ class FreeWayMe(Account):
def getAccountStatus(self, user, req):
answer = req.load("https://www.free-way.me/ajax/jd.php",
get={"id": 4, "user": user, "pass": self.accounts[user]['password']})
- self.logDebug("login: %s" % answer)
+ self.logDebug("Login: %s" % answer)
if answer == "Invalid login":
self.wrongPassword()
return False
diff --git a/pyload/plugins/accounts/HellshareCz.py b/pyload/plugins/accounts/HellshareCz.py
index ae3f974a1..34e4234f7 100644
--- a/pyload/plugins/accounts/HellshareCz.py
+++ b/pyload/plugins/accounts/HellshareCz.py
@@ -43,7 +43,7 @@ class HellshareCz(Account):
trafficleft = int(credit) * 1024
validuntil = -1
except Exception, e:
- self.logError('Unable to parse credit info', e)
+ self.logError("Unable to parse credit info", e)
validuntil = -1
trafficleft = -1
@@ -53,14 +53,14 @@ class HellshareCz(Account):
html = req.load('http://www.hellshare.com/')
if req.lastEffectiveURL != 'http://www.hellshare.com/':
#Switch to English
- self.logDebug('Switch lang - URL: %s' % req.lastEffectiveURL)
+ self.logDebug("Switch lang - URL: %s" % req.lastEffectiveURL)
json = req.load("%s?do=locRouter-show" % req.lastEffectiveURL)
hash = re.search(r"(--[0-9a-f]+-)", json).group(1)
- self.logDebug('Switch lang - HASH: %s' % hash)
+ self.logDebug("Switch lang - HASH: %s" % hash)
html = req.load('http://www.hellshare.com/%s/' % hash)
if re.search(self.CREDIT_LEFT_PATTERN, html):
- self.logDebug('Already logged in')
+ self.logDebug("Already logged in")
return
html = req.load('http://www.hellshare.com/login?do=loginForm-submit', post={
diff --git a/pyload/plugins/accounts/LetitbitNet.py b/pyload/plugins/accounts/LetitbitNet.py
index bce97d378..93d12a975 100644
--- a/pyload/plugins/accounts/LetitbitNet.py
+++ b/pyload/plugins/accounts/LetitbitNet.py
@@ -19,7 +19,7 @@ class LetitbitNet(Account):
# api_key = self.accounts[user]['password']
# json_data = [api_key, ['key/info']]
# api_rep = req.load('http://api.letitbit.net/json', post={'r': json_dumps(json_data)})
- # self.logDebug('API Key Info: ' + api_rep)
+ # self.logDebug("API Key Info: " + api_rep)
# api_rep = json_loads(api_rep)
#
# if api_rep['status'] == 'FAIL':
@@ -30,4 +30,4 @@ class LetitbitNet(Account):
def login(self, user, data, req):
# API_KEY is the username and the PREMIUM_KEY is the password
- self.logInfo('You must use your API KEY as username and the PREMIUM KEY as password.')
+ self.logInfo("You must use your API KEY as username and the PREMIUM KEY as password.")
diff --git a/pyload/plugins/accounts/MyfastfileCom.py b/pyload/plugins/accounts/MyfastfileCom.py
index b5d35d326..2ec0ebb6d 100644
--- a/pyload/plugins/accounts/MyfastfileCom.py
+++ b/pyload/plugins/accounts/MyfastfileCom.py
@@ -21,13 +21,13 @@ class MyfastfileCom(Account):
validuntil = int(time() + self.json_data['days_left'] * 24 * 60 * 60)
return {"premium": True, "validuntil": validuntil, "trafficleft": -1}
else:
- self.logError('Unable to get account information')
+ self.logError("Unable to get account information")
def login(self, user, data, req):
# Password to use is the API-Password written in http://myfastfile.com/myaccount
html = req.load("http://myfastfile.com/api.php",
get={"user": user, "pass": data['password']})
- self.logDebug('JSON data: ' + html)
+ self.logDebug("JSON data: " + html)
self.json_data = json_loads(html)
if self.json_data['status'] != 'ok':
self.logError('Invalid login. The password to use is the API-Password you find in your "My Account" page')
diff --git a/pyload/plugins/accounts/RealdebridCom.py b/pyload/plugins/accounts/RealdebridCom.py
index 8ab0234a9..1d76bb130 100644
--- a/pyload/plugins/accounts/RealdebridCom.py
+++ b/pyload/plugins/accounts/RealdebridCom.py
@@ -31,5 +31,5 @@ class RealdebridCom(Account):
if "Your login informations are incorrect" in page:
self.wrongPassword()
elif "PIN Code required" in page:
- self.logWarning('PIN code required. Please login to https://real-debrid.com using the PIN or disable the double authentication in your control panel on https://real-debrid.com.')
+ self.logWarning("PIN code required. Please login to https://real-debrid.com using the PIN or disable the double authentication in your control panel on https://real-debrid.com.")
self.pin_code = True
diff --git a/pyload/plugins/crypter/ChipDe.py b/pyload/plugins/crypter/ChipDe.py
index 29a248693..0ee6adfd3 100644
--- a/pyload/plugins/crypter/ChipDe.py
+++ b/pyload/plugins/crypter/ChipDe.py
@@ -24,4 +24,4 @@ class ChipDe(Crypter):
self.fail('Failed to find the URL')
else:
self.urls = [f.group(1)]
- self.logDebug('The file URL is %s' % self.urls[0])
+ self.logDebug("The file URL is %s" % self.urls[0])
diff --git a/pyload/plugins/crypter/DataHuFolder.py b/pyload/plugins/crypter/DataHuFolder.py
index 49dab9159..aafcf0def 100644
--- a/pyload/plugins/crypter/DataHuFolder.py
+++ b/pyload/plugins/crypter/DataHuFolder.py
@@ -27,7 +27,7 @@ class DataHuFolder(SimpleCrypter):
password = self.getPassword()
if password is '':
self.fail("No password specified, please set right password on Add package form and retry")
- self.logDebug('The folder is password protected', 'Using password: ' + password)
+ self.logDebug("The folder is password protected', 'Using password: " + password)
self.html = self.load(pyfile.url, post={'mappa_pass': password}, decode=True)
if u'Hib\xe1s jelsz\xf3' in self.html: # Wrong password
self.fail("Incorrect password, please set right password on Add package form and retry")
@@ -35,7 +35,7 @@ class DataHuFolder(SimpleCrypter):
package_name, folder_name = self.getPackageNameAndFolder()
package_links = re.findall(self.LINK_PATTERN, self.html)
- self.logDebug('Package has %d links' % len(package_links))
+ self.logDebug("Package has %d links" % len(package_links))
if package_links:
self.packages = [(package_name, package_links, folder_name)]
diff --git a/pyload/plugins/crypter/DuckCryptInfo.py b/pyload/plugins/crypter/DuckCryptInfo.py
index 63340f2ed..6c720297d 100644
--- a/pyload/plugins/crypter/DuckCryptInfo.py
+++ b/pyload/plugins/crypter/DuckCryptInfo.py
@@ -56,4 +56,4 @@ class DuckCryptInfo(Crypter):
soup = BeautifulSoup(src)
self.urls = [soup.find("iframe")['src']]
if not self.urls:
- self.logDebug('no links m - (Plugin out of date?)')
+ self.logDebug("No link found - (Plugin out of date?)")
diff --git a/pyload/plugins/crypter/EasybytezComFolder.py b/pyload/plugins/crypter/EasybytezComFolder.py
index 163f2bdf3..c9575db96 100644
--- a/pyload/plugins/crypter/EasybytezComFolder.py
+++ b/pyload/plugins/crypter/EasybytezComFolder.py
@@ -6,7 +6,7 @@ from pyload.plugins.internal.SimpleCrypter import SimpleCrypter
class EasybytezComFolder(SimpleCrypter):
__name__ = "EasybytezComFolder"
__type__ = "crypter"
- __version__ = "0.06"
+ __version__ = "0.07"
__pattern__ = r'http://(?:www\.)?easybytez\.com/users/(?P<ID>\d+/\d+)'
@@ -14,7 +14,10 @@ class EasybytezComFolder(SimpleCrypter):
__author_name__ = "stickell"
__author_mail__ = "l.stickell@yahoo.it"
+
URL_REPLACEMENTS = [(__pattern__, r"http://www.easybytez.com/users/\g<ID>?per_page=10000")]
LINK_PATTERN = r'<td><a href="(http://www\.easybytez\.com/\w+)" target="_blank">.+(?:</a>)?</td>'
TITLE_PATTERN = r'<Title>Files of \d+: (?P<title>.+) folder</Title>'
+
+ LOGIN_ACCOUNT = True
diff --git a/pyload/plugins/crypter/FilefactoryComFolder.py b/pyload/plugins/crypter/FilefactoryComFolder.py
index 6886fa5b1..c624b4fc5 100644
--- a/pyload/plugins/crypter/FilefactoryComFolder.py
+++ b/pyload/plugins/crypter/FilefactoryComFolder.py
@@ -18,7 +18,7 @@ class FilefactoryComFolder(SimpleCrypter):
TITLE_PATTERN = r'<h1>Files in <span>(?P<title>.+)</span></h1>'
PAGES_PATTERN = r'data-paginator-totalPages="(?P<pages>\d+)"'
- SH_COOKIES = [('.filefactory.com', 'locale', 'en_US.utf8')]
+ COOKIES = [('.filefactory.com', 'locale', 'en_US.utf8')]
def loadPage(self, page_n):
diff --git a/pyload/plugins/crypter/GooGl.py b/pyload/plugins/crypter/GooGl.py
index 93f3456cc..7ede17563 100644
--- a/pyload/plugins/crypter/GooGl.py
+++ b/pyload/plugins/crypter/GooGl.py
@@ -20,7 +20,7 @@ class GooGl(Crypter):
def decrypt(self, pyfile):
rep = self.load(self.API_URL, get={'shortUrl': pyfile.url})
- self.logDebug('JSON data: ' + rep)
+ self.logDebug("JSON data: " + rep)
rep = json_loads(rep)
if 'longUrl' in rep:
diff --git a/pyload/plugins/crypter/LinkSaveIn.py b/pyload/plugins/crypter/LinkSaveIn.py
index 4a56606c8..7aac1475b 100644
--- a/pyload/plugins/crypter/LinkSaveIn.py
+++ b/pyload/plugins/crypter/LinkSaveIn.py
@@ -136,7 +136,7 @@ class LinkSaveIn(Crypter):
package_links = []
self.logDebug("Search for Web links")
if not self.js:
- self.logDebug("no JS -> skip Web links")
+ self.logDebug("No JS -> skip Web links")
else:
#@TODO: Gather paginated web links
pattern = r'<a href="http://linksave\.in/(\w{43})"'
@@ -162,7 +162,7 @@ class LinkSaveIn(Crypter):
def handleContainer(self, type_):
package_links = []
type_ = type_.lower()
- self.logDebug('Seach for %s Container links' % type_.upper())
+ self.logDebug("Seach for %s Container links" % type_.upper())
if not type_.isalnum(): # check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric)
self.fail('unknown container type "%s" (this is probably a bug)' % type_)
pattern = r"\('%s_link'\).href=unescape\('(.*?\.%s)'\)" % (type_, type_)
@@ -177,7 +177,7 @@ class LinkSaveIn(Crypter):
package_links = []
self.logDebug("Search for CNL2 links")
if not self.js:
- self.logDebug("no JS -> skip CNL2 links")
+ self.logDebug("No JS -> skip CNL2 links")
elif 'cnl2_load' in self.html:
try:
(vcrypted, vjk) = self._getCipherParams()
diff --git a/pyload/plugins/crypter/LinkdecrypterCom.py b/pyload/plugins/crypter/LinkdecrypterCom.py
index b6ca2ec4f..a8429b579 100644
--- a/pyload/plugins/crypter/LinkdecrypterCom.py
+++ b/pyload/plugins/crypter/LinkdecrypterCom.py
@@ -43,7 +43,7 @@ class LinkdecrypterCom(Crypter):
if self.html.startswith('http://'):
return self.html.splitlines()
- self.logError('API', self.html)
+ self.logError("API", self.html)
if self.html == 'INTERRUPTION(PASSWORD)':
self.fail("No or incorrect password")
diff --git a/pyload/plugins/crypter/LixIn.py b/pyload/plugins/crypter/LixIn.py
index 1d812b0e3..5bfbd637e 100644
--- a/pyload/plugins/crypter/LixIn.py
+++ b/pyload/plugins/crypter/LixIn.py
@@ -42,12 +42,12 @@ class LixIn(Crypter):
for _ in xrange(5):
m = re.search(self.CAPTCHA_PATTERN, self.html)
if m:
- self.logDebug("trying captcha")
+ self.logDebug("Trying captcha")
captcharesult = self.decryptCaptcha("http://lix.in/" + m.group("image"))
self.html = self.req.load(url, decode=True,
post={"capt": captcharesult, "submit": "submit", "tiny": id})
else:
- self.logDebug("no captcha/captcha solved")
+ self.logDebug("No captcha/captcha solved")
else:
self.html = self.req.load(url, decode=True, post={"submit": "submit", "tiny": id})
diff --git a/pyload/plugins/crypter/MediafireComFolder.py b/pyload/plugins/crypter/MediafireComFolder.py
index 98c05f450..1035d68f7 100644
--- a/pyload/plugins/crypter/MediafireComFolder.py
+++ b/pyload/plugins/crypter/MediafireComFolder.py
@@ -23,7 +23,7 @@ class MediafireComFolder(Crypter):
def decrypt(self, pyfile):
url, result = checkHTMLHeader(pyfile.url)
- self.logDebug('Location (%d): %s' % (result, url))
+ self.logDebug("Location (%d): %s" % (result, url))
if result == 0:
# load and parse html
diff --git a/pyload/plugins/crypter/MultiuploadCom.py b/pyload/plugins/crypter/MultiuploadCom.py
index b1650b647..754247ec7 100644
--- a/pyload/plugins/crypter/MultiuploadCom.py
+++ b/pyload/plugins/crypter/MultiuploadCom.py
@@ -1,64 +1,15 @@
# -*- coding: utf-8 -*-
-import re
-from time import time
+from pyload.plugins.internal.DeadCrypter import DeadCrypter
-from pyload.plugins.Crypter import Crypter
-from pyload.utils import json_loads
-
-class MultiuploadCom(Crypter):
+class MultiuploadCom(DeadCrypter):
__name__ = "MultiuploadCom"
__type__ = "crypter"
- __version__ = "0.01"
+ __version__ = "0.02"
- __pattern__ = r'http://(?:www\.)?multiupload.com/(\w+)'
- __config__ = [("preferedHoster", "str", "Prefered hoster list (bar-separated) ", "multiupload"),
- ("ignoredHoster", "str", "Ignored hoster list (bar-separated) ", "")]
+ __pattern__ = r'http://(?:www\.)?multiupload\.(com|nl)/\w+'
- __description__ = """MultiUpload.com decrypter plugin"""
+ __description__ = """ MultiUpload.com decrypter plugin """
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
-
- ML_LINK_PATTERN = r'<div id="downloadbutton_" style=""><a href="([^"]+)"'
-
-
- def decrypt(self, pyfile):
- self.html = self.load(pyfile.url)
- m = re.search(self.ML_LINK_PATTERN, self.html)
- ml_url = m.group(1) if m else None
-
- json_list = json_loads(self.load("http://multiupload.com/progress/", get={
- "d": re.match(self.__pattern__, pyfile.url).group(1),
- "r": str(int(time() * 1000))
- }))
-
- prefered_set = map(lambda s: s.lower().split('.')[0], set(self.getConfig("preferedHoster").split('|')))
-
- if ml_url and 'multiupload' in prefered_set:
- self.urls.append(ml_url)
-
- for link in json_list:
- if link['service'].lower() in prefered_set and int(link['status']) and not int(link['deleted']):
- url = self.getLocation(link['url'])
- if url:
- self.urls.append(url)
-
- if not self.urls:
- ignored_set = map(lambda s: s.lower().split('.')[0], set(self.getConfig("ignoredHoster").split('|')))
-
- if 'multiupload' not in ignored_set:
- self.urls.append(ml_url)
-
- for link in json_list:
- if link['service'].lower() not in ignored_set and int(link['status']) and not int(link['deleted']):
- url = self.getLocation(link['url'])
- if url:
- self.urls.append(url)
-
- if not self.urls:
- self.fail('Could not extract any links')
-
- def getLocation(self, url):
- header = self.load(url, just_header=True)
- return header['location'] if "location" in header else None
diff --git a/pyload/plugins/crypter/NCryptIn.py b/pyload/plugins/crypter/NCryptIn.py
index 70c541d02..0e3043290 100644
--- a/pyload/plugins/crypter/NCryptIn.py
+++ b/pyload/plugins/crypter/NCryptIn.py
@@ -156,7 +156,7 @@ class NCryptIn(Crypter):
self.logDebug("CircleCaptcha protected")
captcha_img_url = "http://ncrypt.in/classes/captcha/circlecaptcha.php"
coords = self.decryptCaptcha(captcha_img_url, forceUser=True, imgtype="png", result_type='positional')
- self.logDebug("Captcha resolved, coords [%s]" % str(coords))
+ self.logDebug("Captcha resolved, coords [%s]" % coords)
postData['circle.x'] = coords[0]
postData['circle.y'] = coords[1]
diff --git a/pyload/plugins/crypter/RelinkUs.py b/pyload/plugins/crypter/RelinkUs.py
index 74228d41a..5a56edc4d 100644
--- a/pyload/plugins/crypter/RelinkUs.py
+++ b/pyload/plugins/crypter/RelinkUs.py
@@ -125,7 +125,7 @@ class RelinkUs(Crypter):
self.logDebug("Request user positional captcha resolving")
captcha_img_url = self.CAPTCHA_IMG_URL + "?id=%s" % self.fileid
coords = self.decryptCaptcha(captcha_img_url, forceUser=True, imgtype="png", result_type='positional')
- self.logDebug("Captcha resolved, coords [%s]" % str(coords))
+ self.logDebug("Captcha resolved, coords [%s]" % coords)
captcha_post_url = self.CAPTCHA_SUBMIT_URL + "?id=%s" % self.fileid
captcha_post_data = {'button.x': coords[0], 'button.y': coords[1], 'captcha': 'submit'}
self.html = self.load(captcha_post_url, post=captcha_post_data, decode=True)
@@ -189,7 +189,7 @@ class RelinkUs(Crypter):
return package_links
def handleDLCLinks(self):
- self.logDebug('Search for DLC links')
+ self.logDebug("Search for DLC links")
package_links = []
m = re.search(self.DLC_LINK_REGEX, self.html)
if m is not None:
diff --git a/pyload/plugins/crypter/SerienjunkiesOrg.py b/pyload/plugins/crypter/SerienjunkiesOrg.py
deleted file mode 100644
index 713086cb9..000000000
--- a/pyload/plugins/crypter/SerienjunkiesOrg.py
+++ /dev/null
@@ -1,324 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import random
-import re
-
-from time import sleep
-
-from BeautifulSoup import BeautifulSoup
-
-from pyload.plugins.Crypter import Crypter
-from pyload.utils import html_unescape
-
-
-class SerienjunkiesOrg(Crypter):
- __name__ = "SerienjunkiesOrg"
- __type__ = "crypter"
- __version__ = "0.39"
-
- __pattern__ = r'http://(?:www\.)?(serienjunkies.org|dokujunkies.org)/.*?'
- __config__ = [("changeNameSJ", "Packagename;Show;Season;Format;Episode", "Take SJ.org name", "Show"),
- ("changeNameDJ", "Packagename;Show;Format;Episode", "Take DJ.org name", "Show"),
- ("randomPreferred", "bool", "Randomize Preferred-List", False),
- ("hosterListMode", "OnlyOne;OnlyPreferred(One);OnlyPreferred(All);All",
- "Use for hosters (if supported)", "All"),
- ("hosterList", "str", "Preferred Hoster list (comma separated)",
- "RapidshareCom,UploadedTo,NetloadIn,FilefactoryCom,FreakshareNet,FilebaseTo,HotfileCom,DepositfilesCom,EasyshareCom,KickloadCom"),
- ("ignoreList", "str", "Ignored Hoster list (comma separated)", "MegauploadCom")]
-
- __description__ = """Serienjunkies.org decrypter plugin"""
- __author_name__ = ("mkaay", "godofdream")
- __author_mail__ = ("mkaay@mkaay.de", "soilfiction@gmail.com")
-
-
- def setup(self):
- self.multiDL = False
-
- def getSJSrc(self, url):
- src = self.req.load(str(url))
- if "This website is not available in your country" in src:
- self.fail("Not available in your country")
- if not src.find("Enter Serienjunkies") == -1:
- sleep(1)
- src = self.req.load(str(url))
- return src
-
- def handleShow(self, url):
- src = self.getSJSrc(url)
- soup = BeautifulSoup(src)
- packageName = self.pyfile.package().name
- if self.getConfig("changeNameSJ") == "Show":
- found = html_unescape(soup.find("h2").find("a").string.split(' &#8211;')[0])
- if found:
- packageName = found
-
- nav = soup.find("div", attrs={"id": "scb"})
-
- package_links = []
- for a in nav.findAll("a"):
- if self.getConfig("changeNameSJ") == "Show":
- package_links.append(a['href'])
- else:
- package_links.append(a['href'] + "#hasName")
- if self.getConfig("changeNameSJ") == "Show":
- self.packages.append((packageName, package_links, packageName))
- else:
- self.core.files.addLinks(package_links, self.pyfile.package().id)
-
- def handleSeason(self, url):
- src = self.getSJSrc(url)
- soup = BeautifulSoup(src)
- post = soup.find("div", attrs={"class": "post-content"})
- ps = post.findAll("p")
-
- seasonName = html_unescape(soup.find("a", attrs={"rel": "bookmark"}).string).replace("&#8211;", "-")
- groups = {}
- gid = -1
- for p in ps:
- if re.search("<strong>Sprache|<strong>Format", str(p)):
- var = p.findAll("strong")
- opts = {"Sprache": "", "Format": ""}
- for v in var:
- n = html_unescape(v.string).strip()
- n = re.sub(r"^([:]?)(.*?)([:]?)$", r'\2', n)
- if n.strip() not in opts:
- continue
- val = v.nextSibling
- if not val:
- continue
- val = val.replace("|", "").strip()
- val = re.sub(r"^([:]?)(.*?)([:]?)$", r'\2', val)
- opts[n.strip()] = val.strip()
- gid += 1
- groups[gid] = {}
- groups[gid]['ep'] = {}
- groups[gid]['opts'] = opts
- elif re.search("<strong>Download:", str(p)):
- parts = str(p).split("<br />")
- if re.search("<strong>", parts[0]):
- ename = re.search('<strong>(.*?)</strong>', parts[0]).group(1).strip().decode("utf-8").replace(
- "&#8211;", "-")
- groups[gid]['ep'][ename] = {}
- parts.remove(parts[0])
- for part in parts:
- hostername = re.search(r" \| ([-a-zA-Z0-9]+\.\w+)", part)
- if hostername:
- hostername = hostername.group(1)
- groups[gid]['ep'][ename][hostername] = []
- links = re.findall('href="(.*?)"', part)
- for link in links:
- groups[gid]['ep'][ename][hostername].append(link + "#hasName")
-
- links = []
- for g in groups.values():
- for ename in g['ep']:
- links.extend(self.getpreferred(g['ep'][ename]))
- if self.getConfig("changeNameSJ") == "Episode":
- self.packages.append((ename, links, ename))
- links = []
- package = "%s (%s, %s)" % (seasonName, g['opts']['Format'], g['opts']['Sprache'])
- if self.getConfig("changeNameSJ") == "Format":
- self.packages.append((package, links, package))
- links = []
- if (self.getConfig("changeNameSJ") == "Packagename") or re.search("#hasName", url):
- self.core.files.addLinks(links, self.pyfile.package().id)
- elif (self.getConfig("changeNameSJ") == "Season") or not re.search("#hasName", url):
- self.packages.append((seasonName, links, seasonName))
-
- def handleEpisode(self, url):
- src = self.getSJSrc(url)
- if not src.find(
- "Du hast das Download-Limit &uuml;berschritten! Bitte versuche es sp&auml;ter nocheinmal.") == -1:
- self.fail(_("Downloadlimit reached"))
- else:
- soup = BeautifulSoup(src)
- form = soup.find("form")
- h1 = soup.find("h1")
-
- if h1.get("class") == "wrap":
- captchaTag = soup.find(attrs={"src": re.compile("^/secure/")})
- if not captchaTag:
- sleep(5)
- self.retry()
-
- captchaUrl = "http://download.serienjunkies.org" + captchaTag['src']
- result = self.decryptCaptcha(str(captchaUrl), imgtype="png")
- sinp = form.find(attrs={"name": "s"})
-
- self.req.lastURL = str(url)
- sj = self.load(str(url), post={'s': sinp['value'], 'c': result, 'action': "Download"})
-
- soup = BeautifulSoup(sj)
- rawLinks = soup.findAll(attrs={"action": re.compile("^http://download.serienjunkies.org/")})
-
- if not len(rawLinks) > 0:
- sleep(1)
- self.retry()
- return
-
- self.correctCaptcha()
-
- links = []
- for link in rawLinks:
- frameUrl = link['action'].replace("/go-", "/frame/go-")
- links.append(self.handleFrame(frameUrl))
- if re.search("#hasName", url) or ((self.getConfig("changeNameSJ") == "Packagename") and
- (self.getConfig("changeNameDJ") == "Packagename")):
- self.core.files.addLinks(links, self.pyfile.package().id)
- else:
- if h1.text[2] == "_":
- eName = h1.text[3:]
- else:
- eName = h1.text
- self.packages.append((eName, links, eName))
-
- def handleOldStyleLink(self, url):
- sj = self.req.load(str(url))
- soup = BeautifulSoup(sj)
- form = soup.find("form", attrs={"action": re.compile("^http://serienjunkies.org")})
- captchaTag = form.find(attrs={"src": re.compile("^/safe/secure/")})
- captchaUrl = "http://serienjunkies.org" + captchaTag['src']
- result = self.decryptCaptcha(str(captchaUrl))
- url = form['action']
- sinp = form.find(attrs={"name": "s"})
-
- self.req.load(str(url), post={'s': sinp['value'], 'c': result, 'dl.start': "Download"}, cookies=False,
- just_header=True)
- decrypted = self.req.lastEffectiveURL
- if decrypted == str(url):
- self.retry()
- self.core.files.addLinks([decrypted], self.pyfile.package().id)
-
- def handleFrame(self, url):
- self.req.load(str(url))
- return self.req.lastEffectiveURL
-
- def handleShowDJ(self, url):
- src = self.getSJSrc(url)
- soup = BeautifulSoup(src)
- post = soup.find("div", attrs={"id": "page_post"})
- ps = post.findAll("p")
- found = html_unescape(soup.find("h2").find("a").string.split(' &#8211;')[0])
- if found:
- seasonName = found
-
- groups = {}
- gid = -1
- for p in ps:
- if re.search("<strong>Sprache|<strong>Format", str(p)):
- var = p.findAll("strong")
- opts = {"Sprache": "", "Format": ""}
- for v in var:
- n = html_unescape(v.string).strip()
- n = re.sub(r"^([:]?)(.*?)([:]?)$", r'\2', n)
- if n.strip() not in opts:
- continue
- val = v.nextSibling
- if not val:
- continue
- val = val.replace("|", "").strip()
- val = re.sub(r"^([:]?)(.*?)([:]?)$", r'\2', val)
- opts[n.strip()] = val.strip()
- gid += 1
- groups[gid] = {}
- groups[gid]['ep'] = {}
- groups[gid]['opts'] = opts
- elif re.search("<strong>Download:", str(p)):
- parts = str(p).split("<br />")
- if re.search("<strong>", parts[0]):
- ename = re.search('<strong>(.*?)</strong>', parts[0]).group(1).strip().decode("utf-8").replace(
- "&#8211;", "-")
- groups[gid]['ep'][ename] = {}
- parts.remove(parts[0])
- for part in parts:
- hostername = re.search(r" \| ([-a-zA-Z0-9]+\.\w+)", part)
- if hostername:
- hostername = hostername.group(1)
- groups[gid]['ep'][ename][hostername] = []
- links = re.findall('href="(.*?)"', part)
- for link in links:
- groups[gid]['ep'][ename][hostername].append(link + "#hasName")
-
- links = []
- for g in groups.values():
- for ename in g['ep']:
- links.extend(self.getpreferred(g['ep'][ename]))
- if self.getConfig("changeNameDJ") == "Episode":
- self.packages.append((ename, links, ename))
- links = []
- package = "%s (%s, %s)" % (seasonName, g['opts']['Format'], g['opts']['Sprache'])
- if self.getConfig("changeNameDJ") == "Format":
- self.packages.append((package, links, package))
- links = []
- if (self.getConfig("changeNameDJ") == "Packagename") or re.search("#hasName", url):
- self.core.files.addLinks(links, self.pyfile.package().id)
- elif (self.getConfig("changeNameDJ") == "Show") or not re.search("#hasName", url):
- self.packages.append((seasonName, links, seasonName))
-
- def handleCategoryDJ(self, url):
- package_links = []
- src = self.getSJSrc(url)
- soup = BeautifulSoup(src)
- content = soup.find("div", attrs={"id": "content"})
- for a in content.findAll("a", attrs={"rel": "bookmark"}):
- package_links.append(a['href'])
- self.core.files.addLinks(package_links, self.pyfile.package().id)
-
- def decrypt(self, pyfile):
- showPattern = re.compile("^http://serienjunkies.org/serie/(.*)/$")
- seasonPattern = re.compile("^http://serienjunkies.org/.*?/(.*)/$")
- episodePattern = re.compile("^http://download.serienjunkies.org/f-.*?.html(#hasName)?$")
- oldStyleLink = re.compile("^http://serienjunkies.org/safe/(.*)$")
- categoryPatternDJ = re.compile("^http://dokujunkies.org/.*?(.*)$")
- showPatternDJ = re.compile(r"^http://dokujunkies.org/.*?/(.*)\.html(#hasName)?$")
- framePattern = re.compile("^http://download.(serienjunkies.org|dokujunkies.org)/frame/go-.*?/$")
- url = pyfile.url
- if framePattern.match(url):
- self.packages.append((pyfile.package().name, [self.handleFrame(url)], pyfile.package().name))
- elif episodePattern.match(url):
- self.handleEpisode(url)
- elif oldStyleLink.match(url):
- self.handleOldStyleLink(url)
- elif showPattern.match(url):
- self.handleShow(url)
- elif showPatternDJ.match(url):
- self.handleShowDJ(url)
- elif seasonPattern.match(url):
- self.handleSeason(url)
- elif categoryPatternDJ.match(url):
- self.handleCategoryDJ(url)
-
- #selects the preferred hoster, after that selects any hoster (ignoring the one to ignore)
- def getpreferred(self, hosterlist):
-
- result = []
- preferredList = self.getConfig("hosterList").strip().lower().replace(
- '|', ',').replace('.', '').replace(';', ',').split(',')
- if (self.getConfig("randomPreferred") is True) and (
- self.getConfig("hosterListMode") in ["OnlyOne", "OnlyPreferred(One)"]):
- random.shuffle(preferredList)
- # we don't want hosters be read two times
- hosterlist2 = hosterlist.copy()
-
- for preferred in preferredList:
- for Hoster in hosterlist:
- if preferred == Hoster.lower().replace('.', ''):
- for Part in hosterlist[Hoster]:
- self.logDebug("selected " + Part)
- result.append(str(Part))
- del (hosterlist2[Hoster])
- if self.getConfig("hosterListMode") in ["OnlyOne", "OnlyPreferred(One)"]:
- return result
-
- ignorelist = self.getConfig("ignoreList").strip().lower().replace(
- '|', ',').replace('.', '').replace(';', ',').split(',')
- if self.getConfig('hosterListMode') in ["OnlyOne", "All"]:
- for Hoster in hosterlist2:
- if Hoster.strip().lower().replace('.', '') not in ignorelist:
- for Part in hosterlist2[Hoster]:
- self.logDebug("selected2 " + Part)
- result.append(str(Part))
-
- if self.getConfig('hosterListMode') == "OnlyOne":
- return result
- return result
diff --git a/pyload/plugins/crypter/ShareLinksBiz.py b/pyload/plugins/crypter/ShareLinksBiz.py
index 132d2160b..94e144e74 100644
--- a/pyload/plugins/crypter/ShareLinksBiz.py
+++ b/pyload/plugins/crypter/ShareLinksBiz.py
@@ -109,7 +109,7 @@ class ShareLinksBiz(Crypter):
captchaUrl = self.baseUrl + '/captcha.gif?d=%s&PHPSESSID=%s' % (m.group(1), m.group(2))
self.logDebug("Waiting user for correct position")
coords = self.decryptCaptcha(captchaUrl, forceUser=True, imgtype="gif", result_type='positional')
- self.logDebug("Captcha resolved, coords [%s]" % str(coords))
+ self.logDebug("Captcha resolved, coords [%s]" % coords)
# Resolve captcha
href = self._resolveCoords(coords, captchaMap)
diff --git a/pyload/plugins/crypter/UploadedToFolder.py b/pyload/plugins/crypter/UploadedToFolder.py
index 31977409d..5ba34d8b5 100644
--- a/pyload/plugins/crypter/UploadedToFolder.py
+++ b/pyload/plugins/crypter/UploadedToFolder.py
@@ -33,6 +33,6 @@ class UploadedToFolder(SimpleCrypter):
self.html = self.load(plain_link)
package_links = self.html.split('\n')[:-1]
- self.logDebug('Package has %d links' % len(package_links))
+ self.logDebug("Package has %d links" % len(package_links))
self.packages = [(package_name, package_links, folder_name)]
diff --git a/pyload/plugins/hooks/BypassCaptcha.py b/pyload/plugins/hooks/BypassCaptcha.py
index 9558ba4c4..0f16d0b06 100644
--- a/pyload/plugins/hooks/BypassCaptcha.py
+++ b/pyload/plugins/hooks/BypassCaptcha.py
@@ -74,7 +74,7 @@ class BypassCaptcha(Hook):
result = data['Value']
ticket = data['TaskId']
- self.logDebug("result %s : %s" % (ticket, result))
+ self.logDebug("Result %s : %s" % (ticket, result))
return ticket, result
@@ -83,7 +83,7 @@ class BypassCaptcha(Hook):
response = getURL(self.RESPOND_URL, post={"task_id": ticket, "key": self.getConfig("passkey"),
"cv": 1 if success else 0})
except BadHeader, e:
- self.logError("Could not send response.", str(e))
+ self.logError(_("Could not send response."), e
def newCaptchaTask(self, task):
if "service" in task.data:
@@ -105,7 +105,7 @@ class BypassCaptcha(Hook):
start_new_thread(self.processCaptcha, (task,))
else:
- self.logInfo("Your %s account has not enough credits" % self.__name__)
+ self.logInfo(_("Your %s account has not enough credits") % self.__name__)
def captchaCorrect(self, task):
if task.data['service'] == self.__name__ and "ticket" in task.data:
diff --git a/pyload/plugins/hooks/Captcha9kw.py b/pyload/plugins/hooks/Captcha9kw.py
index fcb5dd7c1..f8de28710 100644
--- a/pyload/plugins/hooks/Captcha9kw.py
+++ b/pyload/plugins/hooks/Captcha9kw.py
@@ -58,7 +58,7 @@ class Captcha9kw(Hook):
with open(task.captchaFile, 'rb') as f:
data = f.read()
data = b64encode(data)
- self.logDebug("%s : %s" % (task.captchaFile, data))
+ self.logDebug(task.captchaFile, data)
if task.isPositional():
mouse = 1
else:
@@ -93,10 +93,10 @@ class Captcha9kw(Hook):
result = response2
task.data['ticket'] = response
- self.logInfo("result %s : %s" % (response, result))
+ self.logInfo(_("Result %s : %s") % (response, result))
task.setResult(result)
else:
- self.logError("Bad upload: %s" % response)
+ self.logError(_("Bad upload"), response)
return False
def newCaptchaTask(self, task):
@@ -129,12 +129,12 @@ class Captcha9kw(Hook):
"pyload": "1",
"source": "pyload",
"id": task.data['ticket']})
- self.logInfo("Request correct: %s" % response)
+ self.logInfo(_("Request correct", response)
except BadHeader, e:
- self.logError("Could not send correct request.", str(e))
+ self.logError(_("Could not send correct request."), e)
else:
- self.logError("No CaptchaID for correct request (task %s) found." % task)
+ self.logError(_("No CaptchaID for correct request (task %s) found.") % task)
def captchaInvalid(self, task):
if "ticket" in task.data:
@@ -148,9 +148,9 @@ class Captcha9kw(Hook):
"pyload": "1",
"source": "pyload",
"id": task.data['ticket']})
- self.logInfo("Request refund: %s" % response)
+ self.logInfo(_("Request refund", response)
except BadHeader, e:
- self.logError("Could not send refund request.", str(e))
+ self.logError(_("Could not send refund request."), e)
else:
- self.logError("No CaptchaID for not correct request (task %s) found." % task)
+ self.logError(_("No CaptchaID for not correct request (task %s) found.") % task)
diff --git a/pyload/plugins/hooks/CaptchaBrotherhood.py b/pyload/plugins/hooks/CaptchaBrotherhood.py
index 81325be92..478a08cc5 100644
--- a/pyload/plugins/hooks/CaptchaBrotherhood.py
+++ b/pyload/plugins/hooks/CaptchaBrotherhood.py
@@ -139,7 +139,7 @@ class CaptchaBrotherhood(Hook):
task.setWaiting(100)
start_new_thread(self.processCaptcha, (task,))
else:
- self.logInfo("Your CaptchaBrotherhood Account has not enough credits")
+ self.logInfo(_("Your CaptchaBrotherhood Account has not enough credits"))
def captchaInvalid(self, task):
if task.data['service'] == self.__name__ and "ticket" in task.data:
diff --git a/pyload/plugins/hooks/Checksum.py b/pyload/plugins/hooks/Checksum.py
index 75ebcdc4c..31d0cbf8c 100644
--- a/pyload/plugins/hooks/Checksum.py
+++ b/pyload/plugins/hooks/Checksum.py
@@ -62,7 +62,7 @@ class Checksum(Hook):
def coreReady(self):
if not self.getConfig("check_checksum"):
- self.logInfo("Checksum validation is disabled in plugin configuration")
+ self.logInfo(_("Checksum validation is disabled in plugin configuration"))
def setup(self):
self.algorithms = sorted(
@@ -101,7 +101,7 @@ class Checksum(Hook):
api_size = int(data['size'])
file_size = getsize(local_file)
if api_size != file_size:
- self.logWarning("File %s has incorrect size: %d B (%d expected)" % (pyfile.name, file_size, api_size))
+ self.logWarning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size))
self.checkFailed(pyfile, local_file, "Incorrect file size")
del data['size']
@@ -115,17 +115,17 @@ class Checksum(Hook):
checksum = computeChecksum(local_file, key.replace("-", "").lower())
if checksum:
if checksum == data[key].lower():
- self.logInfo('File integrity of "%s" verified by %s checksum (%s).' %
+ self.logInfo(_('File integrity of "%s" verified by %s checksum (%s).') %
(pyfile.name, key.upper(), checksum))
break
else:
- self.logWarning("%s checksum for file %s does not match (%s != %s)" %
+ self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
(key.upper(), pyfile.name, checksum, data[key]))
self.checkFailed(pyfile, local_file, "Checksums do not match")
else:
- self.logWarning("Unsupported hashing algorithm: %s" % key.upper())
+ self.logWarning(_("Unsupported hashing algorithm"), key.upper())
else:
- self.logWarning("Unable to validate checksum for file %s" % pyfile.name)
+ self.logWarning(_("Unable to validate checksum for file"), pyfile.name)
def checkFailed(self, pyfile, local_file, msg):
check_action = self.getConfig("check_action")
@@ -147,14 +147,13 @@ class Checksum(Hook):
for link in pypack.getChildren().itervalues():
file_type = splitext(link['name'])[1][1:].lower()
- #self.logDebug(link, file_type)
if file_type not in self.formats:
continue
hash_file = fs_encode(safe_join(download_folder, link['name']))
if not isfile(hash_file):
- self.logWarning("File not found: %s" % link['name'])
+ self.logWarning(_("File not found"), link['name'])
continue
with open(hash_file) as f:
@@ -168,8 +167,8 @@ class Checksum(Hook):
algorithm = self.methods.get(file_type, file_type)
checksum = computeChecksum(local_file, algorithm)
if checksum == data['hash']:
- self.logInfo('File integrity of "%s" verified by %s checksum (%s).' %
+ self.logInfo(_('File integrity of "%s" verified by %s checksum (%s).') %
(data['name'], algorithm, checksum))
else:
- self.logWarning("%s checksum for file %s does not match (%s != %s)" %
+ self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
(algorithm, data['name'], checksum, data['hash']))
diff --git a/pyload/plugins/hooks/ClickAndLoad.py b/pyload/plugins/hooks/ClickAndLoad.py
index 47163ceef..501845840 100644
--- a/pyload/plugins/hooks/ClickAndLoad.py
+++ b/pyload/plugins/hooks/ClickAndLoad.py
@@ -30,7 +30,7 @@ class ClickAndLoad(Hook):
thread.start_new_thread(proxy, (self, ip, self.port, 9666))
except:
- self.logError("ClickAndLoad port already in use.")
+ self.logError(_("ClickAndLoad port already in use"))
def proxy(self, *settings):
diff --git a/pyload/plugins/hooks/DeathByCaptcha.py b/pyload/plugins/hooks/DeathByCaptcha.py
index 6db91b8c1..f2bae4848 100644
--- a/pyload/plugins/hooks/DeathByCaptcha.py
+++ b/pyload/plugins/hooks/DeathByCaptcha.py
@@ -146,7 +146,7 @@ class DeathByCaptcha(Hook):
raise DeathByCaptchaException('timed-out')
result = response['text']
- self.logDebug("result %s : %s" % (ticket, result))
+ self.logDebug("Result %s : %s" % (ticket, result))
return ticket, result
@@ -171,8 +171,9 @@ class DeathByCaptcha(Hook):
return False
balance, rate = self.info['balance'], self.info['rate']
- self.logInfo("Account balance: US$%.3f (%d captchas left at %.2f cents each)" % (balance / 100,
- balance // rate, rate))
+ self.logInfo(_("Account balance"),
+ _("US$%.3f (%d captchas left at %.2f cents each)") % (balance / 100,
+ balance // rate, rate))
if balance > rate:
task.handler.append(self)
diff --git a/pyload/plugins/hooks/DeleteFinished.py b/pyload/plugins/hooks/DeleteFinished.py
index 99aa040bf..4b22c7fed 100644
--- a/pyload/plugins/hooks/DeleteFinished.py
+++ b/pyload/plugins/hooks/DeleteFinished.py
@@ -23,8 +23,8 @@ class DeleteFinished(Hook):
if not self.info['sleep']:
deloffline = self.getConfig('deloffline')
mode = '0,1,4' if deloffline else '0,4'
- msg = 'delete all finished packages in queue list (%s packages with offline links)'
- self.logInfo(msg % ('including' if deloffline else 'excluding'))
+ msg = _('delete all finished packages in queue list (%s packages with offline links)')
+ self.logInfo(msg % (_('including') if deloffline else _('excluding')))
self.deleteFinished(mode)
self.info['sleep'] = True
self.addEvent('packageFinished', self.wakeup)
@@ -58,7 +58,7 @@ class DeleteFinished(Hook):
"""Adds an event listener for event name"""
if event in self.m.events:
if func in self.m.events[event]:
- self.logDebug('Function already registered %s' % func)
+ self.logDebug("Function already registered", func)
else:
self.m.events[event].append(func)
else:
diff --git a/pyload/plugins/hooks/DownloadScheduler.py b/pyload/plugins/hooks/DownloadScheduler.py
index fc2e10aac..c5caee35d 100644
--- a/pyload/plugins/hooks/DownloadScheduler.py
+++ b/pyload/plugins/hooks/DownloadScheduler.py
@@ -35,7 +35,7 @@ class DownloadScheduler(Hook):
schedule = re.findall("(\d{1,2}):(\d{2})[\s]*(-?\d+)",
schedule.lower().replace("full", "-1").replace("none", "0"))
if not schedule:
- self.logError("Invalid schedule")
+ self.logError(_("Invalid schedule"))
return
t0 = localtime()
@@ -58,7 +58,7 @@ class DownloadScheduler(Hook):
def setDownloadSpeed(self, speed):
if speed == 0:
abort = self.getConfig("abort")
- self.logInfo("Stopping download server. (Running downloads will %sbe aborted.)" % ('' if abort else 'not '))
+            self.logInfo(_("Stopping download server. (Running downloads will %sbe aborted.)") % ("" if abort else _("not ")))
self.core.api.pauseServer()
if abort:
self.core.api.stopAllDownloads()
@@ -66,10 +66,10 @@ class DownloadScheduler(Hook):
self.core.api.unpauseServer()
if speed > 0:
- self.logInfo("Setting download speed to %d kB/s" % speed)
+ self.logInfo(_("Setting download speed to %d kB/s") % speed)
self.core.api.setConfigValue("download", "limit_speed", 1)
self.core.api.setConfigValue("download", "max_speed", speed)
else:
- self.logInfo("Setting download speed to FULL")
+ self.logInfo(_("Setting download speed to FULL"))
self.core.api.setConfigValue("download", "limit_speed", 0)
self.core.api.setConfigValue("download", "max_speed", -1)
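
Note: the schedule option parsed above is a plain string of HH:MM/speed pairs, with "full" and "none" standing in for -1 and 0. A quick illustration of what that findall produces (the sample value is invented; only the regexp comes from the plugin):

    import re

    schedule = "0:00 full 9:00 150 20:00 none"  # invented example value
    entries = re.findall(r"(\d{1,2}):(\d{2})[\s]*(-?\d+)",
                         schedule.lower().replace("full", "-1").replace("none", "0"))
    # entries == [('0', '00', '-1'), ('9', '00', '150'), ('20', '00', '0')]
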
diff --git a/pyload/plugins/hooks/EasybytezCom.py b/pyload/plugins/hooks/EasybytezCom.py
index 1ec8a98f1..9d1cdc0db 100644
--- a/pyload/plugins/hooks/EasybytezCom.py
+++ b/pyload/plugins/hooks/EasybytezCom.py
@@ -31,7 +31,7 @@ class EasybytezCom(MultiHoster):
return m.group(1).split(',')
except Exception, e:
self.logDebug(e)
- self.logWarning("Unable to load supported hoster list, using last known")
+ self.logWarning(_("Unable to load supported hoster list, using last known"))
return ["bitshare.com", "crocko.com", "ddlstorage.com", "depositfiles.com", "extabit.com", "hotfile.com",
"mediafire.com", "netload.in", "rapidgator.net", "rapidshare.com", "uploading.com", "uload.to",
"uploaded.to"]
diff --git a/pyload/plugins/hooks/Ev0InFetcher.py b/pyload/plugins/hooks/Ev0InFetcher.py
index c3def8add..cd7314fc9 100644
--- a/pyload/plugins/hooks/Ev0InFetcher.py
+++ b/pyload/plugins/hooks/Ev0InFetcher.py
@@ -67,15 +67,14 @@ class Ev0InFetcher(Hook):
if show.lower() in normalizefiletitle(item['title']) and lastfound < int(mktime(item.date_parsed)):
links = self.filterLinks(item['description'].split("<br />"))
packagename = item['title'].encode("utf-8")
- self.logInfo("Ev0InFetcher: new episode '%s' (matched '%s')" % (packagename, show))
+ self.logInfo(_("New episode '%s' (matched '%s')") % (packagename, show))
self.core.api.addPackage(packagename, links, 1 if self.getConfig("queue") else 0)
self.setStorage("show_%s_lastfound" % show, int(mktime(item.date_parsed)))
found = True
if not found:
- #self.logDebug("Ev0InFetcher: no new episodes found")
pass
for show, lastfound in self.getStorage().iteritems():
if int(lastfound) > 0 and int(lastfound) + (3600 * 24 * 30) < int(time()):
self.delStorage("show_%s_lastfound" % show)
- self.logDebug("Ev0InFetcher: cleaned '%s' record" % show)
+ self.logDebug("Cleaned '%s' record" % show)
diff --git a/pyload/plugins/hooks/ExpertDecoders.py b/pyload/plugins/hooks/ExpertDecoders.py
index ef5409b76..292c84b7c 100644
--- a/pyload/plugins/hooks/ExpertDecoders.py
+++ b/pyload/plugins/hooks/ExpertDecoders.py
@@ -49,7 +49,6 @@ class ExpertDecoders(Hook):
with open(task.captchaFile, 'rb') as f:
data = f.read()
data = b64encode(data)
- #self.logDebug("%s: %s : %s" % (ticket, task.captchaFile, data))
req = getRequest()
#raise timeout threshold
@@ -61,7 +60,7 @@ class ExpertDecoders(Hook):
finally:
req.close()
- self.logDebug("result %s : %s" % (ticket, result))
+ self.logDebug("Result %s : %s" % (ticket, result))
task.setResult(result)
def newCaptchaTask(self, task):
@@ -88,7 +87,7 @@ class ExpertDecoders(Hook):
try:
response = getURL(self.API_URL, post={"action": "refund", "key": self.getConfig("passkey"),
"gen_task_id": task.data['ticket']})
- self.logInfo("Request refund: %s" % response)
+ self.logInfo(_("Request refund"), response)
except BadHeader, e:
- self.logError("Could not send refund request.", str(e))
+ self.logError(_("Could not send refund request."), e)
diff --git a/pyload/plugins/hooks/ExternalScripts.py b/pyload/plugins/hooks/ExternalScripts.py
index 372035e82..2e8dace14 100644
--- a/pyload/plugins/hooks/ExternalScripts.py
+++ b/pyload/plugins/hooks/ExternalScripts.py
@@ -2,6 +2,7 @@
import subprocess
+from itertools import chain
from os import listdir, access, X_OK, makedirs
from os.path import join, exists, basename, abspath
@@ -12,23 +13,27 @@ from pyload.utils import safe_join
class ExternalScripts(Hook):
__name__ = "ExternalScripts"
__type__ = "hook"
- __version__ = "0.23"
+ __version__ = "0.24"
__config__ = [("activated", "bool", "Activated", True)]
__description__ = """Run external scripts"""
- __author_name__ = ("mkaay", "RaNaN", "spoob")
- __author_mail__ = ("mkaay@mkaay.de", "ranan@pyload.org", "spoob@pyload.org")
+ __author_name__ = ("mkaay", "RaNaN", "spoob", "Walter Purcaro")
+ __author_mail__ = ("mkaay@mkaay.de", "ranan@pyload.org", "spoob@pyload.org", "vuolter@gmail.com")
- event_list = ["unrarFinished", "allDownloadsFinished", "allDownloadsProcessed"]
+ event_list = ["archive_extracted", "package_extracted", "all_archives_extracted", "all_archives_processed",
+ "allDownloadsFinished", "allDownloadsProcessed"]
def setup(self):
self.scripts = {}
- folders = ["download_preparing", "download_finished", "package_finished",
- "before_reconnect", "after_reconnect", "unrar_finished",
- "all_dls_finished", "all_dls_processed"]
+ folders = ["download_preparing", "download_finished", "all_downloads_finished", "all_downloads_processed",
+ "before_reconnect", "after_reconnect",
+ "package_finished", "package_extracted",
+ "archive_extracted", "all_archives_extracted", "all_archives_processed",
+ # deprecated folders
+ "unrar_finished", "all_dls_finished", "all_dls_processed"]
for folder in folders:
self.scripts[folder] = []
@@ -38,7 +43,8 @@ class ExternalScripts(Hook):
for script_type, names in self.scripts.iteritems():
if names:
- self.logInfo((_("Installed scripts for %s: ") % script_type) + ", ".join([basename(x) for x in names]))
+ self.logInfo(_("Installed scripts for"), script_type, ", ".join([basename(x) for x in names]))
+
def initPluginType(self, folder, path):
if not exists(path):
@@ -57,48 +63,75 @@ class ExternalScripts(Hook):
self.scripts[folder].append(join(path, f))
+
def callScript(self, script, *args):
try:
cmd = [script] + [str(x) if not isinstance(x, basestring) else x for x in args]
- self.logDebug("Executing %(script)s: %(cmd)s" % {"script": abspath(script), "cmd": " ".join(cmd)})
+ self.logDebug("Executing", abspath(script), " ".join(cmd))
#output goes to pyload
subprocess.Popen(cmd, bufsize=-1)
except Exception, e:
- self.logError(_("Error in %(script)s: %(error)s") % {"script": basename(script), "error": str(e)})
+ self.logError(_("Error in %(script)s: %(error)s") % {"script": basename(script), "error": e})
+
def downloadPreparing(self, pyfile):
for script in self.scripts['download_preparing']:
self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.id)
+
def downloadFinished(self, pyfile):
+ download_folder = self.config['general']['download_folder']
for script in self.scripts['download_finished']:
- self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.name,
- safe_join(self.config['general']['download_folder'],
- pyfile.package().folder, pyfile.name), pyfile.id)
+ filename = safe_join(download_folder, pyfile.package().folder, pyfile.name)
+ self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.name, filename, pyfile.id)
+
def packageFinished(self, pypack):
+ download_folder = self.config['general']['download_folder']
for script in self.scripts['package_finished']:
- folder = self.config['general']['download_folder']
- folder = safe_join(folder, pypack.folder)
-
+ folder = safe_join(download_folder, pypack.folder)
self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
+
def beforeReconnecting(self, ip):
for script in self.scripts['before_reconnect']:
self.callScript(script, ip)
+
def afterReconnecting(self, ip):
for script in self.scripts['after_reconnect']:
self.callScript(script, ip)
- def unrarFinished(self, folder, fname):
- for script in self.scripts['unrar_finished']:
- self.callScript(script, folder, fname)
+
+ def archive_extracted(self, pyfile, folder, filename, files):
+ for script in self.scripts['archive_extracted']:
+ self.callScript(script, folder, filename, files)
+ for script in self.scripts['unrar_finished']: #: deprecated
+ self.callScript(script, folder, filename)
+
+
+ def package_extracted(self, pypack):
+ download_folder = self.config['general']['download_folder']
+ for script in self.scripts['package_extracted']:
+            folder = safe_join(download_folder, pypack.folder)
+ self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
+
+
+ def all_archives_extracted(self):
+ for script in self.scripts['all_archives_extracted']:
+ self.callScript(script)
+
+
+ def all_archives_processed(self):
+ for script in self.scripts['all_archives_processed']:
+ self.callScript(script)
+
def allDownloadsFinished(self):
- for script in self.scripts['all_dls_finished']:
+ for script in chain(self.scripts['all_downloads_finished'], self.scripts['all_dls_finished']):
self.callScript(script)
+
def allDownloadsProcessed(self):
- for script in self.scripts['all_dls_processed']:
+ for script in chain(self.scripts['all_downloads_processed'], self.scripts['all_dls_processed']):
self.callScript(script)
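
Note: callScript simply passes its arguments through as argv, so a download_finished script receives plugin name, url, file name, full path and file id in that order (see downloadFinished above). A minimal example script; the log path is an arbitrary choice for illustration:

    #!/usr/bin/env python
    # example download_finished script; argument order as passed by downloadFinished
    import sys

    def main(argv):
        plugin, url, name, path, fid = argv[1:6]
        with open("/tmp/pyload_finished.log", "a") as log:  # hypothetical log path
            log.write("[%s] %s (id %s) saved to %s\n" % (plugin, name, fid, path))

    if __name__ == "__main__":
        main(sys.argv)
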
diff --git a/pyload/plugins/hooks/ExtractArchive.py b/pyload/plugins/hooks/ExtractArchive.py
index 1a2da53ad..8d6f09172 100644
--- a/pyload/plugins/hooks/ExtractArchive.py
+++ b/pyload/plugins/hooks/ExtractArchive.py
@@ -53,17 +53,14 @@ from pyload.utils import safe_join, fs_encode
class ExtractArchive(Hook):
- """
- Provides: unrarFinished (folder, filename)
- """
__name__ = "ExtractArchive"
__type__ = "hook"
- __version__ = "0.16"
+ __version__ = "0.17"
__config__ = [("activated", "bool", "Activated", True),
("fullpath", "bool", "Extract full path", True),
("overwrite", "bool", "Overwrite files", True),
- ("passwordfile", "file", "password file", "unrar_passwords.txt"),
+ ("passwordfile", "file", "password file", "archive_password.txt"),
("deletearchive", "bool", "Delete archives when done", False),
("subfolder", "bool", "Create subfolder for each package", False),
("destination", "folder", "Extract files to", ""),
@@ -73,8 +70,8 @@ class ExtractArchive(Hook):
("renice", "int", "CPU Priority", 0)]
__description__ = """Extract different kind of archives"""
- __author_name__ = ("pyLoad Team", "AndroKev")
- __author_mail__ = ("admin@pyload.org", "@pyloadforum")
+ __author_name__ = ("RaNaN", "AndroKev", "Walter Purcaro")
+ __author_mail__ = ("ranan@pyload.org", "@pyloadforum", "vuolter@gmail.com")
event_list = ["allDownloadsProcessed"]
@@ -96,12 +93,12 @@ class ExtractArchive(Hook):
if e.errno == 2:
self.logInfo(_("No %s installed") % p)
else:
- self.logWarning(_("Could not activate %s") % p, str(e))
+ self.logWarning(_("Could not activate %s") % p, e)
if self.core.debug:
print_exc()
except Exception, e:
- self.logWarning(_("Could not activate %s") % p, str(e))
+ self.logWarning(_("Could not activate %s") % p, e)
if self.core.debug:
print_exc()
@@ -113,33 +110,50 @@ class ExtractArchive(Hook):
# queue with package ids
self.queue = []
+
@Expose
def extractPackage(self, id):
""" Extract package with given id"""
self.manager.startThread(self.extract, [id])
+
def packageFinished(self, pypack):
+ pid = pypack.id
if self.getConfig("queue"):
self.logInfo(_("Package %s queued for later extracting") % pypack.name)
- self.queue.append(pypack.id)
+ self.queue.append(pid)
else:
- self.manager.startThread(self.extract, [pypack.id])
+ self.manager.startThread(self.extract, [pid])
+
@threaded
def allDownloadsProcessed(self, thread):
local = copy(self.queue)
del self.queue[:]
- self.extract(local, thread)
+ if self.extract(local, thread): #: check only if all gone fine, no failed reporting for now
+ self.manager.dispatchEvent("all_archives_extracted")
+ self.manager.dispatchEvent("all_archives_processed")
+
def extract(self, ids, thread=None):
+ processed = []
+ extracted = []
+ failed = []
+
+ destination = self.getConfig("destination")
+ subfolder = self.getConfig("subfolder")
+ fullpath = self.getConfig("fullpath")
+ overwrite = self.getConfig("overwrite")
+ excludefiles = self.getConfig("excludefiles")
+ renice = self.getConfig("renice")
+ recursive = self.getConfig("recursive")
+
# reload from txt file
self.reloadPasswords()
# dl folder
dl = self.config['general']['download_folder']
- extracted = []
-
#iterate packages -> plugins -> targets
for pid in ids:
p = self.core.files.getPackage(pid)
@@ -149,21 +163,17 @@ class ExtractArchive(Hook):
# determine output folder
out = safe_join(dl, p.folder, "")
- # force trailing slash
-
- if self.getConfig("destination") and self.getConfig("destination").lower() != "none":
out = safe_join(dl, p.folder, self.getConfig("destination"), "")
- #relative to package folder if destination is relative, otherwise absolute path overwrites them
-
- if self.getConfig("subfolder"):
+ if subfolder:
out = safe_join(out, fs_encode(p.folder))
- if not exists(out):
- makedirs(out)
+ if not exists(out):
+ makedirs(out)
files_ids = [(safe_join(dl, p.folder, x['name']), x['id']) for x in p.getChildren().itervalues()]
matched = False
+ success = True
# check as long there are unseen files
while files_ids:
@@ -175,36 +185,51 @@ class ExtractArchive(Hook):
self.logDebug("Targets for %s: %s" % (plugin.__name__, targets))
matched = True
for target, fid in targets:
- if target in extracted:
+ if target in processed:
self.logDebug(basename(target), "skipped")
continue
- extracted.append(target) # prevent extracting same file twice
- klass = plugin(self, target, out, self.getConfig("fullpath"), self.getConfig("overwrite"), self.getConfig("excludefiles"),
- self.getConfig("renice"))
- klass.init()
+ processed.append(target) # prevent extracting same file twice
self.logInfo(basename(target), _("Extract to %s") % out)
- new_files = self.startExtracting(klass, fid, p.password.strip().splitlines(), thread)
- self.logDebug("Extracted: %s" % new_files)
+ try:
+ klass = plugin(self, target, out, fullpath, overwrite, excludefiles, renice)
+ klass.init()
+ password = p.password.strip().splitlines()
+ new_files = self._extract(klass, fid, password, thread)
+ except Exception, e:
+ self.logError(basename(target), e)
+ success = False
+ continue
+
+ self.logDebug("Extracted", new_files)
self.setPermissions(new_files)
for file in new_files:
if not exists(file):
- self.logDebug("new file %s does not exists" % file)
+                        self.logDebug("New file %s does not exist" % file)
continue
- if self.getConfig("recursive") and isfile(file):
+ if recursive and isfile(file):
new_files_ids.append((file, fid)) # append as new target
files_ids = new_files_ids # also check extracted files
- if not matched:
+ if matched:
+ if success:
+ extracted.append(pid)
+ self.manager.dispatchEvent("package_extracted", p)
+ else:
+ failed.append(pid)
+ self.manager.dispatchEvent("package_extract_failed", p)
+ else:
self.logInfo(_("No files found to extract"))
- def startExtracting(self, plugin, fid, passwords, thread):
+ return True if not failed else False
+
+
+ def _extract(self, plugin, fid, passwords, thread):
pyfile = self.core.files.getFile(fid)
- if not pyfile:
- return []
+ deletearchive = self.getConfig("deletearchive")
pyfile.setCustomStatus(_("extracting"))
thread.addActive(pyfile) # keep this file until everything is done
@@ -218,17 +243,17 @@ class ExtractArchive(Hook):
success = True
else:
self.logInfo(basename(plugin.file), _("Password protected"))
- self.logDebug("Passwords: %s" % str(passwords))
+ self.logDebug("Passwords", passwords)
pwlist = copy(self.getPasswords())
- #remove already supplied pws from list (only local)
+ # remove already supplied pws from list (only local)
for pw in passwords:
if pw in pwlist:
pwlist.remove(pw)
for pw in passwords + pwlist:
try:
- self.logDebug("Try password: %s" % pw)
+ self.logDebug("Try password", pw)
if plugin.checkPassword(pw):
plugin.extract(progress, pw)
self.addPassword(pw)
@@ -238,13 +263,12 @@ class ExtractArchive(Hook):
self.logDebug("Password was wrong")
if not success:
- self.logError(basename(plugin.file), _("Wrong password"))
- return []
+ raise Exception(_("Wrong password"))
if self.core.debug:
- self.logDebug("Would delete: %s" % ", ".join(plugin.getDeleteFiles()))
+ self.logDebug("Would delete", ", ".join(plugin.getDeleteFiles()))
- if self.getConfig("deletearchive"):
+ if deletearchive:
files = plugin.getDeleteFiles()
self.logInfo(_("Deleting %s files") % len(files))
for f in files:
@@ -254,53 +278,60 @@ class ExtractArchive(Hook):
self.logDebug("%s does not exists" % f)
self.logInfo(basename(plugin.file), _("Extracting finished"))
- self.manager.dispatchEvent("unrarFinished", plugin.out, plugin.file)
- return plugin.getExtractedFiles()
+ extracted_files = plugin.getExtractedFiles()
+ self.manager.dispatchEvent("archive_extracted", pyfile, plugin.out, plugin.file, extracted_files)
+
+ return extracted_files
except ArchiveError, e:
- self.logError(basename(plugin.file), _("Archive Error"), str(e))
+ self.logError(basename(plugin.file), _("Archive Error"), e)
except CRCError:
self.logError(basename(plugin.file), _("CRC Mismatch"))
except Exception, e:
if self.core.debug:
print_exc()
- self.logError(basename(plugin.file), _("Unknown Error"), str(e))
+ self.logError(basename(plugin.file), _("Unknown Error"), e)
+
+ self.manager.dispatchEvent("archive_extract_failed", pyfile)
+ raise Exception(_("Extract failed"))
- return []
@Expose
def getPasswords(self):
""" List of saved passwords """
return self.passwords
+
def reloadPasswords(self):
- pwfile = self.getConfig("passwordfile")
- if not exists(pwfile):
- open(pwfile, "wb").close()
+ passwordfile = self.getConfig("passwordfile")
+ if not exists(passwordfile):
+ open(passwordfile, "wb").close()
passwords = []
- f = open(pwfile, "rb")
+ f = open(passwordfile, "rb")
for pw in f.read().splitlines():
passwords.append(pw)
f.close()
self.passwords = passwords
+
@Expose
def addPassword(self, pw):
""" Adds a password to saved list"""
- pwfile = self.getConfig("passwordfile")
+ passwordfile = self.getConfig("passwordfile")
if pw in self.passwords:
self.passwords.remove(pw)
self.passwords.insert(0, pw)
- f = open(pwfile, "wb")
+ f = open(passwordfile, "wb")
for pw in self.passwords:
f.write(pw + "\n")
f.close()
+
def setPermissions(self, files):
for f in files:
if not exists(f):
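
Note: reloadPasswords and addPassword treat the configured password file as plain text, one password per line, with the most recently confirmed password first. A tiny sketch of writing such a file (file name taken from the new default, contents invented; Python 2 string handling as in the plugin):

    passwords = ["first-try", "fallback"]  # invented sample passwords
    with open("archive_password.txt", "wb") as f:
        for pw in passwords:
            f.write(pw + "\n")
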
diff --git a/pyload/plugins/hooks/FreeWayMe.py b/pyload/plugins/hooks/FreeWayMe.py
index 35b275067..635bc3415 100644
--- a/pyload/plugins/hooks/FreeWayMe.py
+++ b/pyload/plugins/hooks/FreeWayMe.py
@@ -22,5 +22,5 @@ class FreeWayMe(MultiHoster):
def getHoster(self):
hostis = getURL("https://www.free-way.me/ajax/jd.php", get={"id": 3}).replace("\"", "").strip()
- self.logDebug("hosters: %s" % hostis)
+ self.logDebug("Hosters", hostis)
return [x.strip() for x in hostis.split(",") if x.strip()]
diff --git a/pyload/plugins/hooks/IRCInterface.py b/pyload/plugins/hooks/IRCInterface.py
index ef1fa2a09..99ac20acb 100644
--- a/pyload/plugins/hooks/IRCInterface.py
+++ b/pyload/plugins/hooks/IRCInterface.py
@@ -89,8 +89,8 @@ class IRCInterface(Thread, Hook):
for t in self.getConfig("owner").split():
if t.strip().startswith("#"):
self.sock.send("JOIN %s\r\n" % t.strip())
- self.logInfo("pyLoad IRC: Connected to %s!" % host)
- self.logInfo("pyLoad IRC: Switching to listening mode!")
+ self.logInfo(_("Connected to"), host)
+ self.logInfo(_("Switching to listening mode!"))
try:
self.main_loop()
@@ -177,7 +177,7 @@ class IRCInterface(Thread, Hook):
for line in res:
self.response(line, msg['origin'])
except Exception, e:
- self.logError("pyLoad IRC: " + repr(e))
+ self.logError(repr(e))
def response(self, msg, origin=""):
if origin == "":
diff --git a/pyload/plugins/hooks/ImageTyperz.py b/pyload/plugins/hooks/ImageTyperz.py
index 2591a1c78..8d2fb2006 100644
--- a/pyload/plugins/hooks/ImageTyperz.py
+++ b/pyload/plugins/hooks/ImageTyperz.py
@@ -61,7 +61,7 @@ class ImageTyperz(Hook):
except:
raise ImageTyperzException("invalid response")
- self.logInfo("Account balance: $%s left" % response)
+ self.logInfo(_("Account balance: $%s left") % response)
return balance
def submit(self, captcha, captchaType="file", match=None):
@@ -118,7 +118,7 @@ class ImageTyperz(Hook):
start_new_thread(self.processCaptcha, (task,))
else:
- self.logInfo("Your %s account has not enough credits" % self.__name__)
+            self.logInfo(_("Your %s account does not have enough credits") % self.__name__)
def captchaInvalid(self, task):
if task.data['service'] == self.__name__ and "ticket" in task.data:
@@ -127,9 +127,9 @@ class ImageTyperz(Hook):
"imageid": task.data['ticket']})
if response == "SUCCESS":
- self.logInfo("Bad captcha solution received, requested refund")
+ self.logInfo(_("Bad captcha solution received, requested refund"))
else:
- self.logError("Bad captcha solution received, refund request failed", response)
+ self.logError(_("Bad captcha solution received, refund request failed"), response)
def processCaptcha(self, task):
c = task.captchaFile
diff --git a/pyload/plugins/hooks/LinkdecrypterCom.py b/pyload/plugins/hooks/LinkdecrypterCom.py
index 34517761a..cb7ab9da5 100644
--- a/pyload/plugins/hooks/LinkdecrypterCom.py
+++ b/pyload/plugins/hooks/LinkdecrypterCom.py
@@ -52,4 +52,4 @@ class LinkdecrypterCom(Hook):
dict['pattern'] = regexp
dict['re'] = re.compile(regexp)
- self.logDebug("REGEXP: " + regexp)
+ self.logDebug("REGEXP", regexp)
diff --git a/pyload/plugins/hooks/MegaDebridEu.py b/pyload/plugins/hooks/MegaDebridEu.py
index 8c8894c9b..0345e47fa 100644
--- a/pyload/plugins/hooks/MegaDebridEu.py
+++ b/pyload/plugins/hooks/MegaDebridEu.py
@@ -25,7 +25,7 @@ class MegaDebridEu(MultiHoster):
if json_data['response_code'] == "ok":
host_list = [element[0] for element in json_data['hosters']]
else:
- self.logError("Unable to retrieve hoster list")
+ self.logError(_("Unable to retrieve hoster list"))
host_list = list()
return host_list
diff --git a/pyload/plugins/hooks/MergeFiles.py b/pyload/plugins/hooks/MergeFiles.py
index 5761a5990..540ebafdc 100644
--- a/pyload/plugins/hooks/MergeFiles.py
+++ b/pyload/plugins/hooks/MergeFiles.py
@@ -44,11 +44,11 @@ class MergeFiles(Hook):
download_folder = safe_join(download_folder, pack.folder)
for name, file_list in files.iteritems():
- self.logInfo("Starting merging of %s" % name)
+ self.logInfo(_("Starting merging of"), name)
final_file = open(safe_join(download_folder, name), "wb")
for splitted_file in file_list:
- self.logDebug("Merging part %s" % splitted_file)
+ self.logDebug("Merging part", splitted_file)
pyfile = self.core.files.getFile(fid_dict[splitted_file])
pyfile.setStatus("processing")
try:
@@ -64,7 +64,7 @@ class MergeFiles(Hook):
else:
break
s_file.close()
- self.logDebug("Finished merging part %s" % splitted_file)
+ self.logDebug("Finished merging part", splitted_file)
except Exception, e:
print traceback.print_exc()
finally:
@@ -73,4 +73,4 @@ class MergeFiles(Hook):
pyfile.release()
final_file.close()
- self.logInfo("Finished merging of %s" % name)
+ self.logInfo(_("Finished merging of"), name)
diff --git a/pyload/plugins/hooks/MultiHome.py b/pyload/plugins/hooks/MultiHome.py
index 61fbdd230..968214b5c 100644
--- a/pyload/plugins/hooks/MultiHome.py
+++ b/pyload/plugins/hooks/MultiHome.py
@@ -44,7 +44,7 @@ class MultiHome(Hook):
if iface:
iface.useFor(pluginName, account)
requestFactory.iface = lambda: iface.adress
- self.logDebug("Multihome: using address: " + iface.adress)
+ self.logDebug("Using address", iface.adress)
return oldGetRequest(pluginName, account)
requestFactory.getRequest = getRequest
diff --git a/pyload/plugins/hooks/MyfastfileCom.py b/pyload/plugins/hooks/MyfastfileCom.py
index 311bc2212..216dcaf5d 100644
--- a/pyload/plugins/hooks/MyfastfileCom.py
+++ b/pyload/plugins/hooks/MyfastfileCom.py
@@ -23,7 +23,7 @@ class MyfastfileCom(MultiHoster):
def getHoster(self):
json_data = getURL('http://myfastfile.com/api.php?hosts', decode=True)
- self.logDebug('JSON data: ' + json_data)
+ self.logDebug("JSON data", json_data)
json_data = json_loads(json_data)
return json_data['hosts']
diff --git a/pyload/plugins/hooks/OverLoadMe.py b/pyload/plugins/hooks/OverLoadMe.py
index a57c7c2b4..fae4209f8 100644
--- a/pyload/plugins/hooks/OverLoadMe.py
+++ b/pyload/plugins/hooks/OverLoadMe.py
@@ -26,6 +26,6 @@ class OverLoadMe(MultiHoster):
page = getURL(https + "://api.over-load.me/hoster.php",
get={"auth": "0001-cb1f24dadb3aa487bda5afd3b76298935329be7700cd7-5329be77-00cf-1ca0135f"}
).replace("\"", "").strip()
- self.logDebug("Hosterlist: %s" % page)
+ self.logDebug("Hosterlist", page)
return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugins/hooks/RehostTo.py b/pyload/plugins/hooks/RehostTo.py
index 097ebc646..059f36284 100644
--- a/pyload/plugins/hooks/RehostTo.py
+++ b/pyload/plugins/hooks/RehostTo.py
@@ -30,7 +30,7 @@ class RehostTo(MultiHoster):
user = self.account.selectAccount()[0]
if not user:
- self.logError("Rehost.to: " + _("Please add your rehost.to account first and restart pyLoad"))
+ self.logError(_("Please add your rehost.to account first and restart pyLoad"))
return
data = self.account.getAccountInfo(user)
diff --git a/pyload/plugins/hooks/RestartFailed.py b/pyload/plugins/hooks/RestartFailed.py
index a50ab60a4..8bad74620 100644
--- a/pyload/plugins/hooks/RestartFailed.py
+++ b/pyload/plugins/hooks/RestartFailed.py
@@ -31,7 +31,7 @@ class RestartFailed(Hook):
self.logDebug("Invalid interval value, kept current")
def periodical(self):
- self.logInfo("Restart failed downloads")
+ self.logInfo(_("Restart failed downloads"))
self.api.restartFailed()
def setup(self):
diff --git a/pyload/plugins/hooks/UnSkipOnFail.py b/pyload/plugins/hooks/UnSkipOnFail.py
index 941ce4fc7..40b0233f5 100644
--- a/pyload/plugins/hooks/UnSkipOnFail.py
+++ b/pyload/plugins/hooks/UnSkipOnFail.py
@@ -22,14 +22,14 @@ class UnSkipOnFail(Hook):
def downloadFailed(self, pyfile):
pyfile_name = basename(pyfile.name)
pid = pyfile.package().id
- msg = 'look for skipped duplicates for %s (pid:%s)...'
+ msg = _('look for skipped duplicates for %s (pid:%s)')
self.logInfo(msg % (pyfile_name, pid))
dups = self.findDuplicates(pyfile)
for link in dups:
# check if link is "skipped"(=4)
if link.status == 4:
lpid = link.packageID
- self.logInfo('restart "%s" (pid:%s)...' % (pyfile_name, lpid))
+ self.logInfo(_('restart "%s" (pid:%s)') % (pyfile_name, lpid))
self.setLinkStatus(link, "queued")
def findDuplicates(self, pyfile):
diff --git a/pyload/plugins/hooks/UpdateManager.py b/pyload/plugins/hooks/UpdateManager.py
index ece7ca610..6da39b239 100644
--- a/pyload/plugins/hooks/UpdateManager.py
+++ b/pyload/plugins/hooks/UpdateManager.py
@@ -207,7 +207,7 @@ class UpdateManager(Hook):
else:
raise Exception, _("Version mismatch")
except Exception, e:
- self.logError(_("Error updating plugin %s") % filename, str(e))
+ self.logError(_("Error updating plugin %s") % filename, e)
if blacklist:
blacklisted = sorted(map(lambda x: (x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]), blacklist))
@@ -247,7 +247,7 @@ class UpdateManager(Hook):
if not type_plugins:
return
- self.logDebug("Request deletion of plugins: %s" % type_plugins)
+ self.logDebug("Requested deletion of plugins", type_plugins)
removed = []
@@ -261,7 +261,7 @@ class UpdateManager(Hook):
try:
remove(filename)
except Exception, e:
- self.logDebug("Error deleting \"%s\"" % path.basename(filename), str(e))
+ self.logDebug("Error deleting", path.basename(filename), e)
err = True
filename += "c"
@@ -271,7 +271,7 @@ class UpdateManager(Hook):
self.manager.deactivateHook(name)
remove(filename)
except Exception, e:
- self.logDebug("Error deleting \"%s\"" % path.basename(filename), str(e))
+ self.logDebug("Error deleting", path.basename(filename), e)
err = True
if not err:
diff --git a/pyload/plugins/hooks/XFileSharingPro.py b/pyload/plugins/hooks/XFileSharingPro.py
index 7478034c6..635d5302b 100644
--- a/pyload/plugins/hooks/XFileSharingPro.py
+++ b/pyload/plugins/hooks/XFileSharingPro.py
@@ -8,7 +8,7 @@ from pyload.plugins.Hook import Hook
class XFileSharingPro(Hook):
__name__ = "XFileSharingPro"
__type__ = "hook"
- __version__ = "0.11"
+ __version__ = "0.12"
__config__ = [("activated", "bool", "Activated", True),
("loadDefault", "bool", "Include default (built-in) hoster list", True),
@@ -23,6 +23,7 @@ class XFileSharingPro(Hook):
def coreReady(self):
self.loadPattern()
+
def loadPattern(self):
hosterList = self.getConfigSet('includeList')
excludeList = self.getConfigSet('excludeList')
@@ -60,18 +61,19 @@ class XFileSharingPro(Hook):
self.unload()
return
- regexp = r"http://(?:[^/]*\.)?(%s)/\w{12}" % ("|".join(sorted(hosterList)).replace('.', '\.'))
- #self.logDebug(regexp)
+ regexp = r"http://(?:[^/]*\.)?(%s)/(?:embed-)?\w{12}" % ("|".join(sorted(hosterList)).replace('.', '\.'))
dict = self.core.pluginManager.hosterPlugins['XFileSharingPro']
dict['pattern'] = regexp
dict['re'] = re.compile(regexp)
self.logDebug("Pattern loaded - handling %d hosters" % len(hosterList))
+
def getConfigSet(self, option):
s = self.getConfig(option).lower().replace('|', ',').replace(';', ',')
return set([x.strip() for x in s.split(',')])
+
def unload(self):
dict = self.core.pluginManager.hosterPlugins['XFileSharingPro']
dict['pattern'] = r'^unmatchable$'
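
Note: loadPattern joins the configured hoster names into one alternation, and the new (?:embed-)? group extends the match to embed links. Rebuilding the pattern with a throwaway hoster set shows the effect:

    import re

    hosters = ["180upload.com", "cramit.in"]  # throwaway sample, not the real config
    regexp = r"http://(?:[^/]*\.)?(%s)/(?:embed-)?\w{12}" % ("|".join(sorted(hosters)).replace('.', '\.'))

    assert re.match(regexp, "http://www.cramit.in/abcdef123456")
    assert re.match(regexp, "http://180upload.com/embed-abcdef123456")
    assert not re.match(regexp, "http://example.org/abcdef123456")
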
diff --git a/pyload/plugins/hooks/XMPPInterface.py b/pyload/plugins/hooks/XMPPInterface.py
index 881e7f5dc..4a01493a6 100644
--- a/pyload/plugins/hooks/XMPPInterface.py
+++ b/pyload/plugins/hooks/XMPPInterface.py
@@ -84,22 +84,22 @@ class XMPPInterface(IRCInterface, JabberClient):
try:
self.loop()
except Exception, ex:
- self.logError("pyLoad XMPP: %s" % str(ex))
+ self.logError(ex)
def stream_state_changed(self, state, arg):
"""This one is called when the state of stream connecting the component
to a server changes. This will usually be used to let the user
know what is going on."""
- self.logDebug("pyLoad XMPP: *** State changed: %s %r ***" % (state, arg))
+ self.logDebug("*** State changed: %s %r ***" % (state, arg))
def disconnected(self):
- self.logDebug("pyLoad XMPP: Client was disconnected")
+ self.logDebug("Client was disconnected")
def stream_closed(self, stream):
- self.logDebug("pyLoad XMPP: Stream was closed | %s" % stream)
+ self.logDebug("Stream was closed", stream)
def stream_error(self, err):
- self.logDebug("pyLoad XMPP: Stream Error: %s" % err)
+ self.logDebug("Stream Error", err)
def get_message_handlers(self):
"""Return list of (message_type, message_handler) tuples.
@@ -113,8 +113,8 @@ class XMPPInterface(IRCInterface, JabberClient):
subject = stanza.get_subject()
body = stanza.get_body()
t = stanza.get_type()
- self.logDebug(u'pyLoad XMPP: Message from %s received.' % (unicode(stanza.get_from(),)))
- self.logDebug(u'pyLoad XMPP: Body: %s Subject: %s Type: %s' % (body, subject, t))
+ self.logDebug("Message from %s received." % unicode(stanza.get_from()))
+ self.logDebug("Body: %s Subject: %s Type: %s" % (body, subject, t))
if t == "headline":
# 'headline' messages should never be replied to
@@ -158,7 +158,7 @@ class XMPPInterface(IRCInterface, JabberClient):
messages.append(m)
except Exception, e:
- self.logError("pyLoad XMPP: " + repr(e))
+ self.logError(repr(e))
return messages
@@ -171,7 +171,7 @@ class XMPPInterface(IRCInterface, JabberClient):
def announce(self, message):
""" send message to all owners"""
for user in self.getConfig("owners").split(";"):
- self.logDebug("pyLoad XMPP: Send message to %s" % user)
+ self.logDebug("Send message to", user)
to_jid = JID(user)
diff --git a/pyload/plugins/hoster/AlldebridCom.py b/pyload/plugins/hoster/AlldebridCom.py
index bdb5b1599..ecf53701b 100644
--- a/pyload/plugins/hoster/AlldebridCom.py
+++ b/pyload/plugins/hoster/AlldebridCom.py
@@ -50,7 +50,7 @@ class AlldebridCom(Hoster):
page = self.load(url)
data = json_loads(page)
- self.logDebug("Json data: %s" % str(data))
+ self.logDebug("Json data", data)
if data['error']:
if data['error'] == "This link isn't available on the hoster website.":
diff --git a/pyload/plugins/hoster/BasePlugin.py b/pyload/plugins/hoster/BasePlugin.py
index 55cdf5b88..3fdd0348d 100644
--- a/pyload/plugins/hoster/BasePlugin.py
+++ b/pyload/plugins/hoster/BasePlugin.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
-from re import match, search
+import re
+
from urllib import unquote
from urlparse import urlparse
@@ -25,10 +26,11 @@ class BasePlugin(Hoster):
self.chunkLimit = -1
self.resumeDownload = True
+
def process(self, pyfile):
"""main function"""
- #debug part, for api exerciser
+ #: debug part, for api exerciser
if pyfile.url.startswith("DEBUG_API"):
self.multiDL = False
return
@@ -74,6 +76,7 @@ class BasePlugin(Hoster):
else:
self.fail("No Plugin matched and not a downloadable url.")
+
def downloadFile(self, pyfile):
url = pyfile.url
@@ -86,7 +89,7 @@ class BasePlugin(Hoster):
if 'location' in header:
self.logDebug("Location: " + header['location'])
- base = match(r'https?://[^/]+', url).group(0)
+ base = re.match(r'https?://[^/]+', url).group(0)
if header['location'].startswith("http"):
url = header['location']
elif header['location'].startswith("/"):
@@ -100,7 +103,7 @@ class BasePlugin(Hoster):
if 'content-disposition' in header:
self.logDebug("Content-Disposition: " + header['content-disposition'])
- m = search("filename(?P<type>=|\*=(?P<enc>.+)'')(?P<name>.*)", header['content-disposition'])
+ m = re.search("filename(?P<type>=|\*=(?P<enc>.+)'')(?P<name>.*)", header['content-disposition'])
if m:
disp = m.groupdict()
self.logDebug(disp)
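
Note: the content-disposition regexp above (now accessed through the re module import) handles both the plain filename= form and the extended filename*= form. A small check with a made-up header value:

    import re

    header = "attachment; filename*=UTF-8''report%202014.pdf"  # invented sample header
    m = re.search("filename(?P<type>=|\*=(?P<enc>.+)'')(?P<name>.*)", header)
    # m.groupdict() == {'type': "*=UTF-8''", 'enc': 'UTF-8', 'name': 'report%202014.pdf'}
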
diff --git a/pyload/plugins/hoster/BillionuploadsCom.py b/pyload/plugins/hoster/BillionuploadsCom.py
index 6c14d103d..d6f39b61c 100644
--- a/pyload/plugins/hoster/BillionuploadsCom.py
+++ b/pyload/plugins/hoster/BillionuploadsCom.py
@@ -8,12 +8,13 @@ class BillionuploadsCom(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.01"
- __pattern__ = r'http://(?:www\.)?billionuploads.com/\w{12}'
+ __pattern__ = r'http://(?:www\.)?billionuploads\.com/\w{12}'
__description__ = """Billionuploads.com hoster plugin"""
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
+
HOSTER_NAME = "billionuploads.com"
FILE_NAME_PATTERN = r'<b>Filename:</b>(?P<N>.*?)<br>'
diff --git a/pyload/plugins/hoster/CatShareNet.py b/pyload/plugins/hoster/CatShareNet.py
index 415ec2379..36f2ea441 100644
--- a/pyload/plugins/hoster/CatShareNet.py
+++ b/pyload/plugins/hoster/CatShareNet.py
@@ -9,36 +9,50 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class CatShareNet(SimpleHoster):
__name__ = "CatShareNet"
__type__ = "hoster"
- __version__ = "0.01"
+ __version__ = "0.05"
- __pattern__ = r'http://(?:www\.)?catshare.net/\w{16}.*'
+ __pattern__ = r'http://(?:www\.)?catshare\.net/\w{16}'
__description__ = """CatShare.net hoster plugin"""
- __author_name__ = "z00nx"
- __author_mail__ = "z00nx0@gmail.com"
+ __author_name__ = ("z00nx", "prOq", "Walter Purcaro")
+ __author_mail__ = ("z00nx0@gmail.com", None, "vuolter@gmail.com")
- FILE_INFO_PATTERN = r'<h3 class="pull-left"[^>]+>(?P<N>.*)</h3>\s+<h3 class="pull-right"[^>]+>(?P<S>.*)</h3>'
- OFFLINE_PATTERN = r'Podany plik zosta'
- SECONDS_PATTERN = r'var\s+count\s+=\s+(\d+);'
+ FILE_INFO_PATTERN = r'<title>(?P<N>.+) \((?P<S>[\d.]+) (?P<U>\w+)\)<'
+ OFFLINE_PATTERN = r'Podany plik został usunięty\s*</div>'
+    IP_BLOCKED_PATTERN = r'>Nasz serwis wykrył że Twój adres IP nie pochodzi z Polski.<'
+ SECONDS_PATTERN = 'var count = (\d+);'
RECAPTCHA_KEY = "6Lfln9kSAAAAANZ9JtHSOgxUPB9qfDFeLUI_QMEy"
+ LINK_PATTERN = r'<form action="(.+?)" method="GET">'
+
+
+ def getFileInfo(self):
+ m = re.search(self.IP_BLOCKED_PATTERN, self.html)
+        if m is not None:
+            self.fail("Only connections from Polish IP addresses are allowed")
+ return super(CatShareNet, self).getFileInfo()
def handleFree(self):
m = re.search(self.SECONDS_PATTERN, self.html)
- seconds = int(m.group(1))
- self.logDebug("Seconds found", seconds)
- self.wait(seconds + 1)
+ if m:
+ wait_time = int(m.group(1))
+ self.wait(wait_time, True)
+
recaptcha = ReCaptcha(self)
challenge, code = recaptcha.challenge(self.RECAPTCHA_KEY)
- post_data = {"recaptcha_challenge_field": challenge, "recaptcha_response_field": code}
- self.download(self.pyfile.url, post=post_data)
- check = self.checkDownload({"html": re.compile("\A<!DOCTYPE html PUBLIC")})
- if check == "html":
- self.logDebug("Wrong captcha entered")
+ self.html = self.load(self.pyfile.url,
+ post={'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field': code})
+
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
self.invalidCaptcha()
- self.retry()
+ self.retry(reason="Wrong captcha entered")
+
+ dl_link = m.group(1)
+ self.download(dl_link)
getInfo = create_getInfo(CatShareNet)
diff --git a/pyload/plugins/hoster/CramitIn.py b/pyload/plugins/hoster/CramitIn.py
index 6c5142d96..7091e02c2 100644
--- a/pyload/plugins/hoster/CramitIn.py
+++ b/pyload/plugins/hoster/CramitIn.py
@@ -8,20 +8,17 @@ class CramitIn(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.04"
- __pattern__ = r'http://(?:www\.)?cramit.in/\w{12}'
+ __pattern__ = r'http://(?:www\.)?cramit\.in/\w{12}'
__description__ = """Cramit.in hoster plugin"""
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
+
HOSTER_NAME = "cramit.in"
FILE_INFO_PATTERN = r'<span class=t2>\s*(?P<N>.*?)</span>.*?<small>\s*\((?P<S>.*?)\)'
LINK_PATTERN = r'href="(http://cramit.in/file_download/.*?)"'
- def setup(self):
- self.resumeDownload = self.multiDL = self.premium
-
-
getInfo = create_getInfo(CramitIn)
diff --git a/pyload/plugins/hoster/CzshareCom.py b/pyload/plugins/hoster/CzshareCom.py
index 0e6fab15a..f5df313f7 100644
--- a/pyload/plugins/hoster/CzshareCom.py
+++ b/pyload/plugins/hoster/CzshareCom.py
@@ -27,7 +27,7 @@ class CzshareCom(SimpleHoster):
FILE_SIZE_REPLACEMENTS = [(' ', '')]
FILE_URL_REPLACEMENTS = [(r'http://[^/]*/download.php\?.*?id=(\w+).*', r'http://sdilej.cz/\1/x/')]
- SH_CHECK_TRAFFIC = True
+ FORCE_CHECK_TRAFFIC = True
FREE_URL_PATTERN = r'<a href="([^"]+)" class="page-download">[^>]*alt="([^"]+)" /></a>'
FREE_FORM_PATTERN = r'<form action="download.php" method="post">\s*<img src="captcha.php" id="captcha" />(.*?)</form>'
@@ -57,7 +57,7 @@ class CzshareCom(SimpleHoster):
return False
except Exception, e:
# let's continue and see what happens...
- self.logError('Parse error (CREDIT): %s' % e)
+ self.logError("Parse error (CREDIT): %s" % e)
return True
diff --git a/pyload/plugins/hoster/DataHu.py b/pyload/plugins/hoster/DataHu.py
index 68162c203..222278b49 100644
--- a/pyload/plugins/hoster/DataHu.py
+++ b/pyload/plugins/hoster/DataHu.py
@@ -31,7 +31,7 @@ class DataHu(SimpleHoster):
m = re.search(self.LINK_PATTERN, self.html)
if m:
url = m.group(1)
- self.logDebug('Direct link: ' + url)
+ self.logDebug("Direct link: " + url)
else:
self.parseError('Unable to get direct link')
diff --git a/pyload/plugins/hoster/DateiTo.py b/pyload/plugins/hoster/DateiTo.py
index 1e8ca3614..9ada88157 100644
--- a/pyload/plugins/hoster/DateiTo.py
+++ b/pyload/plugins/hoster/DateiTo.py
@@ -61,7 +61,7 @@ class DateiTo(SimpleHoster):
self.fail('Too bad...')
download_url = self.html
- self.logDebug('Download URL', download_url)
+ self.logDebug("Download URL", download_url)
self.download(download_url)
def checkErrors(self):
diff --git a/pyload/plugins/hoster/DepositfilesCom.py b/pyload/plugins/hoster/DepositfilesCom.py
index 9c0348cbd..2f647514f 100644
--- a/pyload/plugins/hoster/DepositfilesCom.py
+++ b/pyload/plugins/hoster/DepositfilesCom.py
@@ -27,7 +27,7 @@ class DepositfilesCom(SimpleHoster):
(r'.*<b title="(?P<N>[^"]+).*', "\g<N>")]
FILE_URL_REPLACEMENTS = [(__pattern__, "https://dfiles.eu/files/\g<ID>")]
- SH_COOKIES = [(".dfiles.eu", "lang_current", "en")]
+ COOKIES = [(".dfiles.eu", "lang_current", "en")]
RECAPTCHA_PATTERN = r"Recaptcha.create\('([^']+)'"
@@ -106,7 +106,7 @@ class DepositfilesCom(SimpleHoster):
self.retry(wait_time=60)
def handlePremium(self):
- self.html = self.load(self.pyfile.url, cookies=self.SH_COOKIES)
+ self.html = self.load(self.pyfile.url, cookies=self.COOKIES)
if '<span class="html_download_api-gold_traffic_limit">' in self.html:
self.logWarning("Download limit reached")
diff --git a/pyload/plugins/hoster/EasybytezCom.py b/pyload/plugins/hoster/EasybytezCom.py
index 7b1d8881f..e010aee2a 100644
--- a/pyload/plugins/hoster/EasybytezCom.py
+++ b/pyload/plugins/hoster/EasybytezCom.py
@@ -8,12 +8,13 @@ class EasybytezCom(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.18"
- __pattern__ = r'http://(?:www\.)?easybytez.com/(\w+).*'
+ __pattern__ = r'http://(?:www\.)?easybytez\.com/\w{12}'
__description__ = """Easybytez.com hoster plugin"""
__author_name__ = ("zoidberg", "stickell")
__author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it")
+
HOSTER_NAME = "easybytez.com"
FILE_INFO_PATTERN = r'<span class="name">(?P<N>.+)</span><br>\s*<span class="size">(?P<S>[^<]+)</span>'
@@ -24,8 +25,4 @@ class EasybytezCom(XFileSharingPro):
ERROR_PATTERN = r'(?:class=["\']err["\'][^>]*>|<Center><b>)(.*?)</'
- def setup(self):
- self.resumeDownload = self.multiDL = self.premium
-
-
getInfo = create_getInfo(EasybytezCom)
diff --git a/pyload/plugins/hoster/EdiskCz.py b/pyload/plugins/hoster/EdiskCz.py
index 4c532b33f..fcb42020d 100644
--- a/pyload/plugins/hoster/EdiskCz.py
+++ b/pyload/plugins/hoster/EdiskCz.py
@@ -29,7 +29,7 @@ class EdiskCz(SimpleHoster):
def process(self, pyfile):
url = re.sub("/(stahni|sk/stahni)/", "/en/download/", pyfile.url)
- self.logDebug('URL:' + url)
+ self.logDebug("URL:" + url)
m = re.search(self.ACTION_PATTERN, url)
if m is None:
diff --git a/pyload/plugins/hoster/EgoFilesCom.py b/pyload/plugins/hoster/EgoFilesCom.py
index 7d59b274c..7bf723926 100644
--- a/pyload/plugins/hoster/EgoFilesCom.py
+++ b/pyload/plugins/hoster/EgoFilesCom.py
@@ -32,7 +32,7 @@ class EgoFilesCom(SimpleHoster):
self.load("https://egofiles.com/ajax/lang.php?lang=en", just_header=True)
def process(self, pyfile):
- if self.premium and (not self.SH_CHECK_TRAFFIC or self.checkTrafficLeft()):
+ if self.premium and (not self.FORCE_CHECK_TRAFFIC or self.checkTrafficLeft()):
self.handlePremium()
else:
self.handleFree()
@@ -56,7 +56,7 @@ class EgoFilesCom(SimpleHoster):
self.html = self.load(self.pyfile.url, post=post_data, decode=True)
m = re.search(self.LINK_PATTERN, self.html)
if m is None:
- self.logInfo('Wrong captcha')
+ self.logInfo("Wrong captcha")
self.invalidCaptcha()
elif hasattr(m, 'group'):
downloadURL = m.group('link')
@@ -73,7 +73,7 @@ class EgoFilesCom(SimpleHoster):
def handlePremium(self):
header = self.load(self.pyfile.url, just_header=True)
if 'location' in header:
- self.logDebug('DIRECT LINK from header: ' + header['location'])
+ self.logDebug("DIRECT LINK from header: " + header['location'])
self.download(header['location'])
else:
self.html = self.load(self.pyfile.url, decode=True)
@@ -82,7 +82,7 @@ class EgoFilesCom(SimpleHoster):
if m is None:
self.parseError('Unable to detect direct download url')
else:
- self.logDebug('DIRECT URL from html: ' + m.group('link'))
+ self.logDebug("DIRECT URL from html: " + m.group('link'))
self.download(m.group('link'), disposition=True)
diff --git a/pyload/plugins/hoster/FastixRu.py b/pyload/plugins/hoster/FastixRu.py
index aa1794047..cb0cdb278 100644
--- a/pyload/plugins/hoster/FastixRu.py
+++ b/pyload/plugins/hoster/FastixRu.py
@@ -47,7 +47,7 @@ class FastixRu(Hoster):
url = "http://fastix.ru/api_v2/?apikey=%s&sub=getdirectlink&link=%s" % (api_key, pyfile.url)
page = self.load(url)
data = json_loads(page)
- self.logDebug("Json data: %s" % str(data))
+ self.logDebug("Json data", data)
if "error\":true" in page:
self.offline()
else:
diff --git a/pyload/plugins/hoster/FastshareCz.py b/pyload/plugins/hoster/FastshareCz.py
index 3897a1c23..a5a3dece1 100644
--- a/pyload/plugins/hoster/FastshareCz.py
+++ b/pyload/plugins/hoster/FastshareCz.py
@@ -26,7 +26,7 @@ class FastshareCz(SimpleHoster):
FILE_URL_REPLACEMENTS = [("#.*", "")]
- SH_COOKIES = [(".fastshare.cz", "lang", "en")]
+ COOKIES = [(".fastshare.cz", "lang", "en")]
FREE_URL_PATTERN = r'action=(/free/.*?)>\s*<img src="([^"]*)"><br'
PREMIUM_URL_PATTERN = r'(http://data\d+\.fastshare\.cz/download\.php\?id=\d+&)'
diff --git a/pyload/plugins/hoster/File4safeCom.py b/pyload/plugins/hoster/File4safeCom.py
index 4aa0e26a4..a86ed033e 100644
--- a/pyload/plugins/hoster/File4safeCom.py
+++ b/pyload/plugins/hoster/File4safeCom.py
@@ -12,12 +12,13 @@ class File4safeCom(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.02"
- __pattern__ = r'https?://(?:www\.)?file4safe\.com/\w+'
+ __pattern__ = r'https?://(?:www\.)?file4safe\.com/\w{12}'
__description__ = """File4safe.com hoster plugin"""
__author_name__ = "stickell"
__author_mail__ = "l.stickell@yahoo.it"
+
HOSTER_NAME = "file4safe.com"
diff --git a/pyload/plugins/hoster/FileParadoxIn.py b/pyload/plugins/hoster/FileParadoxIn.py
index 955a9726b..436fed357 100644
--- a/pyload/plugins/hoster/FileParadoxIn.py
+++ b/pyload/plugins/hoster/FileParadoxIn.py
@@ -10,12 +10,13 @@ class FileParadoxIn(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.01"
- __pattern__ = r'https?://(?:www\.)?fileparadox\.in/\w+'
+ __pattern__ = r'https?://(?:www\.)?fileparadox\.in/\w{12}'
__description__ = """FileParadox.in hoster plugin"""
__author_name__ = "RazorWing"
__author_mail__ = "muppetuk1@hotmail.com"
+
HOSTER_NAME = "fileparadox.in"
FILE_SIZE_PATTERN = r'</font>\s*\(\s*(?P<S>[^)]+)\s*\)</font>'
diff --git a/pyload/plugins/hoster/FilefactoryCom.py b/pyload/plugins/hoster/FilefactoryCom.py
index fafe96477..03af98843 100644
--- a/pyload/plugins/hoster/FilefactoryCom.py
+++ b/pyload/plugins/hoster/FilefactoryCom.py
@@ -33,7 +33,7 @@ class FilefactoryCom(SimpleHoster):
OFFLINE_PATTERN = r'<h2>File Removed</h2>|This file is no longer available'
PREMIUM_ONLY_PATTERN = r'>Premium Account Required<'
- SH_COOKIES = [(".filefactory.com", "locale", "en_US.utf8")]
+ COOKIES = [(".filefactory.com", "locale", "en_US.utf8")]
def handleFree(self):
@@ -73,7 +73,7 @@ class FilefactoryCom(SimpleHoster):
self.parseError('Unable to detect free direct link')
direct = direct.group(1)
- self.logDebug('DIRECT LINK: ' + direct)
+ self.logDebug("DIRECT LINK: " + direct)
self.download(direct, disposition=True)
check = self.checkDownload({"multiple": "You are currently downloading too many files at once.",
@@ -102,5 +102,5 @@ class FilefactoryCom(SimpleHoster):
else:
self.parseError('Unable to detect premium direct link')
- self.logDebug('DIRECT PREMIUM LINK: ' + url)
+ self.logDebug("DIRECT PREMIUM LINK: " + url)
self.download(url, disposition=True)
diff --git a/pyload/plugins/hoster/FileomCom.py b/pyload/plugins/hoster/FileomCom.py
index 11052e289..a5de24a3f 100644
--- a/pyload/plugins/hoster/FileomCom.py
+++ b/pyload/plugins/hoster/FileomCom.py
@@ -11,16 +11,16 @@ class FileomCom(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.01"
- __pattern__ = r'https?://(?:www\.)?fileom\.com/\w+'
+ __pattern__ = r'https?://(?:www\.)?fileom\.com/\w{12}'
__description__ = """Fileom.com hoster plugin"""
__author_name__ = "Walter Purcaro"
__author_mail__ = "vuolter@gmail.com"
+
HOSTER_NAME = "fileom.com"
FILE_URL_REPLACEMENTS = [(r'/$', "")]
- SH_COOKIES = [(".fileom.com", "lang", "english")]
FILE_NAME_PATTERN = r'Filename: <span>(?P<N>.+?)<'
FILE_SIZE_PATTERN = r'File Size: <span class="size">(?P<S>[\d\.]+) (?P<U>\w+)'
@@ -31,9 +31,9 @@ class FileomCom(XFileSharingPro):
def setup(self):
- self.resumeDownload = self.premium
self.multiDL = True
self.chunkLimit = 1
+ self.resumeDownload = self.premium
getInfo = create_getInfo(FileomCom)
diff --git a/pyload/plugins/hoster/FilepostCom.py b/pyload/plugins/hoster/FilepostCom.py
index 382971c61..03eddee91 100644
--- a/pyload/plugins/hoster/FilepostCom.py
+++ b/pyload/plugins/hoster/FilepostCom.py
@@ -114,7 +114,7 @@ class FilepostCom(SimpleHoster):
elif 'CAPTCHA Code nicht korrekt' in json_response['js']['error']:
return None
elif 'CAPTCHA' in json_response['js']['error']:
- self.logDebug('error response is unknown, but mentions CAPTCHA -> return None')
+ self.logDebug("Error response is unknown, but mentions CAPTCHA")
return None
else:
self.fail(json_response['js']['error'])
diff --git a/pyload/plugins/hoster/FilerNet.py b/pyload/plugins/hoster/FilerNet.py
index 5f1b6bea8..bf33f7fb3 100644
--- a/pyload/plugins/hoster/FilerNet.py
+++ b/pyload/plugins/hoster/FilerNet.py
@@ -31,14 +31,14 @@ class FilerNet(SimpleHoster):
def process(self, pyfile):
- if self.premium and (not self.SH_CHECK_TRAFFIC or self.checkTrafficLeft()):
+ if self.premium and (not self.FORCE_CHECK_TRAFFIC or self.checkTrafficLeft()):
self.handlePremium()
else:
self.handleFree()
def handleFree(self):
self.req.setOption("timeout", 120)
- self.html = self.load(self.pyfile.url, decode=not self.SH_BROKEN_ENCODING, cookies=self.SH_COOKIES)
+ self.html = self.load(self.pyfile.url, decode=not self.TEXT_ENCODING, cookies=self.COOKIES)
# Wait between downloads
m = re.search(r'musst du <span id="time">(\d+)</span> Sekunden warten', self.html)
@@ -54,7 +54,7 @@ class FilerNet(SimpleHoster):
if 'token' not in inputs:
self.parseError('Unable to detect token')
token = inputs['token']
- self.logDebug('Token: ' + token)
+ self.logDebug("Token: " + token)
self.html = self.load(self.pyfile.url, post={'token': token}, decode=True)
@@ -62,7 +62,7 @@ class FilerNet(SimpleHoster):
if 'hash' not in inputs:
self.parseError('Unable to detect hash')
hash_data = inputs['hash']
- self.logDebug('Hash: ' + hash_data)
+ self.logDebug("Hash: " + hash_data)
downloadURL = r''
recaptcha = ReCaptcha(self)
@@ -83,7 +83,7 @@ class FilerNet(SimpleHoster):
self.correctCaptcha()
break
else:
- self.logInfo('Wrong captcha')
+ self.logInfo("Wrong captcha")
self.invalidCaptcha()
if not downloadURL:
@@ -102,7 +102,7 @@ class FilerNet(SimpleHoster):
self.parseError("Unable to detect direct link, try to enable 'Direct download' in your user settings")
dl = 'http://filer.net' + m.group(1)
- self.logDebug('Direct link: ' + dl)
+ self.logDebug("Direct link: " + dl)
self.download(dl, disposition=True)
diff --git a/pyload/plugins/hoster/FilerioCom.py b/pyload/plugins/hoster/FilerioCom.py
index 31d04b0ee..5c62b0da8 100644
--- a/pyload/plugins/hoster/FilerioCom.py
+++ b/pyload/plugins/hoster/FilerioCom.py
@@ -14,14 +14,11 @@ class FilerioCom(XFileSharingPro):
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
+
HOSTER_NAME = "filerio.in"
OFFLINE_PATTERN = r'<b>&quot;File Not Found&quot;</b>|File has been removed due to Copyright Claim'
FILE_URL_REPLACEMENTS = [(r'http://.*?/', 'http://filerio.in/')]
- def setup(self):
- self.resumeDownload = self.multiDL = self.premium
-
-
getInfo = create_getInfo(FilerioCom)
diff --git a/pyload/plugins/hoster/FileserveCom.py b/pyload/plugins/hoster/FileserveCom.py
index 5892cd96a..367545618 100644
--- a/pyload/plugins/hoster/FileserveCom.py
+++ b/pyload/plugins/hoster/FileserveCom.py
@@ -103,7 +103,7 @@ class FileserveCom(Hoster):
# show download link
response = self.load(self.url, post={"downloadLink": "show"}, decode=True)
- self.logDebug("show downloadLink response : %s" % response)
+ self.logDebug("Show downloadLink response : %s" % response)
if "fail" in response:
self.fail("Couldn't retrieve download url")
@@ -130,7 +130,7 @@ class FileserveCom(Hoster):
def doTimmer(self):
response = self.load(self.url, post={"downloadLink": "wait"}, decode=True)
- self.logDebug("wait response : %s" % response[:80])
+ self.logDebug("Wait response : %s" % response[:80])
if "fail" in response:
self.fail("Failed getting wait time")
diff --git a/pyload/plugins/hoster/FilezyNet.py b/pyload/plugins/hoster/FilezyNet.py
index eeba4add0..4bd5de495 100644
--- a/pyload/plugins/hoster/FilezyNet.py
+++ b/pyload/plugins/hoster/FilezyNet.py
@@ -1,42 +1,18 @@
# -*- coding: utf-8 -*-
-import re
+from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-from pyload.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo
-
-class FilezyNet(XFileSharingPro):
+class FilezyNet(DeadHoster):
__name__ = "FilezyNet"
__type__ = "hoster"
- __version__ = "0.1"
+ __version__ = "0.2"
- __pattern__ = r'http://(?:www\.)?filezy.net/.*/.*.html'
+ __pattern__ = r'http://(?:www\.)?filezy\.net/\w{12}'
__description__ = """Filezy.net hoster plugin"""
__author_name__ = None
__author_mail__ = None
- HOSTER_NAME = "filezy.net"
-
- FILE_SIZE_PATTERN = r'<span class="plansize">(?P<S>[0-9.]+) (?P<U>[kKMG])i?B</span>'
- WAIT_PATTERN = r'<div id="countdown_str" class="seconds">\n<!--Wait--> <span id=".*?">(\d+)</span>'
- DOWNLOAD_JS_PATTERN = r"<script type='text/javascript'>eval(.*)"
-
-
- def setup(self):
- self.resumeDownload = True
- self.multiDL = self.premium
-
- def getDownloadLink(self):
- self.logDebug("Getting download link")
-
- data = self.getPostParameters()
- self.html = self.load(self.pyfile.url, post=data, ref=True, decode=True)
-
- obfuscated_js = re.search(self.DOWNLOAD_JS_PATTERN, self.html)
- dl_file_now = self.js.eval(obfuscated_js.group(1))
- link = re.search(self.LINK_PATTERN, dl_file_now)
- return link.group(1)
-
getInfo = create_getInfo(FilezyNet)
diff --git a/pyload/plugins/hoster/FiredriveCom.py b/pyload/plugins/hoster/FiredriveCom.py
index a9d62bb75..8bd841c8f 100644
--- a/pyload/plugins/hoster/FiredriveCom.py
+++ b/pyload/plugins/hoster/FiredriveCom.py
@@ -8,7 +8,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class FiredriveCom(SimpleHoster):
__name__ = "FiredriveCom"
__type__ = "hoster"
- __version__ = "0.03"
+ __version__ = "0.04"
__pattern__ = r'https?://(?:www\.)?(firedrive|putlocker)\.com/(mobile/)?(file|embed)/(?P<ID>\w+)'
@@ -19,7 +19,7 @@ class FiredriveCom(SimpleHoster):
FILE_NAME_PATTERN = r'<b>Name:</b> (?P<N>.+) <br>'
FILE_SIZE_PATTERN = r'<b>Size:</b> (?P<S>[\d.]+) (?P<U>[a-zA-Z]+) <br>'
OFFLINE_PATTERN = r'class="sad_face_image"|>No such page here.<'
- TEMP_OFFLINE_PATTERN = r'>(File Temporarily Unavailable|Server Error. Try again later)'
+ TEMP_OFFLINE_PATTERN = r'Please try again in a few minutes.<'
FILE_URL_REPLACEMENTS = [(__pattern__, r'http://www.firedrive.com/file/\g<ID>')]
diff --git a/pyload/plugins/hoster/FshareVn.py b/pyload/plugins/hoster/FshareVn.py
index 5109d239d..3e3632902 100644
--- a/pyload/plugins/hoster/FshareVn.py
+++ b/pyload/plugins/hoster/FshareVn.py
@@ -69,7 +69,7 @@ class FshareVn(SimpleHoster):
self.parseError('FORM')
elif 'link_file_pwd_dl' in inputs:
for password in self.getPassword().splitlines():
- self.logInfo('Password protected link, trying "%s"' % password)
+ self.logInfo("Password protected link, trying", password)
inputs['link_file_pwd_dl'] = password
self.html = self.load(self.url, post=inputs, decode=True)
if not 'name="link_file_pwd_dl"' in self.html:
diff --git a/pyload/plugins/hoster/GigapetaCom.py b/pyload/plugins/hoster/GigapetaCom.py
index d09a1fb0c..dde9cab55 100644
--- a/pyload/plugins/hoster/GigapetaCom.py
+++ b/pyload/plugins/hoster/GigapetaCom.py
@@ -23,7 +23,7 @@ class GigapetaCom(SimpleHoster):
FILE_SIZE_PATTERN = r'<th>\s*Size\s*</th>\s*<td>\s*(?P<S>.*?)\s*</td>'
OFFLINE_PATTERN = r'<div id="page_error">'
- SH_COOKIES = [(".gigapeta.com", "lang", "us")]
+ COOKIES = [(".gigapeta.com", "lang", "us")]
def handleFree(self):
diff --git a/pyload/plugins/hoster/HundredEightyUploadCom.py b/pyload/plugins/hoster/HundredEightyUploadCom.py
index 29e152c1d..fa3dd8de3 100644
--- a/pyload/plugins/hoster/HundredEightyUploadCom.py
+++ b/pyload/plugins/hoster/HundredEightyUploadCom.py
@@ -11,12 +11,13 @@ class HundredEightyUploadCom(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.01"
- __pattern__ = r'http://(?:www\.)?180upload\.com/(\w+).*'
+ __pattern__ = r'http://(?:www\.)?180upload\.com/\w{12}'
__description__ = """180upload.com hoster plugin"""
__author_name__ = "stickell"
__author_mail__ = "l.stickell@yahoo.it"
+
HOSTER_NAME = "180upload.com"
FILE_NAME_PATTERN = r'Filename:</b></td><td nowrap>(?P<N>.+)</td></tr>-->'
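
The pattern change above (repeated for several plugins below) replaces an open-ended tail with \w{12}, the 12-character file id these XFileSharingPro-based sites typically use. A small sketch, with made-up URLs, of what the stricter regex accepts:

    import re

    # Stricter pattern as used above; URLs are invented for illustration.
    pattern = r'http://(?:www\.)?180upload\.com/\w{12}'

    print bool(re.match(pattern, "http://180upload.com/abcdef123456"))         # True: 12-char id
    print bool(re.match(pattern, "http://www.180upload.com/some/other/page"))  # False: no 12-char id
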
diff --git a/pyload/plugins/hoster/IFileWs.py b/pyload/plugins/hoster/IFileWs.py
index 45039f8e0..63edfec40 100644
--- a/pyload/plugins/hoster/IFileWs.py
+++ b/pyload/plugins/hoster/IFileWs.py
@@ -1,23 +1,18 @@
# -*- coding: utf-8 -*-
-from pyload.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo
+from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo
-class IFileWs(XFileSharingPro):
+class IFileWs(DeadHoster):
__name__ = "IFileWs"
__type__ = "hoster"
- __version__ = "0.01"
+ __version__ = "0.02"
- __pattern__ = r'http://(?:www\.)?ifile\.ws/\w+(/.+)?'
+ __pattern__ = r'http://(?:www\.)?ifile\.ws/\w{12}'
__description__ = """Ifile.ws hoster plugin"""
__author_name__ = "z00nx"
__author_mail__ = "z00nx0@gmail.com"
- HOSTER_NAME = "ifile.ws"
-
- FILE_INFO_PATTERN = r'<h1\s+style="display:inline;">(?P<N>[^<]+)</h1>\s+\[(?P<S>[^]]+)\]'
- OFFLINE_PATTERN = r'File Not Found|The file was removed by administrator'
-
getInfo = create_getInfo(IFileWs)
diff --git a/pyload/plugins/hoster/Keep2shareCC.py b/pyload/plugins/hoster/Keep2shareCC.py
index 088a1b012..059ab8e05 100644
--- a/pyload/plugins/hoster/Keep2shareCC.py
+++ b/pyload/plugins/hoster/Keep2shareCC.py
@@ -52,7 +52,7 @@ class Keep2shareCC(SimpleHoster):
m = re.search(self.WAIT_PATTERN, self.html)
if m:
- self.logDebug('Hoster told us to wait for %s' % m.group(1))
+ self.logDebug("Hoster told us to wait for %s" % m.group(1))
# string to time convert courtesy of https://stackoverflow.com/questions/10663720
ftr = [3600, 60, 1]
wait_time = sum([a * b for a, b in zip(ftr, map(int, m.group(1).split(':')))])
@@ -62,7 +62,7 @@ class Keep2shareCC(SimpleHoster):
m = re.search(self.ALREADY_DOWNLOADING_PATTERN, self.html)
if m:
# if someone is already downloading on our line, wait 30min and retry
- self.logDebug('Already downloading, waiting for 30 minutes')
+ self.logDebug("Already downloading, waiting for 30 minutes")
self.wait(30 * 60, reconnect=True)
self.retry()
@@ -89,14 +89,14 @@ class Keep2shareCC(SimpleHoster):
self.correctCaptcha()
break
else:
- self.logInfo('Wrong captcha')
+ self.logInfo("Wrong captcha")
self.invalidCaptcha()
else:
self.fail("All captcha attempts failed")
def startDownload(self, url):
d = urljoin(self.base_url, url)
- self.logDebug('Direct Link: ' + d)
+ self.logDebug("Direct Link: " + d)
self.download(d, disposition=True)
def sanitize_url(self):
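
The zip/sum idiom in the hunk above converts an "H:MM:SS" wait string into seconds. A standalone sketch of the same conversion, with an invented sample value:

    # Convert "H:MM:SS" into seconds, as done in Keep2shareCC.handleFree above
    ftr = [3600, 60, 1]
    wait = "1:30:00"  # sample value for illustration
    wait_time = sum(a * b for a, b in zip(ftr, map(int, wait.split(':'))))
    print wait_time  # 3600*1 + 60*30 + 1*0 = 5400 seconds
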
diff --git a/pyload/plugins/hoster/LemUploadsCom.py b/pyload/plugins/hoster/LemUploadsCom.py
index 8556e3c9c..08d999478 100644
--- a/pyload/plugins/hoster/LemUploadsCom.py
+++ b/pyload/plugins/hoster/LemUploadsCom.py
@@ -11,12 +11,13 @@ class LemUploadsCom(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.01"
- __pattern__ = r'https?://(?:www\.)?lemuploads.com/\w{12}'
+ __pattern__ = r'https?://(?:www\.)?lemuploads\.com/\w{12}'
__description__ = """LemUploads.com hoster plugin"""
__author_name__ = "t4skforce"
__author_mail__ = "t4skforce1337[AT]gmail[DOT]com"
+
HOSTER_NAME = "lemuploads.com"
OFFLINE_PATTERN = r'<b>File Not Found</b><br><br>'
diff --git a/pyload/plugins/hoster/LetitbitNet.py b/pyload/plugins/hoster/LetitbitNet.py
index a28f06571..b9631d311 100644
--- a/pyload/plugins/hoster/LetitbitNet.py
+++ b/pyload/plugins/hoster/LetitbitNet.py
@@ -148,13 +148,13 @@ class LetitbitNet(SimpleHoster):
json_data = [api_key, ["download/direct_links", {"pass": premium_key, "link": self.pyfile.url}]]
api_rep = self.load('http://api.letitbit.net/json', post={'r': json_dumps(json_data)})
- self.logDebug('API Data: ' + api_rep)
+ self.logDebug("API Data: " + api_rep)
api_rep = json_loads(api_rep)
if api_rep['status'] == 'FAIL':
self.fail(api_rep['data'])
direct_link = api_rep['data'][0][0]
- self.logDebug('Direct Link: ' + direct_link)
+ self.logDebug("Direct Link: " + direct_link)
self.download(direct_link, disposition=True)
diff --git a/pyload/plugins/hoster/LinksnappyCom.py b/pyload/plugins/hoster/LinksnappyCom.py
index e7cc61391..54c6c0ecb 100644
--- a/pyload/plugins/hoster/LinksnappyCom.py
+++ b/pyload/plugins/hoster/LinksnappyCom.py
@@ -46,7 +46,7 @@ class LinksnappyCom(Hoster):
j = json_loads(r)['links'][0]
if j['error']:
- self.logError('Error converting the link: %s' % j['error'])
+ self.logError("Error converting the link: %s" % j['error'])
self.fail('Error converting the link')
pyfile.name = j['filename']
diff --git a/pyload/plugins/hoster/LomafileCom.py b/pyload/plugins/hoster/LomafileCom.py
index 942afa1f4..3b75a79ab 100644
--- a/pyload/plugins/hoster/LomafileCom.py
+++ b/pyload/plugins/hoster/LomafileCom.py
@@ -22,7 +22,7 @@ class LomafileCom(SimpleHoster):
def handleFree(self):
- for _ in range(3):
+ for _ in xrange(3):
captcha_id = re.search(r'src="http://lomafile\.com/captchas/(?P<id>\w+)\.jpg"', self.html)
if not captcha_id:
self.parseError("Unable to parse captcha id.")
diff --git a/pyload/plugins/hoster/LuckyShareNet.py b/pyload/plugins/hoster/LuckyShareNet.py
index 5cb15d49e..14eacae98 100644
--- a/pyload/plugins/hoster/LuckyShareNet.py
+++ b/pyload/plugins/hoster/LuckyShareNet.py
@@ -30,7 +30,7 @@ class LuckyShareNet(SimpleHoster):
m = re.search(r"waitingtime = (\d+);", html)
if m:
waittime = int(m.group(1))
- self.logDebug('You have to wait %d seconds between free downloads' % waittime)
+ self.logDebug("You have to wait %d seconds between free downloads" % waittime)
self.retry(wait_time=waittime)
else:
self.parseError('Unable to detect wait time between free downloads')
@@ -42,9 +42,9 @@ class LuckyShareNet(SimpleHoster):
# TODO: Some files could not be downloaded in free mode
def handleFree(self):
file_id = re.match(self.__pattern__, self.pyfile.url).group('ID')
- self.logDebug('File ID: ' + file_id)
+ self.logDebug("File ID: " + file_id)
rep = self.load(r"http://luckyshare.net/download/request/type/time/file/" + file_id, decode=True)
- self.logDebug('JSON: ' + rep)
+ self.logDebug("JSON: " + rep)
json = self.parseJson(rep)
self.wait(int(json['time']))
@@ -54,13 +54,13 @@ class LuckyShareNet(SimpleHoster):
challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
rep = self.load(r"http://luckyshare.net/download/verify/challenge/%s/response/%s/hash/%s" %
(challenge, response, json['hash']), decode=True)
- self.logDebug('JSON: ' + rep)
+ self.logDebug("JSON: " + rep)
if 'link' in rep:
json.update(self.parseJson(rep))
self.correctCaptcha()
break
elif 'Verification failed' in rep:
- self.logInfo('Wrong captcha')
+ self.logInfo("Wrong captcha")
self.invalidCaptcha()
else:
self.parseError('Unable to get downlaod link')
@@ -68,7 +68,7 @@ class LuckyShareNet(SimpleHoster):
if not json['link']:
self.fail("No Download url retrieved/all captcha attempts failed")
- self.logDebug('Direct URL: ' + json['link'])
+ self.logDebug("Direct URL: " + json['link'])
self.download(json['link'])
diff --git a/pyload/plugins/hoster/MediafireCom.py b/pyload/plugins/hoster/MediafireCom.py
index bbf9f06b6..52382e6e6 100644
--- a/pyload/plugins/hoster/MediafireCom.py
+++ b/pyload/plugins/hoster/MediafireCom.py
@@ -74,7 +74,7 @@ class MediafireCom(SimpleHoster):
pyfile.url = re.sub(r'/view/?\?', '/?', pyfile.url)
self.url, result = checkHTMLHeader(pyfile.url)
- self.logDebug('Location (%d): %s' % (result, self.url))
+ self.logDebug("Location (%d): %s" % (result, self.url))
if result == 0:
self.html = self.load(self.url, decode=True)
diff --git a/pyload/plugins/hoster/MegareleaseOrg.py b/pyload/plugins/hoster/MegareleaseOrg.py
index 6a689b6dd..adfe68026 100644
--- a/pyload/plugins/hoster/MegareleaseOrg.py
+++ b/pyload/plugins/hoster/MegareleaseOrg.py
@@ -8,12 +8,13 @@ class MegareleaseOrg(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.01"
- __pattern__ = r'https?://(?:www\.)?megarelease.org/\w{12}'
+ __pattern__ = r'https?://(?:www\.)?megarelease\.org/\w{12}'
__description__ = """Megarelease.org hoster plugin"""
__author_name__ = ("derek3x", "stickell")
__author_mail__ = ("derek3x@vmail.me", "l.stickell@yahoo.it")
+
HOSTER_NAME = "megarelease.org"
FILE_INFO_PATTERN = r'<font color="red">%s/(?P<N>.+)</font> \((?P<S>[^)]+)\)</font>' % __pattern__
diff --git a/pyload/plugins/hoster/MovReelCom.py b/pyload/plugins/hoster/MovReelCom.py
index 3f97d3fca..8a754f6c8 100644
--- a/pyload/plugins/hoster/MovReelCom.py
+++ b/pyload/plugins/hoster/MovReelCom.py
@@ -8,12 +8,13 @@ class MovReelCom(XFileSharingPro):
__type__ = "hoster"
__version__ = "1.20"
- __pattern__ = r'http://(?:www\.)?movreel.com/.*'
+ __pattern__ = r'http://(?:www\.)?movreel\.com/\w{12}'
__description__ = """MovReel.com hoster plugin"""
__author_name__ = "JorisV83"
__author_mail__ = "jorisv83-pyload@yahoo.com"
+
HOSTER_NAME = "movreel.com"
FILE_INFO_PATTERN = r'<h3>(?P<N>.+?) <small><sup>(?P<S>[\d.]+) (?P<U>..)</sup> </small></h3>'
diff --git a/pyload/plugins/hoster/NarodRu.py b/pyload/plugins/hoster/NarodRu.py
index 22c0ba908..6fa16362d 100644
--- a/pyload/plugins/hoster/NarodRu.py
+++ b/pyload/plugins/hoster/NarodRu.py
@@ -53,7 +53,7 @@ class NarodRu(SimpleHoster):
else:
self.fail("No valid captcha code entered")
- self.logDebug('Download link: ' + url)
+ self.logDebug("Download link: " + url)
self.download(url)
diff --git a/pyload/plugins/hoster/NosuploadCom.py b/pyload/plugins/hoster/NosuploadCom.py
index e4feabdd0..83e018355 100644
--- a/pyload/plugins/hoster/NosuploadCom.py
+++ b/pyload/plugins/hoster/NosuploadCom.py
@@ -31,7 +31,7 @@ class NosuploadCom(XFileSharingPro):
# stage2: wait some time and press the "Download File" button
data = self.getPostParameters()
wait_time = re.search(self.WAIT_PATTERN, self.html, re.MULTILINE | re.DOTALL).group(1)
- self.logDebug("hoster told us to wait %s seconds" % wait_time)
+ self.logDebug("Hoster told us to wait %s seconds" % wait_time)
self.wait(wait_time)
self.html = self.load(self.pyfile.url, post=data, ref=True, decode=True)
diff --git a/pyload/plugins/hoster/NovafileCom.py b/pyload/plugins/hoster/NovafileCom.py
index 1346bbde9..8f3f78de1 100644
--- a/pyload/plugins/hoster/NovafileCom.py
+++ b/pyload/plugins/hoster/NovafileCom.py
@@ -18,6 +18,7 @@ class NovafileCom(XFileSharingPro):
__author_name__ = ("zoidberg", "stickell")
__author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it")
+
HOSTER_NAME = "novafile.com"
FILE_SIZE_PATTERN = r'<div class="size">(?P<S>.+?)</div>'
@@ -26,8 +27,4 @@ class NovafileCom(XFileSharingPro):
WAIT_PATTERN = r'<p>Please wait <span id="count"[^>]*>(\d+)</span> seconds</p>'
- def setup(self):
- self.multiDL = False
-
-
getInfo = create_getInfo(NovafileCom)
diff --git a/pyload/plugins/hoster/NowDownloadEu.py b/pyload/plugins/hoster/NowDownloadEu.py
index 6e42a55bb..2b0dca907 100644
--- a/pyload/plugins/hoster/NowDownloadEu.py
+++ b/pyload/plugins/hoster/NowDownloadEu.py
@@ -53,7 +53,7 @@ class NowDownloadEu(SimpleHoster):
url = re.search(self.LINK_PATTERN, self.html)
if url is None:
self.fail('Download Link not Found (Plugin out of Date?)')
- self.logDebug('Download link: ' + str(url.group(1)))
+ self.logDebug("Download link", url.group(1))
self.download(str(url.group(1)))
diff --git a/pyload/plugins/hoster/OneFichierCom.py b/pyload/plugins/hoster/OneFichierCom.py
index 8fdecb342..f7f42e463 100644
--- a/pyload/plugins/hoster/OneFichierCom.py
+++ b/pyload/plugins/hoster/OneFichierCom.py
@@ -40,7 +40,7 @@ class OneFichierCom(SimpleHoster):
self.html = self.load(self.pyfile.url, decode=True)
if self.WAITING_PATTERN in self.html:
- self.logInfo('You have to wait been each free download! Retrying in %d seconds.' % self.WAIT_TIME)
+ self.logInfo("You have to wait been each free download! Retrying in %d seconds." % self.WAIT_TIME)
self.waitAndRetry(self.WAIT_TIME)
else: # detect parallel download
m = re.search(self.NOT_PARALLEL, self.html)
diff --git a/pyload/plugins/hoster/PremiumTo.py b/pyload/plugins/hoster/PremiumTo.py
index 33df2e7bc..c0a2868d9 100644
--- a/pyload/plugins/hoster/PremiumTo.py
+++ b/pyload/plugins/hoster/PremiumTo.py
@@ -11,9 +11,9 @@ from pyload.utils import fs_encode
class PremiumTo(Hoster):
__name__ = "PremiumTo"
__type__ = "hoster"
- __version__ = "0.09"
+ __version__ = "0.10"
- __pattern__ = r'https?://(?:www\.)?premium.to/.*'
+ __pattern__ = r'https?://(?:www\.)?premium\.to/.+'
__description__ = """Premium.to hoster plugin"""
__author_name__ = ("RaNaN", "zoidberg", "stickell")
@@ -24,6 +24,7 @@ class PremiumTo(Hoster):
self.resumeDownload = True
self.chunkLimit = 1
+
def process(self, pyfile):
if not self.account:
self.logError(_("Please enter your %s account or deactivate this plugin") % "premium.to")
@@ -37,8 +38,7 @@ class PremiumTo(Hoster):
self.req.setOption("timeout", 120)
self.download(
- "http://premium.to/api/getfile.php",
- get={"username": self.account.username, "password": self.account.password, "link": quote(pyfile.url, "")},
+ "http://premium.to/api/getfile.php?username=%s&password=%s&link=%s" % (self.account.username, self.account.password, quote(pyfile.url, "")),
disposition=True)
check = self.checkDownload({"nopremium": "No premium account available"})
@@ -65,6 +65,7 @@ class PremiumTo(Hoster):
if err:
self.fail(err)
+
def getTraffic(self):
try:
api_r = self.load("http://premium.to/api/straffic.php",
diff --git a/pyload/plugins/hoster/PromptfileCom.py b/pyload/plugins/hoster/PromptfileCom.py
index 108f470d2..4d2ac8ad6 100644
--- a/pyload/plugins/hoster/PromptfileCom.py
+++ b/pyload/plugins/hoster/PromptfileCom.py
@@ -29,7 +29,7 @@ class PromptfileCom(SimpleHoster):
if m is None:
self.parseError("Unable to detect chash")
chash = m.group(1)
- self.logDebug("read chash %s" % chash)
+ self.logDebug("Read chash %s" % chash)
# continue to stage2
self.html = self.load(self.pyfile.url, decode=True, post={'chash': chash})
@@ -38,7 +38,7 @@ class PromptfileCom(SimpleHoster):
if m is None:
self.parseError("Unable to detect direct link")
direct = m.group(1)
- self.logDebug("found direct link: " + direct)
+ self.logDebug("Found direct link: " + direct)
self.download(direct, disposition=True)
diff --git a/pyload/plugins/hoster/QuickshareCz.py b/pyload/plugins/hoster/QuickshareCz.py
index d82c64888..4082fab44 100644
--- a/pyload/plugins/hoster/QuickshareCz.py
+++ b/pyload/plugins/hoster/QuickshareCz.py
@@ -36,11 +36,11 @@ class QuickshareCz(SimpleHoster):
if self.premium:
if 'UU_prihlasen' in self.jsvars:
if self.jsvars['UU_prihlasen'] == '0':
- self.logWarning('User not logged in')
+ self.logWarning("User not logged in")
self.relogin(self.user)
self.retry()
elif float(self.jsvars['UU_kredit']) < float(self.jsvars['kredit_odecet']):
- self.logWarning('Not enough credit left')
+ self.logWarning("Not enough credit left")
self.premium = False
if self.premium:
diff --git a/pyload/plugins/hoster/RapidgatorNet.py b/pyload/plugins/hoster/RapidgatorNet.py
index cc13f7097..a92424cac 100644
--- a/pyload/plugins/hoster/RapidgatorNet.py
+++ b/pyload/plugins/hoster/RapidgatorNet.py
@@ -59,12 +59,12 @@ class RapidgatorNet(SimpleHoster):
json = self.load('%s/%s' % (self.API_URL, cmd),
get={'sid': self.sid,
'url': self.pyfile.url}, decode=True)
- self.logDebug('API:%s' % cmd, json, "SID: %s" % self.sid)
+ self.logDebug("API:%s" % cmd, json, "SID: %s" % self.sid)
json = json_loads(json)
status = json['response_status']
msg = json['response_details']
except BadHeader, e:
- self.logError('API:%s' % cmd, e, "SID: %s" % self.sid)
+ self.logError("API:%s" % cmd, e, "SID: %s" % self.sid)
status = e.code
msg = e
diff --git a/pyload/plugins/hoster/RarefileNet.py b/pyload/plugins/hoster/RarefileNet.py
index 7c6632aac..7082dc19e 100644
--- a/pyload/plugins/hoster/RarefileNet.py
+++ b/pyload/plugins/hoster/RarefileNet.py
@@ -11,21 +11,20 @@ class RarefileNet(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.03"
- __pattern__ = r'http://(?:www\.)?rarefile.net/\w{12}'
+ __pattern__ = r'http://(?:www\.)?rarefile\.net/\w{12}'
__description__ = """Rarefile.net hoster plugin"""
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
+
HOSTER_NAME = "rarefile.net"
FILE_NAME_PATTERN = r'<td><font color="red">(?P<N>.*?)</font></td>'
FILE_SIZE_PATTERN = r'<td>Size : (?P<S>.+?)&nbsp;'
- LINK_PATTERN = r'<a href="(?P<link>[^"]+)">(?P=link)</a>'
+ LINK_PATTERN = r'<a href="(?P<link>[^"]+)">(?P=link)</a>'
- def setup(self):
- self.resumeDownload = self.multiDL = self.premium
def handleCaptcha(self, inputs):
captcha_div = re.search(r'<b>Enter code.*?<div.*?>(.*?)</div>', self.html, re.S).group(1)
diff --git a/pyload/plugins/hoster/SecureUploadEu.py b/pyload/plugins/hoster/SecureUploadEu.py
index befe5f0e9..0edc1860d 100644
--- a/pyload/plugins/hoster/SecureUploadEu.py
+++ b/pyload/plugins/hoster/SecureUploadEu.py
@@ -6,14 +6,15 @@ from pyload.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInf
class SecureUploadEu(XFileSharingPro):
__name__ = "SecureUploadEu"
__type__ = "hoster"
- __version__ = "0.01"
+ __version__ = "0.02"
- __pattern__ = r'http://(?:www\.)?secureupload\.eu/(\w){12}(/\w+)'
+ __pattern__ = r'https?://(?:www\.)?secureupload\.eu/\w{12}'
__description__ = """SecureUpload.eu hoster plugin"""
__author_name__ = "z00nx"
__author_mail__ = "z00nx0@gmail.com"
+
HOSTER_NAME = "secureupload.eu"
FILE_INFO_PATTERN = r'<h3>Downloading (?P<N>[^<]+) \((?P<S>[^<]+)\)</h3>'
diff --git a/pyload/plugins/hoster/SendmywayCom.py b/pyload/plugins/hoster/SendmywayCom.py
index 87cbfcc0d..0306206ca 100644
--- a/pyload/plugins/hoster/SendmywayCom.py
+++ b/pyload/plugins/hoster/SendmywayCom.py
@@ -8,12 +8,13 @@ class SendmywayCom(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.01"
- __pattern__ = r'http://(?:www\.)?sendmyway.com/\w{12}'
+ __pattern__ = r'http://(?:www\.)?sendmyway\.com/\w{12}'
__description__ = """SendMyWay hoster plugin"""
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
+
HOSTER_NAME = "sendmyway.com"
FILE_NAME_PATTERN = r'<p class="file-name" ><.*?>\s*(?P<N>.+)'
diff --git a/pyload/plugins/hoster/ShareRapidCom.py b/pyload/plugins/hoster/ShareRapidCom.py
index b474103fc..d89be1ec4 100644
--- a/pyload/plugins/hoster/ShareRapidCom.py
+++ b/pyload/plugins/hoster/ShareRapidCom.py
@@ -34,7 +34,7 @@ class ShareRapidCom(SimpleHoster):
FILE_SIZE_PATTERN = r'<td class="i">Velikost:</td>\s*<td class="h"><strong>\s*(?P<S>[0-9.]+) (?P<U>[kKMG])i?B</strong></td>'
OFFLINE_PATTERN = ur'Nastala chyba 404|Soubor byl smazán'
- SH_CHECK_TRAFFIC = True
+ FORCE_CHECK_TRAFFIC = True
LINK_PATTERN = r'<a href="([^"]+)" title="Stahnout">([^<]+)</a>'
ERR_LOGIN_PATTERN = ur'<div class="error_div"><strong>Stahování je přístupné pouze přihlášeným uživatelům'
diff --git a/pyload/plugins/hoster/SpeedyshareCom.py b/pyload/plugins/hoster/SpeedyshareCom.py
index ed6fc443f..42bb3e453 100644
--- a/pyload/plugins/hoster/SpeedyshareCom.py
+++ b/pyload/plugins/hoster/SpeedyshareCom.py
@@ -1,43 +1,51 @@
# -*- coding: utf-8 -*-
-
-# Testlink:
+#
+# Test links:
# http://speedy.sh/ep2qY/Zapp-Brannigan.jpg
import re
+from urlparse import urljoin
+
from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class SpeedyshareCom(SimpleHoster):
__name__ = "SpeedyshareCom"
__type__ = "hoster"
- __pattern__ = r"https?://(www\.)?(speedyshare.com|speedy.sh)/.*"
- __version__ = "0.01"
- __description__ = """speedyshare.com hoster plugin"""
- __author_name__ = ("zapp-brannigan")
- __author_mail__ = ("fuerst.reinje@web.de")
+ __version__ = "0.02"
+
+ __pattern__ = r"https?://(www\.)?(speedyshare\.com|speedy\.sh)/\w+"
+
+ __description__ = """Speedyshare.com hoster plugin"""
+ __author_name__ = "zapp-brannigan"
+ __author_mail__ = "fuerst.reinje@web.de"
+
FILE_NAME_PATTERN = r'class=downloadfilename>(?P<N>.*)</span></td>'
FILE_SIZE_PATTERN = r'class=sizetagtext>(?P<S>.*) (?P<U>[kKmM]?[iI]?[bB]?)</div>'
- LINK_PATTERN = r'<a href=\'(.*)\'><img src=/gf/slowdownload.png alt=\'Slow Download\' border=0'
+
FILE_OFFLINE_PATTERN = r'class=downloadfilenamenotfound>.*</span>'
- BASE_URL = 'www.speedyshare.com'
+
+ LINK_PATTERN = r'<a href=\'(.*)\'><img src=/gf/slowdownload.png alt=\'Slow Download\' border=0'
+
def setup(self):
self.multiDL = False
self.chunkLimit = 1
- def process(self, pyfile):
- self.html = self.load(pyfile.url, decode=True)
- try:
- dl_link = re.search(self.LINK_PATTERN, self.html).group(1)
- self.logDebug("Link: " + dl_link)
- except:
- self.parseError("Unable to find download link")
- self.download(self.BASE_URL + dl_link, disposition=True)
- check = self.checkDownload({"is_html": re.compile("html")})
+
+ def handleFree(self):
+ m = re.search(self.LINK_PATTERN, self.html)
+ if m is None:
+ self.parseError("Download link not found")
+
+ dl_link = urljoin("http://www.speedyshare.com", m.group(1))
+ self.download(dl_link, disposition=True)
+
+ check = self.checkDownload({'is_html': re.compile("html")})
if check == "is_html":
- self.fail("The downloaded file is html, maybe the plugin is out of date")
+ self.parseError("Downloaded file is an html file")
getInfo = create_getInfo(SpeedyshareCom)
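
The rewritten handleFree above joins the scraped relative link onto the site root with urljoin instead of concatenating a bare BASE_URL string. A minimal sketch, with a placeholder path:

    from urlparse import urljoin

    # Relative link as LINK_PATTERN might capture it (placeholder value)
    relative = "/files/download/Zapp-Brannigan.jpg"
    print urljoin("http://www.speedyshare.com", relative)
    # -> http://www.speedyshare.com/files/download/Zapp-Brannigan.jpg
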
diff --git a/pyload/plugins/hoster/StreamcloudEu.py b/pyload/plugins/hoster/StreamcloudEu.py
index 0e36a047c..875766fd7 100644
--- a/pyload/plugins/hoster/StreamcloudEu.py
+++ b/pyload/plugins/hoster/StreamcloudEu.py
@@ -13,20 +13,23 @@ class StreamcloudEu(XFileSharingPro):
__type__ = "hoster"
__version__ = "0.04"
- __pattern__ = r'http://(?:www\.)?streamcloud\.eu/\S+'
+ __pattern__ = r'http://(?:www\.)?streamcloud\.eu/\w{12}'
__description__ = """Streamcloud.eu hoster plugin"""
__author_name__ = "seoester"
__author_mail__ = "seoester@googlemail.com"
+
HOSTER_NAME = "streamcloud.eu"
LINK_PATTERN = r'file: "(http://(stor|cdn)\d+\.streamcloud.eu:?\d*/.*/video\.(mp4|flv))",'
def setup(self):
- super(StreamcloudEu, self).setup()
self.multiDL = True
+ self.chunkLimit = 1
+ self.resumeDownload = self.premium
+
def getDownloadLink(self):
m = re.search(self.LINK_PATTERN, self.html, re.S)
@@ -58,6 +61,7 @@ class StreamcloudEu(XFileSharingPro):
return m.group(1)
+
def getPostParameters(self):
for i in xrange(3):
if not self.errmsg:
diff --git a/pyload/plugins/hoster/TurbobitNet.py b/pyload/plugins/hoster/TurbobitNet.py
index 1fbdf9e87..9a6b26c97 100644
--- a/pyload/plugins/hoster/TurbobitNet.py
+++ b/pyload/plugins/hoster/TurbobitNet.py
@@ -17,31 +17,32 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, t
class TurbobitNet(SimpleHoster):
__name__ = "TurbobitNet"
__type__ = "hoster"
- __version__ = "0.11"
+ __version__ = "0.12"
- __pattern__ = r'http://(?:www\.)?(turbobit.net|unextfiles.com)/(?!download/folder/)(?:download/free/)?(?P<ID>\w+).*'
+ __pattern__ = r'http://(?:www\.)?turbobit\.net/(?:download/free/)?(?P<ID>\w+)'
- __description__ = """Turbobit.net plugin"""
- __author_name__ = "zoidberg"
- __author_mail__ = "zoidberg@mujmail.cz"
+ __description__ = """ Turbobit.net hoster plugin """
+ __author_name__ = ("zoidberg", "prOq")
+ __author_mail__ = ("zoidberg@mujmail.cz", None)
- FILE_INFO_PATTERN = r"<span class='file-icon1[^>]*>(?P<N>[^<]+)</span>\s*\((?P<S>[^\)]+)\)\s*</h1>" #: long filenames are shortened
- FILE_NAME_PATTERN = r'<meta name="keywords" content="\s+(?P<N>[^,]+)' #: full name but missing on page2
+
+ FILE_NAME_PATTERN = r'id="file-title">(?P<N>.+?)<'
+ FILE_SIZE_PATTERN = r'class="file-size">(?P<S>[\d,.]+) (?P<U>\w+)'
OFFLINE_PATTERN = r'<h2>File Not Found</h2>|html\(\'File (?:was )?not found'
- FILE_URL_REPLACEMENTS = [(r"http://(?:www\.)?(turbobit.net|unextfiles.com)/(?:download/free/)?(?P<ID>\w+).*",
- "http://turbobit.net/\g<ID>.html")]
- SH_COOKIES = [(".turbobit.net", "user_lang", "en")]
+ FILE_URL_REPLACEMENTS = [(__pattern__, "http://turbobit.net/\g<ID>.html")]
+
+ COOKIES = [(".turbobit.net", "user_lang", "en")]
LINK_PATTERN = r'(?P<url>/download/redirect/[^"\']+)'
- LIMIT_WAIT_PATTERN = r'<div id="time-limit-text">\s*.*?<span id=\'timeout\'>(\d+)</span>'
+ LIMIT_WAIT_PATTERN = r"<div id='timeout'>(\d+)<"
CAPTCHA_KEY_PATTERN = r'src="http://api\.recaptcha\.net/challenge\?k=([^"]+)"'
- CAPTCHA_SRC_PATTERN = r'<img alt="Captcha" src="(.*?)"'
+ CAPTCHA_SRC_PATTERN = r'<img alt="Captcha" src="(.+?)"'
def handleFree(self):
self.url = "http://turbobit.net/download/free/%s" % self.file_info['ID']
- self.html = self.load(self.url)
+ self.html = self.load(self.url, ref=True, decode=True)
rtUpdate = self.getRtUpdate()
@@ -54,6 +55,7 @@ class TurbobitNet(SimpleHoster):
self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With:"])
self.downloadFile()
+
def solveCaptcha(self):
for _ in xrange(5):
m = re.search(self.LIMIT_WAIT_PATTERN, self.html)
@@ -83,7 +85,8 @@ class TurbobitNet(SimpleHoster):
self.logDebug(inputs)
self.html = self.load(self.url, post=inputs)
- if not "<div class='download-timer-header'>" in self.html:
+ if '<div class="captcha-error">Incorrect, try again!<' in self.html:
+ self.logInfo("Invalid captcha")
self.invalidCaptcha()
else:
self.correctCaptcha()
@@ -91,11 +94,12 @@ class TurbobitNet(SimpleHoster):
else:
self.fail("Invalid captcha")
+
def getRtUpdate(self):
rtUpdate = self.getStorage("rtUpdate")
if not rtUpdate:
- if self.getStorage("version") != self.__version__ or int(
- self.getStorage("timestamp", 0)) + 86400000 < timestamp():
+ if self.getStorage("version") != self.__version__ \
+ or int(self.getStorage("timestamp", 0)) + 86400000 < timestamp():
# that's right, we are even using jdownloader updates
rtUpdate = getURL("http://update0.jdownloader.org/pluginstuff/tbupdate.js")
rtUpdate = self.decrypt(rtUpdate.splitlines()[1])
@@ -114,19 +118,23 @@ class TurbobitNet(SimpleHoster):
return rtUpdate
+
def getDownloadUrl(self, rtUpdate):
self.req.http.lastURL = self.url
m = re.search("(/\w+/timeout\.js\?\w+=)([^\"\'<>]+)", self.html)
- url = "http://turbobit.net%s%s" % (m.groups() if m else (
- '/files/timeout.js?ver=', ''.join(random.choice('0123456789ABCDEF') for _ in xrange(32))))
+ if m:
+ url = "http://turbobit.net%s%s" % m.groups()
+ else:
+ url = "http://turbobit.net/files/timeout.js?ver=%s" % "".join(random.choice('0123456789ABCDEF') for _ in xrange(32))
+
fun = self.load(url)
self.setWait(65, False)
for b in [1, 3]:
self.jscode = "var id = \'%s\';var b = %d;var inn = \'%s\';%sout" % (
- self.file_info['ID'], b, quote(fun), rtUpdate)
+ self.file_info['ID'], b, quote(fun), rtUpdate)
try:
out = self.js.eval(self.jscode)
@@ -141,26 +149,29 @@ class TurbobitNet(SimpleHoster):
self.delStorage("rtUpdate")
self.retry()
+
def decrypt(self, data):
cipher = ARC4.new(hexlify('E\x15\xa1\x9e\xa3M\xa0\xc6\xa0\x84\xb6H\x83\xa8o\xa0'))
return unhexlify(cipher.encrypt(unhexlify(data)))
+
def getLocalTimeString(self):
lt = time.localtime()
tz = time.altzone if lt.tm_isdst else time.timezone
return "%s GMT%+03d%02d" % (time.strftime("%a %b %d %Y %H:%M:%S", lt), -tz // 3600, tz % 3600)
+
def handlePremium(self):
self.logDebug("Premium download as user %s" % self.user)
self.html = self.load(self.pyfile.url) # Useless in 0.5
self.downloadFile()
+
def downloadFile(self):
m = re.search(self.LINK_PATTERN, self.html)
if m is None:
- self.parseError("download link")
+ self.parseError("Download link not found")
self.url = "http://turbobit.net" + m.group('url')
- self.logDebug(self.url)
self.download(self.url)
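
getDownloadUrl above now either reuses the timeout.js parameters found in the page or falls back to a random 32-character hex version string. A sketch of that fallback in isolation:

    import random

    # Build the fallback timeout.js URL when the page does not expose one
    ver = "".join(random.choice('0123456789ABCDEF') for _ in xrange(32))
    url = "http://turbobit.net/files/timeout.js?ver=%s" % ver
    print url
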
diff --git a/pyload/plugins/hoster/TusfilesNet.py b/pyload/plugins/hoster/TusfilesNet.py
index 0e01ec805..bbed62a6a 100644
--- a/pyload/plugins/hoster/TusfilesNet.py
+++ b/pyload/plugins/hoster/TusfilesNet.py
@@ -6,21 +6,20 @@ from pyload.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInf
class TusfilesNet(XFileSharingPro):
__name__ = "TusfilesNet"
__type__ = "hoster"
- __version__ = "0.03"
+ __version__ = "0.04"
- __pattern__ = r'https?://(?:www\.)?tusfiles\.net/(?P<ID>\w+)'
+ __pattern__ = r'https?://(?:www\.)?tusfiles\.net/\w{12}'
__description__ = """Tusfiles.net hoster plugin"""
- __author_name__ = "Walter Purcaro"
- __author_mail__ = "vuolter@gmail.com"
+ __author_name__ = ("Walter Purcaro", "guidobelix")
+ __author_mail__ = ("vuolter@gmail.com", "guidobelix@hotmail.it")
+
HOSTER_NAME = "tusfiles.net"
FILE_INFO_PATTERN = r'\](?P<N>.+) - (?P<S>[\d.]+) (?P<U>\w+)\['
OFFLINE_PATTERN = r'>File Not Found|<Title>TusFiles - Fast Sharing Files!'
- SH_COOKIES = [(".tusfiles.net", "lang", "english")]
-
def setup(self):
self.multiDL = False
@@ -28,4 +27,8 @@ class TusfilesNet(XFileSharingPro):
self.resumeDownload = True
+ def handlePremium(self):
+ return self.handleFree()
+
+
getInfo = create_getInfo(TusfilesNet)
diff --git a/pyload/plugins/hoster/UlozTo.py b/pyload/plugins/hoster/UlozTo.py
index c3957aaa0..b33c5dd5f 100644
--- a/pyload/plugins/hoster/UlozTo.py
+++ b/pyload/plugins/hoster/UlozTo.py
@@ -85,14 +85,14 @@ class UlozTo(SimpleHoster):
if not action or not inputs:
self.parseError("free download form")
- self.logDebug('inputs.keys() = ' + str(inputs.keys()))
+ self.logDebug("inputs.keys = " + str(inputs.keys()))
# get and decrypt captcha
if all(key in inputs for key in ("captcha_value", "captcha_id", "captcha_key")):
# Old version - last seen 9.12.2013
self.logDebug('Using "old" version')
captcha_value = self.decryptCaptcha("http://img.uloz.to/captcha/%s.png" % inputs['captcha_id'])
- self.logDebug('CAPTCHA ID: ' + inputs['captcha_id'] + ", CAPTCHA VALUE: " + captcha_value)
+ self.logDebug("CAPTCHA ID: " + inputs['captcha_id'] + ", CAPTCHA VALUE: " + captcha_value)
inputs.update({'captcha_id': inputs['captcha_id'], 'captcha_key': inputs['captcha_key'], 'captcha_value': captcha_value})
@@ -101,11 +101,11 @@ class UlozTo(SimpleHoster):
self.logDebug('Using "new" version')
xapca = self.load("http://www.ulozto.net/reloadXapca.php", get={"rnd": str(int(time.time()))})
- self.logDebug('xapca = ' + str(xapca))
+ self.logDebug("xapca = " + str(xapca))
data = json_loads(xapca)
captcha_value = self.decryptCaptcha(str(data['image']))
- self.logDebug("CAPTCHA HASH: " + data['hash'] + ", CAPTCHA SALT: " + str(data['salt']) + ", CAPTCHA VALUE: " + captcha_value)
+ self.logDebug("CAPTCHA HASH: " + data['hash'], "CAPTCHA SALT: " + str(data['salt']), "CAPTCHA VALUE: " + captcha_value)
inputs.update({'timestamp': data['timestamp'], 'salt': data['salt'], 'hash': data['hash'], 'captcha_value': captcha_value})
else:
diff --git a/pyload/plugins/hoster/UloziskoSk.py b/pyload/plugins/hoster/UloziskoSk.py
index f78a6e29a..5bfb2fc77 100644
--- a/pyload/plugins/hoster/UloziskoSk.py
+++ b/pyload/plugins/hoster/UloziskoSk.py
@@ -48,7 +48,7 @@ class UloziskoSk(SimpleHoster):
self.parseError('ID')
id = m.group(1)
- self.logDebug('URL:' + parsed_url + ' ID:' + id)
+ self.logDebug("URL:" + parsed_url + ' ID:' + id)
m = re.search(self.CAPTCHA_PATTERN, self.html)
if m is None:
@@ -57,7 +57,7 @@ class UloziskoSk(SimpleHoster):
captcha = self.decryptCaptcha(captcha_url, cookies=True)
- self.logDebug('CAPTCHA_URL:' + captcha_url + ' CAPTCHA:' + captcha)
+ self.logDebug("CAPTCHA_URL:" + captcha_url + ' CAPTCHA:' + captcha)
self.download(parsed_url, post={
"antispam": captcha,
diff --git a/pyload/plugins/hoster/UnibytesCom.py b/pyload/plugins/hoster/UnibytesCom.py
index 1541265d9..6adfdbae2 100644
--- a/pyload/plugins/hoster/UnibytesCom.py
+++ b/pyload/plugins/hoster/UnibytesCom.py
@@ -63,7 +63,7 @@ class UnibytesCom(SimpleHoster):
else:
self.fail("No valid captcha code entered")
- self.logDebug('Download link: ' + url)
+ self.logDebug("Download link: " + url)
self.req.http.c.setopt(FOLLOWLOCATION, 1)
self.download(url)
diff --git a/pyload/plugins/hoster/UploadedTo.py b/pyload/plugins/hoster/UploadedTo.py
index db620eea6..694a053eb 100644
--- a/pyload/plugins/hoster/UploadedTo.py
+++ b/pyload/plugins/hoster/UploadedTo.py
@@ -205,7 +205,7 @@ class UploadedTo(Hoster):
self.wait()
result = self.load(url, post=options)
- self.logDebug("result: %s" % result)
+ self.logDebug("Result: %s" % result)
if "limit-size" in result:
self.fail("File too big for free download")
@@ -220,7 +220,7 @@ class UploadedTo(Hoster):
self.wait()
self.retry()
elif '"err":"captcha"' in result:
- self.logError("ul.net captcha is disabled")
+ self.logError("captcha is disabled")
self.invalidCaptcha()
elif "type:'download'" in result:
self.correctCaptcha()
diff --git a/pyload/plugins/hoster/UploadheroCom.py b/pyload/plugins/hoster/UploadheroCom.py
index f1f893c30..63155a23e 100644
--- a/pyload/plugins/hoster/UploadheroCom.py
+++ b/pyload/plugins/hoster/UploadheroCom.py
@@ -23,7 +23,7 @@ class UploadheroCom(SimpleHoster):
FILE_SIZE_PATTERN = r'Taille du fichier : </span><strong>(?P<S>.*?)</strong>'
OFFLINE_PATTERN = r'<p class="titre_dl_2">|<div class="raison"><strong>Le lien du fichier ci-dessus n\'existe plus.'
- SH_COOKIES = [(".uploadhero.co", "lang", "en")]
+ COOKIES = [(".uploadhero.co", "lang", "en")]
IP_BLOCKED_PATTERN = r'href="(/lightbox_block_download.php\?min=.*?)"'
IP_WAIT_PATTERN = r'<span id="minutes">(\d+)</span>.*\s*<span id="seconds">(\d+)</span>'
diff --git a/pyload/plugins/hoster/UpstoreNet.py b/pyload/plugins/hoster/UpstoreNet.py
index bd084612c..d812d292d 100644
--- a/pyload/plugins/hoster/UpstoreNet.py
+++ b/pyload/plugins/hoster/UpstoreNet.py
@@ -31,7 +31,7 @@ class UpstoreNet(SimpleHoster):
if m is None:
self.parseError("could not detect hash")
chash = m.group(1)
- self.logDebug("read hash " + chash)
+ self.logDebug("Read hash " + chash)
# continue to stage2
post_data = {'hash': chash, 'free': 'Slow download'}
self.html = self.load(self.pyfile.url, post=post_data, decode=True)
@@ -41,7 +41,7 @@ class UpstoreNet(SimpleHoster):
recaptcha = ReCaptcha(self)
if not recaptcha.detect_key(self.html):
self.parseError("could not find recaptcha pattern")
- self.logDebug("using captcha key " + recaptcha.recaptcha_key)
+ self.logDebug("Using captcha key " + recaptcha.recaptcha_key)
# try the captcha 5 times
for i in xrange(5):
m = re.search(self.WAIT_PATTERN, self.html)
@@ -68,7 +68,7 @@ class UpstoreNet(SimpleHoster):
self.parseError("could not detect direct link")
direct = m.group(1)
- self.logDebug('found direct link: ' + direct)
+ self.logDebug("Found direct link: " + direct)
self.download(direct, disposition=True)
diff --git a/pyload/plugins/hoster/UptoboxCom.py b/pyload/plugins/hoster/UptoboxCom.py
index 8fd5e6fa7..2786deb5a 100644
--- a/pyload/plugins/hoster/UptoboxCom.py
+++ b/pyload/plugins/hoster/UptoboxCom.py
@@ -2,68 +2,35 @@
import re
-from urllib import unquote
-
from pyload.plugins.hoster.XFileSharingPro import XFileSharingPro, create_getInfo
-from pyload.plugins.internal.CaptchaService import ReCaptcha, SolveMedia
-from pyload.utils import html_unescape
class UptoboxCom(XFileSharingPro):
__name__ = "UptoboxCom"
__type__ = "hoster"
- __version__ = "0.09"
+ __version__ = "0.10"
- __pattern__ = r'https?://(?:www\.)?uptobox\.com/\w+'
+ __pattern__ = r'https?://(?:www\.)?uptobox\.com/\w{12}'
__description__ = """Uptobox.com hoster plugin"""
__author_name__ = "Walter Purcaro"
__author_mail__ = "vuolter@gmail.com"
+
HOSTER_NAME = "uptobox.com"
- FILE_INFO_PATTERN = r'"para_title">(?P<N>.+) \((?P<S>[\d\.]+) (?P<U>\w+)\)'
+ FILE_INFO_PATTERN = r'"para_title">(?P<N>.+) \((?P<S>[\d.]+) (?P<U>\w+)\)'
OFFLINE_PATTERN = r'>(File not found|Access Denied|404 Not Found)'
TEMP_OFFLINE_PATTERN = r'>This server is in maintenance mode'
WAIT_PATTERN = r'>(\d+)</span> seconds<'
-
LINK_PATTERN = r'"(https?://\w+\.uptobox\.com/d/.*?)"'
- def handleCaptcha(self, inputs):
- m = re.search(self.SOLVEMEDIA_PATTERN, self.html)
- if m:
- captcha_key = m.group(1)
- captcha = SolveMedia(self)
- inputs['adcopy_challenge'], inputs['adcopy_response'] = captcha.challenge(captcha_key)
- return 4
- else:
- m = re.search(self.CAPTCHA_URL_PATTERN, self.html)
- if m:
- captcha_url = m.group(1)
- inputs['code'] = self.decryptCaptcha(captcha_url)
- return 2
- else:
- m = re.search(self.CAPTCHA_DIV_PATTERN, self.html, re.DOTALL)
- if m:
- captcha_div = m.group(1)
- self.logDebug(captcha_div)
- numerals = re.findall(r'<span.*?padding-left\s*:\s*(\d+).*?>(\d)</span>',
- html_unescape(captcha_div))
- inputs['code'] = "".join([a[1] for a in sorted(numerals, key=lambda num: int(num[0]))])
- self.logDebug("CAPTCHA", inputs['code'], numerals)
- return 3
- else:
- m = re.search(self.RECAPTCHA_URL_PATTERN, self.html)
- if m:
- recaptcha_key = unquote(m.group(1))
- self.logDebug("RECAPTCHA KEY: %s" % recaptcha_key)
- recaptcha = ReCaptcha(self)
- inputs['recaptcha_challenge_field'], inputs['recaptcha_response_field'] = recaptcha.challenge(
- recaptcha_key)
- return 1
- return 0
+ def setup(self):
+ self.multiDL = True
+ self.chunkLimit = 1
+ self.resumeDownload = True
getInfo = create_getInfo(UptoboxCom)
diff --git a/pyload/plugins/hoster/VeehdCom.py b/pyload/plugins/hoster/VeehdCom.py
index 4d76c3525..8a882a932 100644
--- a/pyload/plugins/hoster/VeehdCom.py
+++ b/pyload/plugins/hoster/VeehdCom.py
@@ -20,7 +20,7 @@ class VeehdCom(Hoster):
def _debug(self, msg):
- self.logDebug('[%s] %s' % (self.__name__, msg))
+ self.logDebug("[%s] %s" % (self.__name__, msg))
def setup(self):
self.multiDL = True
diff --git a/pyload/plugins/hoster/VeohCom.py b/pyload/plugins/hoster/VeohCom.py
index 31b21420a..057db56a3 100644
--- a/pyload/plugins/hoster/VeohCom.py
+++ b/pyload/plugins/hoster/VeohCom.py
@@ -22,7 +22,7 @@ class VeohCom(SimpleHoster):
FILE_URL_REPLACEMENTS = [(__pattern__, r'http://www.veoh.com/watch/\g<ID>')]
- SH_COOKIES = [(".veoh.com", "lassieLocale", "en")]
+ COOKIES = [(".veoh.com", "lassieLocale", "en")]
def setup(self):
diff --git a/pyload/plugins/hoster/VimeoCom.py b/pyload/plugins/hoster/VimeoCom.py
index aebf1c344..d5dab556e 100644
--- a/pyload/plugins/hoster/VimeoCom.py
+++ b/pyload/plugins/hoster/VimeoCom.py
@@ -24,7 +24,7 @@ class VimeoCom(SimpleHoster):
FILE_URL_REPLACEMENTS = [(__pattern__, r'https://www.vimeo.com/\g<ID>')]
- SH_COOKIES = [(".vimeo.com", "language", "en")]
+ COOKIES = [(".vimeo.com", "language", "en")]
def setup(self):
diff --git a/pyload/plugins/hoster/WrzucTo.py b/pyload/plugins/hoster/WrzucTo.py
index b766ea785..17d568f54 100644
--- a/pyload/plugins/hoster/WrzucTo.py
+++ b/pyload/plugins/hoster/WrzucTo.py
@@ -21,7 +21,7 @@ class WrzucTo(SimpleHoster):
FILE_NAME_PATTERN = r'id="file_info">\s*<strong>(?P<N>.*?)</strong>'
FILE_SIZE_PATTERN = r'class="info">\s*<tr>\s*<td>(?P<S>.*?)</td>'
- SH_COOKIES = [(".wrzuc.to", "language", "en")]
+ COOKIES = [(".wrzuc.to", "language", "en")]
def setup(self):
diff --git a/pyload/plugins/hoster/XFileSharingPro.py b/pyload/plugins/hoster/XFileSharingPro.py
index c7733600b..212ef23ef 100644
--- a/pyload/plugins/hoster/XFileSharingPro.py
+++ b/pyload/plugins/hoster/XFileSharingPro.py
@@ -21,22 +21,31 @@ class XFileSharingPro(SimpleHoster):
"""
__name__ = "XFileSharingPro"
__type__ = "hoster"
- __version__ = "0.32"
+ __version__ = "0.36"
__pattern__ = r'^unmatchable$'
__description__ = """XFileSharingPro base hoster plugin"""
- __author_name__ = ("zoidberg", "stickell")
- __author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it")
+ __author_name__ = ("zoidberg", "stickell", "Walter Purcaro")
+ __author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it", "vuolter@gmail.com")
+
+
+ HOSTER_NAME = None
+
+ FILE_URL_REPLACEMENTS = [(r'/embed-(\w{12}).*', r'/\1')] #: support embedded files
+
+ COOKIES = [(HOSTER_NAME, "lang", "english")]
FILE_INFO_PATTERN = r'<tr><td align=right><b>Filename:</b></td><td nowrap>(?P<N>[^<]+)</td></tr>\s*.*?<small>\((?P<S>[^<]+)\)</small>'
FILE_NAME_PATTERN = r'<input type="hidden" name="fname" value="(?P<N>[^"]+)"'
FILE_SIZE_PATTERN = r'You have requested .*\((?P<S>[\d\.\,]+) ?(?P<U>\w+)?\)</font>'
+
OFFLINE_PATTERN = r'>\w+ (Not Found|file (was|has been) removed)'
WAIT_PATTERN = r'<span id="countdown_str">.*?>(\d+)</span>'
OVR_LINK_PATTERN = r'<h2>Download Link</h2>\s*<textarea[^>]*>([^<]+)'
+ LINK_PATTERN = None #: final download url pattern
CAPTCHA_URL_PATTERN = r'(http://[^"\']+?/captchas?/[^"\']+)'
RECAPTCHA_URL_PATTERN = r'http://[^"\']+?recaptcha[^"\']+?\?k=([^"\']+)"'
@@ -47,13 +56,33 @@ class XFileSharingPro(SimpleHoster):
def setup(self):
+ self.chunkLimit = 1
+
if self.__name__ == "XFileSharingPro":
- self.__pattern__ = self.core.pluginManager.hosterPlugins[self.__name__]['pattern']
self.multiDL = True
+ self.__pattern__ = self.core.pluginManager.hosterPlugins[self.__name__]['pattern']
+ self.HOSTER_NAME = re.match(self.__pattern__, self.pyfile.url).group(1).lower()
+ self.COOKIES = [(self.HOSTER_NAME, "lang", "english")]
else:
self.resumeDownload = self.multiDL = self.premium
- self.chunkLimit = 1
+
+ def prepare(self):
+ """ Initialize important variables """
+ if not self.HOSTER_NAME:
+ self.fail("Missing HOSTER_NAME")
+
+ if not self.LINK_PATTERN:
+ pattr = r'(http://([^/]*?%s|\d+\.\d+\.\d+\.\d+)(:\d+)?(/d/|(?:/files)?/\d+/\w+/)[^"\'<]+)'
+ self.LINK_PATTERN = pattr % self.HOSTER_NAME
+
+ if isinstance(self.COOKIES, list):
+ set_cookies(self.req.cj, self.COOKIES)
+
+ self.captcha = None
+ self.errmsg = None
+ self.passwords = self.getPassword().splitlines()
+
def process(self, pyfile):
self.prepare()
@@ -69,8 +98,8 @@ class XFileSharingPro(SimpleHoster):
try:
# Due to a 0.4.9 core bug self.load would use cookies even if
# cookies=False. Workaround using getURL to avoid cookies.
- # Can be reverted in 0.5 as the cookies bug has been fixed.
- self.html = getURL(pyfile.url, decode=True)
+ # Can be reverted in 0.4.10 as the cookies bug has been fixed.
+ self.html = getURL(pyfile.url, decode=True, cookies=self.COOKIES)
self.file_info = self.getFileInfo()
except PluginParseError:
self.file_info = None
@@ -88,22 +117,13 @@ class XFileSharingPro(SimpleHoster):
else:
self.handleFree()
- def prepare(self):
- """ Initialize important variables """
- if not hasattr(self, "HOSTER_NAME"):
- self.HOSTER_NAME = re.match(self.__pattern__, self.pyfile.url).group(1)
- if not hasattr(self, "LINK_PATTERN"):
- self.LINK_PATTERN = r'(http://([^/]*?%s|\d+\.\d+\.\d+\.\d+)(:\d+)?(/d/|(?:/files)?/\d+/\w+/)[^"\'<]+)' % self.HOSTER_NAME
-
- self.captcha = self.errmsg = None
- self.passwords = self.getPassword().splitlines()
def getDirectDownloadLink(self):
""" Get download link for premium users with direct download enabled """
self.req.http.lastURL = self.pyfile.url
self.req.http.c.setopt(FOLLOWLOCATION, 0)
- self.html = self.load(self.pyfile.url, cookies=True, decode=True)
+ self.html = self.load(self.pyfile.url, decode=True)
self.header = self.req.http.header
self.req.http.c.setopt(FOLLOWLOCATION, 1)
@@ -114,11 +134,13 @@ class XFileSharingPro(SimpleHoster):
return location
+
def handleFree(self):
url = self.getDownloadLink()
self.logDebug("Download URL: %s" % url)
self.startDownload(url)
+
def getDownloadLink(self):
for i in xrange(5):
self.logDebug("Getting download link: #%d" % i)
@@ -145,6 +167,7 @@ class XFileSharingPro(SimpleHoster):
return m.group(1)
+
def handlePremium(self):
self.html = self.load(self.pyfile.url, post=self.getPostParameters())
m = re.search(self.LINK_PATTERN, self.html)
@@ -152,6 +175,7 @@ class XFileSharingPro(SimpleHoster):
self.parseError('DIRECT LINK')
self.startDownload(m.group(1))
+
def handleOverriden(self):
#only tested with easybytez.com
self.html = self.load("http://www.%s/" % self.HOSTER_NAME)
@@ -189,13 +213,15 @@ class XFileSharingPro(SimpleHoster):
else:
self.retry()
+
def startDownload(self, link):
link = link.strip()
if self.captcha:
self.correctCaptcha()
- self.logDebug('DIRECT LINK: %s' % link)
+ self.logDebug("DIRECT LINK: %s" % link)
self.download(link, disposition=True)
+
def checkErrors(self):
m = re.search(self.ERROR_PATTERN, self.html)
if m:
@@ -227,6 +253,7 @@ class XFileSharingPro(SimpleHoster):
return self.errmsg
+
def getPostParameters(self):
for _ in xrange(3):
if not self.errmsg:
@@ -288,6 +315,7 @@ class XFileSharingPro(SimpleHoster):
else:
self.parseError('FORM: %s' % (inputs['op'] if 'op' in inputs else 'UNKNOWN'))
+
def handleCaptcha(self, inputs):
m = re.search(self.RECAPTCHA_URL_PATTERN, self.html)
if m:
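
The new prepare() above derives a default LINK_PATTERN from HOSTER_NAME and installs a per-host language cookie. A sketch of that derivation with a hypothetical hoster name:

    # Hypothetical hoster name, not a real plugin; mirrors prepare() above.
    hoster_name = "example-share.com"
    pattr = r'(http://([^/]*?%s|\d+\.\d+\.\d+\.\d+)(:\d+)?(/d/|(?:/files)?/\d+/\w+/)[^"\'<]+)'
    link_pattern = pattr % hoster_name   # dot left unescaped, as in the original
    cookies = [(hoster_name, "lang", "english")]
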
diff --git a/pyload/plugins/hoster/ZeveraCom.py b/pyload/plugins/hoster/ZeveraCom.py
index f76290ea5..64b93e14d 100644
--- a/pyload/plugins/hoster/ZeveraCom.py
+++ b/pyload/plugins/hoster/ZeveraCom.py
@@ -8,7 +8,7 @@ class ZeveraCom(Hoster):
__type__ = "hoster"
__version__ = "0.21"
- __pattern__ = r'http://(?:www\.)?zevera.com/.*'
+ __pattern__ = r'http://(?:www\.)?zevera\.com/.*'
__description__ = """Zevera.com hoster plugin"""
__author_name__ = "zoidberg"
@@ -24,7 +24,7 @@ class ZeveraCom(Hoster):
self.logError(_("Please enter your %s account or deactivate this plugin") % "zevera.com")
self.fail("No zevera.com account provided")
- self.logDebug("zevera.com: Old URL: %s" % pyfile.url)
+ self.logDebug("Old URL: %s" % pyfile.url)
if self.account.getAPIData(self.req, cmd="checklink", olink=pyfile.url) != "Alive":
self.fail("Offline or not downloadable - contact Zevera support")
@@ -38,71 +38,3 @@ class ZeveraCom(Hoster):
check = self.checkDownload({"error": 'action="ErrorDownload.aspx'})
if check == "error":
self.fail("Error response received - contact Zevera support")
-
- # BitAPI not used - defunct, probably abandoned by Zevera
- #
- # api_url = "http://zevera.com/API.ashx"
- #
- # def process(self, pyfile):
- # if not self.account:
- # self.logError(_("Please enter your zevera.com account or deactivate this plugin"))
- # self.fail("No zevera.com account provided")
- #
- # self.logDebug("zevera.com: Old URL: %s" % pyfile.url)
- #
- # last_size = retries = 0
- # olink = pyfile.url #quote(pyfile.url.encode('utf_8'))
- #
- # for _ in xrange(100):
- # self.retData = self.account.loadAPIRequest(self.req, cmd = 'download_request', olink = olink)
- # self.checkAPIErrors(self.retData)
- #
- # if self.retData['FileInfo']['StatusID'] == 100:
- # break
- # elif self.retData['FileInfo']['StatusID'] == 99:
- # self.fail('Failed to initialize download (99)')
- # else:
- # if self.retData['FileInfo']['Progress']['BytesReceived'] <= last_size:
- # if retries >= 6:
- # self.fail('Failed to initialize download (%d)' % self.retData['FileInfo']['StatusID'] )
- # retries += 1
- # else:
- # retries = 0
- #
- # last_size = self.retData['FileInfo']['Progress']['BytesReceived']
- #
- # self.setWait(self.retData['Update_Wait'])
- # self.wait()
- #
- # pyfile.name = self.retData['FileInfo']['RealFileName']
- # pyfile.size = self.retData['FileInfo']['FileSizeInBytes']
- #
- # self.retData = self.account.loadAPIRequest(self.req, cmd = 'download_start',
- # FileID = self.retData['FileInfo']['FileID'])
- # self.checkAPIErrors(self.retData)
- #
- # self.download(self.api_url, get = {
- # 'cmd': "open_stream",
- # 'login': self.account.loginname,
- # 'pass': self.account.password,
- # 'FileID': self.retData['FileInfo']['FileID'],
- # 'startBytes': 0
- # }
- # )
- #
- # def checkAPIErrors(self, retData):
- # if not retData:
- # self.fail('Unknown API response')
- #
- # if retData['ErrorCode']:
- # self.logError(retData['ErrorCode'], retData['ErrorMessage'])
- # #self.fail('ERROR: ' + retData['ErrorMessage'])
- #
- # if pyfile.size / 1024000 > retData['AccountInfo']['AvailableTODAYTrafficForUseInMBytes']:
- # self.logWarning("Not enough data left to download the file")
- #
- # def crazyDecode(self, ustring):
- # # accepts decoded ie. unicode string - API response is double-quoted, double-utf8-encoded
- # # no idea what the proper order of calling these functions would be :-/
- # return html_unescape(unquote(unquote(ustring.replace(
- # '@DELIMITER@','#'))).encode('raw_unicode_escape').decode('utf-8'))
diff --git a/pyload/plugins/hoster/ZippyshareCom.py b/pyload/plugins/hoster/ZippyshareCom.py
index d6b7375e2..60d152455 100644
--- a/pyload/plugins/hoster/ZippyshareCom.py
+++ b/pyload/plugins/hoster/ZippyshareCom.py
@@ -1,74 +1,72 @@
# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://www13.zippyshare.com/v/18665333/file.html
import re
+from os import path
+from urlparse import urljoin
+
from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class ZippyshareCom(SimpleHoster):
__name__ = "ZippyshareCom"
__type__ = "hoster"
- __version__ = "0.49"
+ __version__ = "0.51"
- __pattern__ = r'(?P<HOST>http://www\d{0,2}\.zippyshare.com)/v(?:/|iew.jsp.*key=)(?P<KEY>\d+)'
+ __pattern__ = r'(?P<HOST>http://www\d{0,2}\.zippyshare\.com)/v(?:/|iew\.jsp.*key=)(?P<KEY>\d+)'
__description__ = """Zippyshare.com hoster plugin"""
- __author_name__ = ("spoob", "zoidberg", "stickell", "skylab")
- __author_mail__ = ("spoob@pyload.org", "zoidberg@mujmail.cz", "l.stickell@yahoo.it", "development@sky-lab.de")
+ __author_name__ = "Walter Purcaro"
+ __author_mail__ = "vuolter@gmail.com"
+
+
+ FILE_NAME_PATTERN = r'>Name:.+?">(?P<N>.+?)<'
+ FILE_SIZE_PATTERN = r'>Size:.+?">(?P<S>[\d.]+) (?P<U>\w+)'
- FILE_NAME_PATTERN = r'<title>Zippyshare\.com - (?P<N>[^<]+)</title>'
- FILE_SIZE_PATTERN = r'>Size:</font>\s*<font [^>]*>(?P<S>[0-9.,]+) (?P<U>[kKMG]+)i?B</font><br />'
- FILE_INFO_PATTERN = r'document\.getElementById\(\'dlbutton\'\)\.href = "[^;]*/(?P<N>[^"]+)";'
- OFFLINE_PATTERN = r'>File does not exist on this server</div>'
+ OFFLINE_PATTERN = r'>File does not exist on this server<'
- SH_COOKIES = [(".zippyshare.com", "ziplocale", "en")]
+ COOKIES = [(".zippyshare.com", "ziplocale", "en")]
def setup(self):
self.multiDL = True
+ self.chunkLimit = -1
+ self.resumeDownload = True
+
def handleFree(self):
- url = self.get_file_url()
- if not url:
- self.fail("Download URL not found.")
+ url = self.get_link()
self.logDebug("Download URL: %s" % url)
self.download(url)
- def get_file_url(self):
- """returns the absolute downloadable filepath"""
- url_parts = re.search(r'(addthis:url="(http://www(\d+).zippyshare.com/v/(\d*)/file.html))', self.html)
- number = url_parts.group(4)
- check = re.search(r'<script type="text/javascript">([^<]*?)(var a = (\d*);)', self.html)
- if check:
- a = int(re.search(r'<script type="text/javascript">([^<]*?)(var a = (\d*);)', self.html).group(3))
- k = int(re.search(r'<script type="text/javascript">([^<]*?)(\d*%(\d*))', self.html).group(3))
- checksum = ((a + 3) % k) * ((a + 3) % 3) + 18
+
+ def get_checksum(self):
+ m = re.search(r'\(a\*b\+19\)', self.html)
+ if m:
+ m = re.findall(r'var \w = (\d+)\%(\d+);', self.html)
+ c = lambda a,b: a * b + 19
else:
- # This might work but is insecure
- # checksum = eval(re.search("((\d*)\s\%\s(\d*)\s\+\s(\d*)\s\%\s(\d*))", self.html).group(0))
-
- m = re.search(r"((?P<a>\d*)\s%\s(?P<b>\d*)\s\+\s(?P<c>\d*)\s%\s(?P<k>\d*))", self.html)
- if m is None:
- self.parseError("Unable to detect values to calculate direct link")
- a = int(m.group("a"))
- b = int(m.group("b"))
- c = int(m.group("c"))
- k = int(m.group("k"))
- if a == c:
- checksum = ((a % b) + (a % k))
- else:
- checksum = ((a % b) + (c % k))
-
- self.logInfo('Checksum: %s' % checksum)
-
- filename = re.search(r'>Name:</font>\s*<font [^>]*>(?P<N>[^<]+)</font><br />', self.html).group('N')
-
- url = "/d/%s/%s/%s" % (number, checksum, filename)
- self.logInfo(self.file_info['HOST'] + url)
- return self.file_info['HOST'] + url
+ m = re.findall(r'(\d+) \% (\d+)', self.html)
+ c = lambda a,b: a + b
+
+ if not m:
+ self.parseError("Unable to calculate checksum")
+
+ a = map(lambda x: int(x), m[0])
+ b = map(lambda x: int(x), m[1])
+
+ # Checksum is calculated as (a*b+19) or (a+b), where a and b are the result of modulo calculations
+ a = a[0] % a[1]
+ b = b[0] % b[1]
+
+ return c(a, b)
+
+
+ def get_link(self):
+ checksum = self.get_checksum()
+ p_url = path.join("d", self.file_info['KEY'], str(checksum), self.pyfile.name)
+ dl_link = urljoin(self.file_info['HOST'], p_url)
+ return dl_link
getInfo = create_getInfo(ZippyshareCom)
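
get_checksum above handles two script variants: (a*b+19) when the page multiplies the two modulo results, plain (a+b) otherwise. A worked sketch with invented numbers:

    # Invented sample values standing in for the numbers scraped from the page
    a_pair = (561, 53)   # first  "n % m" pair
    b_pair = (1999, 43)  # second "n % m" pair

    a = a_pair[0] % a_pair[1]   # 561 % 53  = 31
    b = b_pair[0] % b_pair[1]   # 1999 % 43 = 21

    checksum_variant1 = a * b + 19  # (a*b+19) variant -> 670
    checksum_variant2 = a + b       # (a+b)    variant -> 52
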
diff --git a/pyload/plugins/internal/CaptchaService.py b/pyload/plugins/internal/CaptchaService.py
index b247ba654..26482379d 100644
--- a/pyload/plugins/internal/CaptchaService.py
+++ b/pyload/plugins/internal/CaptchaService.py
@@ -7,7 +7,7 @@ from random import random
class CaptchaService:
__name__ = "CaptchaService"
- __version__ = "0.05"
+ __version__ = "0.06"
__description__ = """Captcha service plugin"""
__author_name__ = "pyLoad Team"
@@ -28,6 +28,7 @@ class ReCaptcha:
def __init__(self, plugin):
self.plugin = plugin
+
def detect_key(self, html):
m = re.search(self.RECAPTCHA_KEY_PATTERN, html)
if m is None:
@@ -38,11 +39,13 @@ class ReCaptcha:
else:
return None
+
def challenge(self, key=None):
- if key is None and self.recaptcha_key:
- key = self.recaptcha_key
- else:
- raise TypeError("ReCaptcha key not found")
+ if not key:
+ if self.recaptcha_key:
+ key = self.recaptcha_key
+ else:
+ raise TypeError("ReCaptcha key not found")
js = self.plugin.req.load("http://www.google.com/recaptcha/api/challenge", get={"k": key}, cookies=True)
@@ -55,6 +58,7 @@ class ReCaptcha:
return challenge, result
+
def result(self, server, challenge):
return self.plugin.decryptCaptcha("%simage" % server, get={"c": challenge},
cookies=True, forceUser=True, imgtype="jpg")
@@ -74,6 +78,7 @@ class AdsCaptcha(CaptchaService):
return challenge, result
+
def result(self, server, challenge):
return self.plugin.decryptCaptcha("%sChallenge.aspx" % server, get={"cid": challenge, "dummy": random()},
cookies=True, imgtype="jpg")
@@ -92,5 +97,6 @@ class SolveMedia(CaptchaService):
return challenge, result
+
def result(self, challenge):
return self.plugin.decryptCaptcha("http://api.solvemedia.com/papi/media?c=%s" % challenge, imgtype="gif")
diff --git a/pyload/plugins/internal/DeadCrypter.py b/pyload/plugins/internal/DeadCrypter.py
index ea9c414cb..1c484274b 100644
--- a/pyload/plugins/internal/DeadCrypter.py
+++ b/pyload/plugins/internal/DeadCrypter.py
@@ -6,14 +6,15 @@ from pyload.plugins.Crypter import Crypter as _Crypter
class DeadCrypter(_Crypter):
__name__ = "DeadCrypter"
__type__ = "crypter"
- __version__ = "0.01"
+ __version__ = "0.02"
__pattern__ = None
- __description__ = """Crypter is no longer available"""
+ __description__ = """ Crypter is no longer available """
__author_name__ = "stickell"
__author_mail__ = "l.stickell@yahoo.it"
def setup(self):
- self.fail("Crypter is no longer available")
+ self.pyfile.error = "Crypter is no longer available"
+ self.offline() #@TODO: self.offline("Crypter is no longer available")
diff --git a/pyload/plugins/internal/DeadHoster.py b/pyload/plugins/internal/DeadHoster.py
index 0b2398020..fc7e1a6ad 100644
--- a/pyload/plugins/internal/DeadHoster.py
+++ b/pyload/plugins/internal/DeadHoster.py
@@ -6,7 +6,7 @@ from pyload.plugins.Hoster import Hoster as _Hoster
def create_getInfo(plugin):
def getInfo(urls):
- yield [('#N/A: ' + url, 0, 1, url) for url in urls]
+ yield map(lambda url: ('#N/A: ' + url, 0, 1, url), urls)
return getInfo
@@ -14,14 +14,15 @@ def create_getInfo(plugin):
class DeadHoster(_Hoster):
__name__ = "DeadHoster"
__type__ = "hoster"
- __version__ = "0.11"
+ __version__ = "0.12"
__pattern__ = None
- __description__ = """Hoster is no longer available"""
+ __description__ = """ Hoster is no longer available """
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
def setup(self):
- self.fail("Hoster is no longer available")
+ self.pyfile.error = "Hoster is no longer available"
+ self.offline() #@TODO: self.offline("Hoster is no longer available")
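
create_getInfo above now yields one list built with map(), and setup() marks the file offline instead of failing outright. A sketch of what the generated getInfo returns, with made-up URLs:

    def getInfo(urls):
        # Each URL is reported as "#N/A: <url>" with size 0 and status 1
        # (offline in pyLoad's status map), matching create_getInfo above.
        yield map(lambda url: ('#N/A: ' + url, 0, 1, url), urls)

    for chunk in getInfo(["http://dead-host.example/abc", "http://dead-host.example/def"]):
        print chunk
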
diff --git a/pyload/plugins/internal/MultiHoster.py b/pyload/plugins/internal/MultiHoster.py
index d99ae6ff9..fdaccdd5b 100644
--- a/pyload/plugins/internal/MultiHoster.py
+++ b/pyload/plugins/internal/MultiHoster.py
@@ -41,7 +41,7 @@ class MultiHoster(Hook):
try:
hosterSet = self.toHosterSet(self.getHoster()) - set(self.ignored)
except Exception, e:
- self.logError("%s" % str(e))
+ self.logError(e)
return []
try:
@@ -55,7 +55,7 @@ class MultiHoster(Hook):
hosterSet -= configSet
except Exception, e:
- self.logError("%s" % str(e))
+ self.logError(e)
self.hosters = list(hosterSet)
@@ -99,7 +99,7 @@ class MultiHoster(Hook):
def periodical(self):
"""reload hoster list periodically"""
- self.logInfo("Reloading supported hoster list")
+ self.logInfo(_("Reloading supported hoster list"))
old_supported = self.supported
self.supported, self.new_supported, self.hosters = [], [], []
@@ -108,7 +108,7 @@ class MultiHoster(Hook):
old_supported = [hoster for hoster in old_supported if hoster not in self.supported]
if old_supported:
- self.logDebug("UNLOAD: %s" % ", ".join(old_supported))
+ self.logDebug("UNLOAD", ", ".join(old_supported))
for hoster in old_supported:
self.unloadHoster(hoster)
@@ -139,24 +139,24 @@ class MultiHoster(Hook):
klass = getattr(module, self.__name__)
# inject plugin
- self.logDebug("Overwritten Hosters: %s" % ", ".join(sorted(self.supported)))
+ self.logDebug("Overwritten Hosters", ", ".join(sorted(self.supported)))
for hoster in self.supported:
dict = self.core.pluginManager.hosterPlugins[hoster]
dict['new_module'] = module
dict['new_name'] = self.__name__
if excludedList:
- self.logInfo("The following hosters were not overwritten - account exists: %s" % ", ".join(sorted(excludedList)))
+ self.logInfo(_("The following hosters were not overwritten - account exists"), ", ".join(sorted(excludedList)))
if self.new_supported:
- self.logDebug("New Hosters: %s" % ", ".join(sorted(self.new_supported)))
+ self.logDebug("New Hosters", ", ".join(sorted(self.new_supported)))
# create new regexp
regexp = r".*(%s).*" % "|".join([x.replace(".", "\\.") for x in self.new_supported])
if hasattr(klass, "__pattern__") and isinstance(klass.__pattern__, basestring) and '://' in klass.__pattern__:
regexp = r"%s|%s" % (klass.__pattern__, regexp)
- self.logDebug("Regexp: %s" % regexp)
+ self.logDebug("Regexp", regexp)
dict = self.core.pluginManager.hosterPlugins[self.__name__]
dict['pattern'] = regexp
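The pattern-merging step above can be exercised on its own: each newly supported hoster domain is dot-escaped and OR-ed into one catch-all regexp, and if the multi-hoster plugin already declares a URL-style __pattern__ it is prepended so its native links keep matching. A standalone sketch with illustrative domains and a hypothetical pattern:

    import re

    new_supported = ["uploaded.net", "share-online.biz"]
    regexp = r".*(%s).*" % "|".join([x.replace(".", "\\.") for x in new_supported])

    klass_pattern = r'https?://(?:www\.)?myrdhost\.com/dl/\w+'  #: hypothetical __pattern__
    if '://' in klass_pattern:
        regexp = r"%s|%s" % (klass_pattern, regexp)

    print regexp
    print bool(re.match(regexp, "http://uploaded.net/file/abc123"))  # True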
diff --git a/pyload/plugins/internal/SimpleCrypter.py b/pyload/plugins/internal/SimpleCrypter.py
index 6e639c946..d9982007d 100644
--- a/pyload/plugins/internal/SimpleCrypter.py
+++ b/pyload/plugins/internal/SimpleCrypter.py
@@ -10,7 +10,7 @@ from pyload.utils import html_unescape
class SimpleCrypter(Crypter):
__name__ = "SimpleCrypter"
__type__ = "crypter"
- __version__ = "0.10"
+ __version__ = "0.12"
__pattern__ = None
@@ -24,7 +24,7 @@ class SimpleCrypter(Crypter):
LINK_PATTERN: group(1) must be a download link or a regex to catch more links
example: LINK_PATTERN = r'<div class="link"><a href="(http://speedload.org/\w+)'
- TITLE_PATTERN: (optional) The group defined by 'title' should be the title
+ TITLE_PATTERN: (optional) The group defined by 'title' should be the folder name or the webpage title
example: TITLE_PATTERN = r'<title>Files of: (?P<title>[^<]+) folder</title>'
OFFLINE_PATTERN: (optional) Checks if the file is yet available online
@@ -34,32 +34,47 @@ class SimpleCrypter(Crypter):
example: TEMP_OFFLINE_PATTERN = r'Server maintainance'
- If it's impossible to extract the links using the LINK_PATTERN only you can override the getLinks method.
+ You can override the getLinks method if you need a more sophisticated way to extract the links.
- If the links are disposed on multiple pages you need to define a pattern:
- PAGES_PATTERN: The group defined by 'pages' must be the total number of pages
+ If the links are split across multiple pages you can define the PAGES_PATTERN regex:
+
+ PAGES_PATTERN: (optional) The group defined by 'pages' should be the total number of pages containing the links
example: PAGES_PATTERN = r'Pages: (?P<pages>\d+)'
- and a function:
+ and its loadPage method:
- loadPage(self, page_n):
- return the html of the page number 'page_n'
+ def loadPage(self, page_n):
+ return the html of the page number page_n
"""
+
URL_REPLACEMENTS = []
- SH_COOKIES = True # or False or list of tuples [(domain, name, value)]
+ TEXT_ENCODING = False #: Set to True or encoding name if encoding in http header is not correct
+ COOKIES = True #: or False or list of tuples [(domain, name, value)]
+
+ LOGIN_ACCOUNT = False
+ LOGIN_PREMIUM = False
+
+
+ def prepare(self):
+ if self.LOGIN_ACCOUNT and not self.account:
+ self.fail('Required account not found!')
+
+ if self.LOGIN_PREMIUM and not self.premium:
+ self.fail('Required premium account not found!')
+ if isinstance(self.COOKIES, list):
+ set_cookies(self.req.cj, self.COOKIES)
- def setup(self):
- if isinstance(self.SH_COOKIES, list):
- set_cookies(self.req.cj, self.SH_COOKIES)
def decrypt(self, pyfile):
+ self.prepare()
+
pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
- self.html = self.load(pyfile.url, decode=True)
+ self.html = self.load(pyfile.url, decode=not self.TEXT_ENCODING)
self.checkOnline()
@@ -70,13 +85,14 @@ class SimpleCrypter(Crypter):
if hasattr(self, 'PAGES_PATTERN') and hasattr(self, 'loadPage'):
self.handleMultiPages()
- self.logDebug('Package has %d links' % len(self.package_links))
+ self.logDebug("Package has %d links" % len(self.package_links))
if self.package_links:
self.packages = [(package_name, self.package_links, folder_name)]
else:
self.fail('Could not extract any links')
+
def getLinks(self):
"""
Returns the links extracted from self.html
@@ -84,12 +100,14 @@ class SimpleCrypter(Crypter):
"""
return re.findall(self.LINK_PATTERN, self.html)
+
def checkOnline(self):
if hasattr(self, "OFFLINE_PATTERN") and re.search(self.OFFLINE_PATTERN, self.html):
self.offline()
elif hasattr(self, "TEMP_OFFLINE_PATTERN") and re.search(self.TEMP_OFFLINE_PATTERN, self.html):
self.tempOffline()
+
def getPackageNameAndFolder(self):
if hasattr(self, 'TITLE_PATTERN'):
m = re.search(self.TITLE_PATTERN, self.html)
@@ -103,6 +121,7 @@ class SimpleCrypter(Crypter):
self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
return name, folder
+
def handleMultiPages(self):
pages = re.search(self.PAGES_PATTERN, self.html)
if pages:
@@ -114,5 +133,6 @@ class SimpleCrypter(Crypter):
self.html = self.loadPage(p)
self.package_links += self.getLinks()
+
def parseError(self, msg):
raise PluginParseError(msg)
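Put together, a folder decrypter built on the reworked SimpleCrypter only declares patterns and the renamed switches (COOKIES and TEXT_ENCODING instead of the old SH_* names), plus a loadPage() method when PAGES_PATTERN is set. A rough sketch; the site, patterns and import path are invented:

    from pyload.plugins.internal.SimpleCrypter import SimpleCrypter


    class ExampleFolderCom(SimpleCrypter):
        __name__ = "ExampleFolderCom"
        __type__ = "crypter"
        __version__ = "0.01"
        __pattern__ = r'http://(?:www\.)?examplefolder\.com/folder/\w+'
        __description__ = """Examplefolder.com folder decrypter plugin (illustration only)"""

        LINK_PATTERN = r'<a class="file" href="(http://examplefolder\.com/\w+)"'
        TITLE_PATTERN = r'<title>Files of: (?P<title>[^<]+) folder</title>'
        PAGES_PATTERN = r'Pages: (?P<pages>\d+)'

        COOKIES = [("examplefolder.com", "lang", "english")]
        TEXT_ENCODING = "utf8"

        def loadPage(self, page_n):
            # fetch page number page_n of the folder listing
            return self.load("%s?page=%d" % (self.pyfile.url, page_n), decode=True)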
diff --git a/pyload/plugins/internal/SimpleHoster.py b/pyload/plugins/internal/SimpleHoster.py
index ca320732f..75c6fc8e8 100644
--- a/pyload/plugins/internal/SimpleHoster.py
+++ b/pyload/plugins/internal/SimpleHoster.py
@@ -15,7 +15,6 @@ def replace_patterns(string, ruleslist):
for r in ruleslist:
rf, rt = r
string = re.sub(rf, rt, string)
- #self.logDebug(rf, rt, string)
return string
@@ -78,8 +77,8 @@ def parseFileInfo(self, url='', html=''):
else:
if not html and hasattr(self, "html"):
html = self.html
- if isinstance(self.SH_BROKEN_ENCODING, (str, unicode)):
- html = unicode(html, self.SH_BROKEN_ENCODING)
+ if isinstance(self.TEXT_ENCODING, basestring):
+ html = unicode(html, self.TEXT_ENCODING)
if hasattr(self, "html"):
self.html = html
@@ -112,7 +111,7 @@ def parseFileInfo(self, url='', html=''):
size = replace_patterns(info['S'] + info['U'] if 'U' in info else info['S'],
self.FILE_SIZE_REPLACEMENTS)
info['size'] = parseFileSize(size)
- elif isinstance(info['size'], (str, unicode)):
+ elif isinstance(info['size'], basestring):
if 'units' in info:
info['size'] += info['units']
info['size'] = parseFileSize(info['size'])
@@ -128,10 +127,10 @@ def create_getInfo(plugin):
def getInfo(urls):
for url in urls:
cj = CookieJar(plugin.__name__)
- if isinstance(plugin.SH_COOKIES, list):
- set_cookies(cj, plugin.SH_COOKIES)
+ if isinstance(plugin.COOKIES, list):
+ set_cookies(cj, plugin.COOKIES)
file_info = parseFileInfo(plugin, url, getURL(replace_patterns(url, plugin.FILE_URL_REPLACEMENTS),
- decode=not plugin.SH_BROKEN_ENCODING, cookies=cj))
+ decode=not plugin.TEXT_ENCODING, cookies=cj))
yield file_info
return getInfo
@@ -154,13 +153,13 @@ class PluginParseError(Exception):
class SimpleHoster(Hoster):
__name__ = "SimpleHoster"
__type__ = "hoster"
- __version__ = "0.35"
+ __version__ = "0.36"
__pattern__ = None
__description__ = """Simple hoster plugin"""
- __author_name__ = ("zoidberg", "stickell")
- __author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it")
+ __author_name__ = ("zoidberg", "stickell", "Walter Purcaro")
+ __author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it", "vuolter@gmail.com")
"""
Following patterns should be defined by each hoster:
@@ -187,46 +186,49 @@ class SimpleHoster(Hoster):
FILE_SIZE_REPLACEMENTS = []
FILE_URL_REPLACEMENTS = []
- SH_BROKEN_ENCODING = False # Set to True or encoding name if encoding in http header is not correct
- SH_COOKIES = True # or False or list of tuples [(domain, name, value)]
- SH_CHECK_TRAFFIC = False # True = force check traffic left for a premium account
+ TEXT_ENCODING = False #: Set to True or encoding name if encoding in http header is not correct
+ COOKIES = True #: or False or list of tuples [(domain, name, value)]
+ FORCE_CHECK_TRAFFIC = False #: Set to True to force checking traffic left for premium account
def init(self):
self.file_info = {}
+
def setup(self):
self.resumeDownload = self.multiDL = self.premium
- if isinstance(self.SH_COOKIES, list):
- set_cookies(self.req.cj, self.SH_COOKIES)
+
+
+ def prepare(self):
+ if isinstance(self.COOKIES, list):
+ set_cookies(self.req.cj, self.COOKIES)
+ self.req.setOption("timeout", 120)
+
def process(self, pyfile):
+ self.prepare()
+
pyfile.url = replace_patterns(pyfile.url, self.FILE_URL_REPLACEMENTS)
- self.req.setOption("timeout", 120)
+
# Due to a 0.4.9 core bug self.load would keep previous cookies even if overridden by cookies parameter.
- # Workaround using getURL. Can be reverted in 0.5 as the cookies bug has been fixed.
- self.html = getURL(pyfile.url, decode=not self.SH_BROKEN_ENCODING, cookies=self.SH_COOKIES)
+ # Workaround using getURL. Can be reverted in 0.4.10 as the cookies bug has been fixed.
+ self.html = getURL(pyfile.url, decode=not self.TEXT_ENCODING, cookies=self.COOKIES)
premium_only = hasattr(self, 'PREMIUM_ONLY_PATTERN') and re.search(self.PREMIUM_ONLY_PATTERN, self.html)
if not premium_only: # Usually premium-only pages don't show the file information
self.getFileInfo()
- if self.premium and (not self.SH_CHECK_TRAFFIC or self.checkTrafficLeft()):
+ if self.premium and (not self.FORCE_CHECK_TRAFFIC or self.checkTrafficLeft()):
self.handlePremium()
elif premium_only:
self.fail("This link require a premium account")
else:
- # This line is required due to the getURL workaround. Can be removed in 0.5
- self.html = self.load(pyfile.url, decode=not self.SH_BROKEN_ENCODING, cookies=self.SH_COOKIES)
+ # This line is required due to the getURL workaround. Can be removed in 0.4.10
+ self.html = self.load(pyfile.url, decode=not self.TEXT_ENCODING)
self.handleFree()
- def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=False):
- if type(url) == unicode:
- url = url.encode('utf8')
- return Hoster.load(self, url=url, get=get, post=post, ref=ref, cookies=cookies,
- just_header=just_header, decode=decode)
def getFileInfo(self):
- self.logDebug("URL: %s" % self.pyfile.url)
+ self.logDebug("URL", self.pyfile.url)
name, size, status = parseFileInfo(self)[:3]
@@ -246,20 +248,24 @@ class SimpleHoster(Hoster):
if size:
self.pyfile.size = size
else:
- self.logError("File size not parsed")
+ self.logError(_("File size not parsed"))
self.logDebug("FILE NAME: %s FILE SIZE: %s" % (self.pyfile.name, self.pyfile.size))
return self.file_info
+
def handleFree(self):
self.fail("Free download not implemented")
+
def handlePremium(self):
self.fail("Premium download not implemented")
+
def parseError(self, msg):
raise PluginParseError(msg)
+
def longWait(self, wait_time=None, max_tries=3):
if wait_time and isinstance(wait_time, (int, long, float)):
time_str = "%dh %dm" % divmod(wait_time / 60, 60)
@@ -268,24 +274,27 @@ class SimpleHoster(Hoster):
time_str = "(unknown time)"
max_tries = 100
- self.logInfo("Download limit reached, reconnect or wait %s" % time_str)
+ self.logInfo(_("Download limit reached, reconnect or wait %s") % time_str)
self.setWait(wait_time, True)
self.wait()
self.retry(max_tries=max_tries, reason="Download limit reached")
+
def parseHtmlForm(self, attr_str='', input_names=None):
return parseHtmlForm(attr_str, self.html, input_names)
+
def checkTrafficLeft(self):
traffic = self.account.getAccountInfo(self.user, True)['trafficleft']
if traffic == -1:
return True
size = self.pyfile.size / 1024
- self.logInfo("Filesize: %i KiB, Traffic left for user %s: %i KiB" % (size, self.user, traffic))
+ self.logInfo(_("Filesize: %i KiB, Traffic left for user %s: %i KiB") % (size, self.user, traffic))
return size <= traffic
- # TODO: Remove in 0.5
+
+ #@TODO: Remove in 0.4.10
def wait(self, seconds=False, reconnect=False):
if seconds:
self.setWait(seconds, reconnect)
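A hoster built on the updated SimpleHoster follows the same scheme: the SH_* switches are now TEXT_ENCODING, COOKIES and FORCE_CHECK_TRAFFIC, and premium traffic is only verified when the latter is True. A rough sketch; the site, the FILE_NAME_PATTERN / FILE_SIZE_PATTERN names and the import path are assumptions (the group keys follow the 'N', 'S' and 'U' keys used by parseFileInfo() above):

    import re

    from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo


    class ExampleShareCom(SimpleHoster):
        __name__ = "ExampleShareCom"
        __type__ = "hoster"
        __version__ = "0.01"
        __pattern__ = r'http://(?:www\.)?exampleshare\.com/\w{12}'
        __description__ = """Exampleshare.com hoster plugin (illustration only)"""

        FILE_NAME_PATTERN = r'<h1>(?P<N>[^<]+)</h1>'           #: hypothetical
        FILE_SIZE_PATTERN = r'Size: (?P<S>[\d.]+) (?P<U>\w+)'  #: hypothetical
        OFFLINE_PATTERN = r'>File not found<'
        PREMIUM_ONLY_PATTERN = r'>Premium users only<'

        COOKIES = [("exampleshare.com", "lang", "english")]
        TEXT_ENCODING = "utf8"
        FORCE_CHECK_TRAFFIC = True

        def handleFree(self):
            m = re.search(r'href="(http://[^"]+/download/[^"]+)"', self.html)
            if m is None:
                self.parseError("Download link not found")
            self.download(m.group(1))


    getInfo = create_getInfo(ExampleShareCom)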
diff --git a/pyload/plugins/internal/UnRar.py b/pyload/plugins/internal/UnRar.py
index ed8478a3a..0f54e75b9 100644
--- a/pyload/plugins/internal/UnRar.py
+++ b/pyload/plugins/internal/UnRar.py
@@ -4,7 +4,7 @@ import os
import re
from glob import glob
-from os.path import join
+from os.path import basename, join
from string import digits
from subprocess import Popen, PIPE
@@ -12,14 +12,23 @@ from pyload.plugins.internal.AbstractExtractor import AbtractExtractor, WrongPas
from pyload.utils import safe_join, decode
+def renice(pid, value):
+ if os.name != "nt" and value:
+ try:
+ Popen(["renice", str(value), str(pid)], stdout=PIPE, stderr=PIPE, bufsize=-1)
+ except:
+ print "Renice failed"
+
+
class UnRar(AbtractExtractor):
__name__ = "UnRar"
- __version__ = "0.16"
+ __version__ = "0.18"
__description__ = """Rar extractor plugin"""
__author_name__ = "RaNaN"
__author_mail__ = "RaNaN@pyload.org"
+
CMD = "unrar"
# there are some more uncovered rar formats
@@ -50,6 +59,7 @@ class UnRar(AbtractExtractor):
return True
+
@staticmethod
def getTargets(files_ids):
result = []
@@ -68,12 +78,14 @@ class UnRar(AbtractExtractor):
return result
+
def init(self):
self.passwordProtected = False
self.headerProtected = False #: list files will not work without password
self.smallestFile = None #: small file to test passwords
self.password = "" #: save the correct password
+
def checkArchive(self):
p = self.call_unrar("l", "-v", self.file)
out, err = p.communicate()
@@ -100,6 +112,7 @@ class UnRar(AbtractExtractor):
return False
+
def checkPassword(self, password):
# at this point we can only verify header protected files
if self.headerProtected:
@@ -110,6 +123,7 @@ class UnRar(AbtractExtractor):
return True
+
def extract(self, progress, password=None):
command = "x" if self.fullpath else "e"
@@ -151,13 +165,15 @@ class UnRar(AbtractExtractor):
self.password = password
self.listContent()
+
def getDeleteFiles(self):
- if ".part" in self.file:
+ if ".part" in basename(self.file):
return glob(re.sub("(?<=\.part)([01]+)", "*", self.file, re.IGNORECASE))
# get files which matches .r* and filter unsuited files out
parts = glob(re.sub(r"(?<=\.r)ar$", "*", self.file, re.IGNORECASE))
return filter(lambda x: self.re_partfiles.match(x), parts)
+
def listContent(self):
command = "vb" if self.fullpath else "lb"
p = self.call_unrar(command, "-v", self.file, password=self.password)
@@ -177,6 +193,7 @@ class UnRar(AbtractExtractor):
self.files = result
+
def call_unrar(self, command, *xargs, **kwargs):
args = []
# overwrite flag
@@ -202,11 +219,3 @@ class UnRar(AbtractExtractor):
p = Popen(call, stdout=PIPE, stderr=PIPE)
return p
-
-
-def renice(pid, value):
- if os.name != "nt" and value:
- try:
- Popen(["renice", str(value), str(pid)], stdout=PIPE, stderr=PIPE, bufsize=-1)
- except:
- print "Renice failed"