author    Walter Purcaro <vuolter@gmail.com> 2014-12-07 15:34:19 +0100
committer Walter Purcaro <vuolter@gmail.com> 2014-12-07 15:34:19 +0100
commit    d8771b13f1c658ac726ac93195a48ad87169eccd (patch)
tree      a81899c2905d59ff20341ffcad35c949d880a0b4
parent    Tiny code cosmetics (diff)
parent    [SkipRev] Tiny fixup (diff)
download  pyload-d8771b13f1c658ac726ac93195a48ad87169eccd.tar.xz
Merge branch 'stable' into 0.4.10
Conflicts:
    module/plugins/hoster/NowDownloadEu.py
    module/plugins/hoster/NowDownloadSx.py
    module/plugins/hoster/NowVideoAt.py
    module/plugins/hoster/NowVideoSx.py
    pyload/plugins/account/RapidshareCom.py
    pyload/plugins/addon/ExtractArchive.py
    pyload/plugins/addon/HotFolder.py
    pyload/plugins/addon/UpdateManager.py
    pyload/plugins/hook/Captcha9kw.py
    pyload/plugins/hook/DebridItaliaCom.py
    pyload/plugins/hoster/DebridItaliaCom.py
    pyload/plugins/hoster/Keep2shareCc.py
    pyload/plugins/hoster/NetloadIn.py
    pyload/plugins/hoster/NowDownloadEu.py
    pyload/plugins/hoster/NowVideoAt.py
    pyload/plugins/hoster/RapidshareCom.py
    pyload/plugins/hoster/ShareonlineBiz.py
    pyload/plugins/internal/MultiHoster.py
    pyload/plugins/internal/SimpleHoster.py
-rw-r--r-- module/plugins/accounts/RapiduNet.py | 45
-rw-r--r-- module/plugins/accounts/SafesharingEu.py | 16
-rw-r--r-- module/plugins/crypter/Go4UpCom.py | 49
-rw-r--r-- module/plugins/hooks/SkipRev.py | 81
-rw-r--r-- module/plugins/hoster/DodanePl.py | 18
-rw-r--r-- module/plugins/hoster/NowDownloadSx.py | 64
-rw-r--r-- module/plugins/hoster/NowVideoSx.py | 44
-rw-r--r-- module/plugins/hoster/RapiduNet.py | 82
-rw-r--r-- module/plugins/hoster/SafesharingEu.py | 25
-rw-r--r-- module/plugins/hoster/UploadableCh.py | 90
-rw-r--r-- pyload/plugins/account/AlldebridCom.py | 4
-rw-r--r-- pyload/plugins/account/DebridItaliaCom.py | 30
-rw-r--r-- pyload/plugins/account/EasybytezCom.py | 2
-rw-r--r-- pyload/plugins/account/FastixRu.py | 11
-rw-r--r-- pyload/plugins/account/KingfilesNet.py | 8
-rw-r--r-- pyload/plugins/account/NetloadIn.py | 2
-rw-r--r-- pyload/plugins/account/OboomCom.py | 12
-rw-r--r-- pyload/plugins/account/PremiumTo.py | 6
-rw-r--r-- pyload/plugins/account/PremiumizeMe.py | 7
-rw-r--r-- pyload/plugins/account/RapidshareCom.py | 55
-rw-r--r-- pyload/plugins/account/RehostTo.py | 9
-rw-r--r-- pyload/plugins/addon/Checksum.py | 9
-rw-r--r-- pyload/plugins/addon/ExtractArchive.py | 4
-rw-r--r-- pyload/plugins/addon/HotFolder.py | 7
-rw-r--r-- pyload/plugins/addon/UpdateManager.py | 79
-rw-r--r-- pyload/plugins/container/RSDF.py | 4
-rw-r--r-- pyload/plugins/crypter/FilecryptCc.py | 2
-rw-r--r-- pyload/plugins/crypter/LinkCryptWs.py | 7
-rw-r--r-- pyload/plugins/crypter/MediafireCom.py | 6
-rw-r--r-- pyload/plugins/crypter/RelinkUs.py | 4
-rw-r--r-- pyload/plugins/hook/AlldebridCom.py | 2
-rw-r--r-- pyload/plugins/hook/Captcha9kw.py | 31
-rw-r--r-- pyload/plugins/hook/DebridItaliaCom.py | 17
-rw-r--r-- pyload/plugins/hook/EasybytezCom.py | 17
-rw-r--r-- pyload/plugins/hook/FastixRu.py | 5
-rw-r--r-- pyload/plugins/hook/FreeWayMe.py | 2
-rw-r--r-- pyload/plugins/hook/LinksnappyCom.py | 2
-rw-r--r-- pyload/plugins/hook/MegaDebridEu.py | 2
-rw-r--r-- pyload/plugins/hook/MyfastfileCom.py | 2
-rw-r--r-- pyload/plugins/hook/OverLoadMe.py | 3
-rw-r--r-- pyload/plugins/hook/PremiumizeMe.py | 4
-rw-r--r-- pyload/plugins/hook/RPNetBiz.py | 2
-rw-r--r-- pyload/plugins/hook/RehostTo.py | 3
-rw-r--r-- pyload/plugins/hook/SimplyPremiumCom.py | 2
-rw-r--r-- pyload/plugins/hook/SimplydebridCom.py | 2
-rw-r--r-- pyload/plugins/hook/UnrestrictLi.py | 2
-rw-r--r-- pyload/plugins/hook/XFileSharingPro.py | 8
-rw-r--r-- pyload/plugins/hook/ZeveraCom.py | 2
-rw-r--r-- pyload/plugins/hoster/AlldebridCom.py | 8
-rw-r--r-- pyload/plugins/hoster/BezvadataCz.py | 5
-rw-r--r-- pyload/plugins/hoster/BitshareCom.py | 2
-rw-r--r-- pyload/plugins/hoster/CatShareNet.py | 2
-rw-r--r-- pyload/plugins/hoster/CrockoCom.py | 2
-rw-r--r-- pyload/plugins/hoster/DailymotionCom.py | 41
-rw-r--r-- pyload/plugins/hoster/DataHu.py | 9
-rw-r--r-- pyload/plugins/hoster/DateiTo.py | 24
-rw-r--r-- pyload/plugins/hoster/DebridItaliaCom.py | 37
-rw-r--r-- pyload/plugins/hoster/DlFreeFr.py | 15
-rw-r--r-- pyload/plugins/hoster/FastixRu.py | 7
-rw-r--r-- pyload/plugins/hoster/FastshareCz.py | 70
-rw-r--r-- pyload/plugins/hoster/FileSharkPl.py | 95
-rw-r--r-- pyload/plugins/hoster/FileStoreTo.py | 3
-rw-r--r-- pyload/plugins/hoster/FilecloudIo.py | 13
-rw-r--r-- pyload/plugins/hoster/FilefactoryCom.py | 2
-rw-r--r-- pyload/plugins/hoster/FilepostCom.py | 11
-rw-r--r-- pyload/plugins/hoster/FilerNet.py | 74
-rw-r--r-- pyload/plugins/hoster/FileserveCom.py | 4
-rw-r--r-- pyload/plugins/hoster/FreakshareCom.py | 4
-rw-r--r-- pyload/plugins/hoster/FreeWayMe.py | 4
-rw-r--r-- pyload/plugins/hoster/FshareVn.py | 2
-rw-r--r-- pyload/plugins/hoster/GamefrontCom.py | 5
-rw-r--r-- pyload/plugins/hoster/GigapetaCom.py | 2
-rw-r--r-- pyload/plugins/hoster/GooIm.py | 3
-rw-r--r-- pyload/plugins/hoster/JumbofilesCom.py | 3
-rw-r--r-- pyload/plugins/hoster/JunocloudMe.py | 5
-rw-r--r-- pyload/plugins/hoster/Keep2shareCc.py | 117
-rw-r--r-- pyload/plugins/hoster/LetitbitNet.py | 5
-rw-r--r-- pyload/plugins/hoster/LoadTo.py | 2
-rw-r--r-- pyload/plugins/hoster/LuckyShareNet.py | 7
-rw-r--r-- pyload/plugins/hoster/MegaCoNz.py | 33
-rw-r--r-- pyload/plugins/hoster/MegasharesCom.py | 19
-rw-r--r-- pyload/plugins/hoster/MultishareCz.py | 9
-rw-r--r-- pyload/plugins/hoster/NetloadIn.py | 147
-rw-r--r-- pyload/plugins/hoster/OneFichierCom.py | 6
-rw-r--r-- pyload/plugins/hoster/PremiumTo.py | 12
-rw-r--r-- pyload/plugins/hoster/PremiumizeMe.py | 9
-rw-r--r-- pyload/plugins/hoster/PromptfileCom.py | 5
-rw-r--r-- pyload/plugins/hoster/QuickshareCz.py | 1
-rw-r--r-- pyload/plugins/hoster/RapidgatorNet.py | 2
-rw-r--r-- pyload/plugins/hoster/RapidshareCom.py | 228
-rw-r--r-- pyload/plugins/hoster/RealdebridCom.py | 9
-rw-r--r-- pyload/plugins/hoster/RehostTo.py | 5
-rw-r--r-- pyload/plugins/hoster/RemixshareCom.py | 2
-rw-r--r-- pyload/plugins/hoster/RgHostNet.py | 13
-rw-r--r-- pyload/plugins/hoster/ShareonlineBiz.py | 193
-rw-r--r-- pyload/plugins/hoster/SimplyPremiumCom.py | 2
-rw-r--r-- pyload/plugins/hoster/SimplydebridCom.py | 7
-rw-r--r-- pyload/plugins/hoster/StreamCz.py | 2
-rw-r--r-- pyload/plugins/hoster/TurbobitNet.py | 6
-rw-r--r-- pyload/plugins/hoster/TwoSharedCom.py | 3
-rw-r--r-- pyload/plugins/hoster/UlozTo.py | 2
-rw-r--r-- pyload/plugins/hoster/UnrestrictLi.py | 2
-rw-r--r-- pyload/plugins/hoster/UpleaCom.py | 2
-rw-r--r-- pyload/plugins/hoster/UploadedTo.py | 2
-rw-r--r-- pyload/plugins/hoster/UploadheroCom.py | 2
-rw-r--r-- pyload/plugins/hoster/UploadingCom.py | 8
-rw-r--r-- pyload/plugins/hoster/UpstoreNet.py | 2
-rw-r--r-- pyload/plugins/hoster/VeohCom.py | 5
-rw-r--r-- pyload/plugins/hoster/VimeoCom.py | 5
-rw-r--r-- pyload/plugins/hoster/WebshareCz.py | 7
-rw-r--r-- pyload/plugins/hoster/YoutubeCom.py | 47
-rw-r--r-- pyload/plugins/hoster/ZeveraCom.py | 5
-rw-r--r-- pyload/plugins/hoster/ZippyshareCom.py | 15
-rw-r--r-- pyload/plugins/internal/BasePlugin.py | 98
-rw-r--r-- pyload/plugins/internal/DeadCrypter.py | 5
-rw-r--r-- pyload/plugins/internal/DeadHoster.py | 5
-rw-r--r-- pyload/plugins/internal/MultiHoster.py | 44
-rw-r--r-- pyload/plugins/internal/SimpleHoster.py | 146
-rw-r--r-- pyload/plugins/internal/UnRar.py | 14
-rw-r--r-- pyload/plugins/internal/XFSAccount.py | 20
-rw-r--r-- pyload/plugins/internal/XFSHoster.py | 22
121 files changed, 1516 insertions, 1128 deletions
diff --git a/module/plugins/accounts/RapiduNet.py b/module/plugins/accounts/RapiduNet.py
new file mode 100644
index 000000000..2fabb6120
--- /dev/null
+++ b/module/plugins/accounts/RapiduNet.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+import re
+from module.plugins.Account import Account
+from module.common.json_layer import json_loads
+
+
+class RapiduNet(Account):
+ __name__ = "RapiduNet"
+ __type__ = "account"
+ __version__ = "0.01"
+
+ __description__ = """Rapidu.net account plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("prOq", None)]
+
+
+ ACCOUNT_INFO_PATTERN = '<a href="premium/" style="padding-left: 0px;">Account: <b>(.*?)</b></a>'
+
+
+ def loadAccountInfo(self, user, req):
+ premium = False
+
+ req.load('https://rapidu.net/ajax.php?a=getChangeLang', post={"_go": "", "lang": "en"})
+ self.html = req.load('https://rapidu.net/', decode=True)
+
+ m = re.search(self.ACCOUNT_INFO_PATTERN, self.html)
+ if m:
+ if m.group(1) == "Premium":
+ premium = True
+
+ return {"validuntil": None, "trafficleft": None, "premium": premium}
+
+
+ def login(self, user, data, req):
+ try:
+ json = req.load('https://rapidu.net/ajax.php?a=getUserLogin', post={"_go": "", "login": user, "pass": data['password'], "member": "1"})
+ json = json_loads(json)
+ self.logDebug(json)
+
+ if not json['message'] == "success":
+ self.wrongPassword()
+ except Exception, e:
+ self.logError(e)
+
diff --git a/module/plugins/accounts/SafesharingEu.py b/module/plugins/accounts/SafesharingEu.py
new file mode 100644
index 000000000..2e58d33b3
--- /dev/null
+++ b/module/plugins/accounts/SafesharingEu.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+from module.plugins.internal.XFSAccount import XFSAccount
+
+
+class SafesharingEu(XFSAccount):
+ __name__ = "SafesharingEu"
+ __type__ = "account"
+ __version__ = "0.02"
+
+ __description__ = """Safesharing.eu account plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("guidobelix", "guidobelix@hotmail.it")]
+
+
+ HOSTER_DOMAIN = "safesharing.eu"
diff --git a/module/plugins/crypter/Go4UpCom.py b/module/plugins/crypter/Go4UpCom.py
new file mode 100644
index 000000000..102bc32b5
--- /dev/null
+++ b/module/plugins/crypter/Go4UpCom.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+
+from module.plugins.internal.SimpleCrypter import SimpleCrypter, create_getInfo
+
+
+class Go4UpCom(SimpleCrypter):
+ __name__ = "Go4UpCom"
+ __type__ = "crypter"
+ __version__ = "0.11"
+
+ __pattern__ = r'http://go4up\.com/(dl/\w{12}|rd/\w{12}/\d+)'
+
+ __description__ = """Go4Up.com decrypter plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("rlindner81", "rlindner81@gmail.com"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ LINK_PATTERN = r'(http://go4up\.com/rd/.+?)<'
+
+ NAME_PATTERN = r'<title>Download (.+?)<'
+
+ OFFLINE_PATTERN = r'>\s*(404 Page Not Found|File not Found|Mirror does not exist)'
+
+
+ def getLinks(self):
+ links = []
+
+ m = re.search(r'(/download/gethosts/.+?)"', self.html)
+ if m:
+ self.html = self.load(urljoin("http://go4up.com/", m.group(1)))
+ pages = [self.load(url) for url in re.findall(self.LINK_PATTERN, self.html)]
+ else:
+ pages = [self.html]
+
+ for html in pages:
+ try:
+ links.append(re.search(r'<b><a href="(.+?)"', html).group(1))
+ except:
+ continue
+
+ return links
+
+
+getInfo = create_getInfo(Go4UpCom)
diff --git a/module/plugins/hooks/SkipRev.py b/module/plugins/hooks/SkipRev.py
new file mode 100644
index 000000000..76a48a255
--- /dev/null
+++ b/module/plugins/hooks/SkipRev.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urllib import unquote
+from urlparse import urljoin, urlparse
+
+from module.plugins.Hook import Hook
+from module.plugins.Plugin import SkipDownload
+
+
+class SkipRev(Hook):
+ __name__ = "SkipRev"
+ __type__ = "hook"
+ __version__ = "0.13"
+
+ __config__ = [("auto", "bool", "Automatically keep all rev files needed by package", True),
+ ("tokeep", "int" , "Min number of rev files to keep for package" , 1),
+ ("unskip", "bool", "Restart a skipped rev when download fails" , True)]
+
+ __description__ = """Skip files ending with extension rev"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def _setup(self):
+ super(self.pyfile.plugin, self).setup()
+ if self.pyfile.hasStatus("skipped"):
+ raise SkipDownload(self.pyfile.getStatusName() or self.pyfile.pluginname)
+
+
+ def pyname(self, pyfile):
+ url = pyfile.url
+ plugin = pyfile.plugin
+
+ if hasattr(plugin, "info") and 'name' in plugin.info and plugin.info['name']:
+ name = plugin.info['name']
+
+ elif hasattr(plugin, "parseInfo"):
+ name = next(plugin.parseInfo([url]))['name']
+
+ elif hasattr(plugin, "getInfo"): #@NOTE: if parseInfo was not found, getInfo should be missing too
+ name = plugin.getInfo(url)['name']
+
+ else:
+ self.logWarning("Unable to grab file name")
+ name = urlparse(unquote(url)).path.split('/')[-1]
+
+ return name
+
+
+ def downloadPreparing(self, pyfile):
+ if pyfile.getStatusName() == "unskipped" or not self.pyname(pyfile).endswith(".rev"):
+ return
+
+ tokeep = self.getConfig("tokeep")
+
+ if tokeep > 0:
+ saved = [True for link in pyfile.package().getChildren() \
+ if link.name.endswith(".rev") and (link.hasStatus("finished") or link.hasStatus("downloading"))].count(True)
+
+ if saved < tokeep:
+ return
+
+ pyfile.setCustomStatus("SkipRev", "skipped")
+ pyfile.plugin.setup = self._setup #: work-around: inject status checker inside the preprocessing routine of the plugin
+
+
+ def downloadFailed(self, pyfile):
+ if self.getConfig("auto") is False:
+
+ if self.getConfig("unskip") is False:
+ return
+
+ if not pyfile.name.endswith(".rev"):
+ return
+
+ for link in pyfile.package().getChildren():
+ if link.hasStatus("skipped") and link.name.endswith(".rev"):
+ link.setCustomStatus("unskipped", "queued")
+ return
diff --git a/module/plugins/hoster/DodanePl.py b/module/plugins/hoster/DodanePl.py
new file mode 100644
index 000000000..58f1c02d8
--- /dev/null
+++ b/module/plugins/hoster/DodanePl.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+
+from module.plugins.internal.DeadHoster import DeadHoster, create_getInfo
+
+
+class DodanePl(DeadHoster):
+ __name__ = "DodanePl"
+ __type__ = "hoster"
+ __version__ = "0.03"
+
+ __pattern__ = r'http://(?:www\.)?dodane\.pl/file/\d+'
+
+ __description__ = """Dodane.pl hoster plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("z00nx", "z00nx0@gmail.com")]
+
+
+getInfo = create_getInfo(DodanePl)
diff --git a/module/plugins/hoster/NowDownloadSx.py b/module/plugins/hoster/NowDownloadSx.py
new file mode 100644
index 000000000..d2ae08954
--- /dev/null
+++ b/module/plugins/hoster/NowDownloadSx.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
+from module.utils import fixup
+
+
+class NowDownloadSx(SimpleHoster):
+ __name__ = "NowDownloadSx"
+ __type__ = "hoster"
+ __version__ = "0.05"
+
+ __pattern__ = r'http://(?:www\.)?nowdownload\.(at|ch|co|eu|sx)/(dl/|download\.php\?id=)\w+'
+
+ __description__ = """NowDownload.sx hoster plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("godofdream", "soilfiction@gmail.com"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ INFO_PATTERN = r'Downloading</span> <br> (?P<N>.*) (?P<S>[\d.,]+) (?P<U>[\w^_]+) </h4>'
+ OFFLINE_PATTERN = r'>This file does not exist'
+
+ TOKEN_PATTERN = r'"(/api/token\.php\?token=\w+)"'
+ CONTINUE_PATTERN = r'"(/dl2/\w+/\w+)"'
+ WAIT_PATTERN = r'\.countdown\(\{until: \+(\d+),'
+ LINK_PATTERN = r'(http://s\d+\.coolcdn\.info/nowdownload/.+?)["\']'
+
+ NAME_REPLACEMENTS = [("&#?\w+;", fixup), (r'<[^>]*>', '')]
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = -1
+
+
+ def handleFree(self):
+ tokenlink = re.search(self.TOKEN_PATTERN, self.html)
+ continuelink = re.search(self.CONTINUE_PATTERN, self.html)
+ if tokenlink is None or continuelink is None:
+ self.error()
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ wait = int(m.group(1))
+ else:
+ wait = 60
+
+ baseurl = "http://www.nowdownload.at"
+ self.html = self.load(baseurl + str(tokenlink.group(1)))
+ self.wait(wait)
+
+ self.html = self.load(baseurl + str(continuelink.group(1)))
+
+ url = re.search(self.LINK_PATTERN, self.html)
+ if url is None:
+ self.error(_("Download link not found"))
+
+ self.download(str(url.group(1)))
+
+
+getInfo = create_getInfo(NowDownloadSx)
diff --git a/module/plugins/hoster/NowVideoSx.py b/module/plugins/hoster/NowVideoSx.py
new file mode 100644
index 000000000..b59bd79da
--- /dev/null
+++ b/module/plugins/hoster/NowVideoSx.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class NowVideoSx(SimpleHoster):
+ __name__ = "NowVideoSx"
+ __type__ = "hoster"
+ __version__ = "0.07"
+
+ __pattern__ = r'http://(?:www\.)?nowvideo\.(at|ch|co|eu|sx)/(video|mobile/#/videos)/(?P<ID>\w+)'
+
+ __description__ = """NowVideo.sx hoster plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ URL_REPLACEMENTS = [(__pattern__ + ".*", r'http://www.nowvideo.at/video/\g<ID>')]
+
+ NAME_PATTERN = r'<h4>(?P<N>.+?)<'
+ OFFLINE_PATTERN = r'>This file no longer exists'
+
+ LINK_FREE_PATTERN = r'<source src="(.+?)"'
+ LINK_PREMIUM_PATTERN = r'<div id="content_player" >\s*<a href="(.+?)"'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+
+
+ def handleFree(self):
+ self.html = self.load("http://www.nowvideo.at/mobile/video.php", get={'id': self.info['pattern']['ID']})
+
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
+ if m is None:
+ self.error(_("Free download link not found"))
+
+ self.download(m.group(1))
+
+
+getInfo = create_getInfo(NowVideoSx)
diff --git a/module/plugins/hoster/RapiduNet.py b/module/plugins/hoster/RapiduNet.py
new file mode 100644
index 000000000..e14b18a4f
--- /dev/null
+++ b/module/plugins/hoster/RapiduNet.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pycurl import HTTPHEADER
+from time import time, altzone
+
+from module.common.json_layer import json_loads
+from module.plugins.internal.CaptchaService import ReCaptcha
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class RapiduNet(SimpleHoster):
+ __name__ = "RapiduNet"
+ __type__ = "hoster"
+ __version__ = "0.02"
+
+ __pattern__ = r'https?://(?:www\.)?rapidu\.net/(?P<ID>\d{10})'
+
+ __description__ = """Rapidu.net hoster plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("prOq", None)]
+
+
+ COOKIES = [("rapidu.net", "rapidu_lang", "en")]
+
+ FILE_INFO_PATTERN = r'<h1 title="(?P<N>.*)">.*</h1>\s*<small>(?P<S>\d+(\.\d+)?)\s(?P<U>\w+)</small>'
+ OFFLINE_PATTERN = r'404 - File not found'
+
+ ERROR_PATTERN = r'<div class="error">'
+
+ RECAPTCHA_KEY = r'6Ld12ewSAAAAAHoE6WVP_pSfCdJcBQScVweQh8Io'
+
+
+ def setup(self):
+ self.resumeDownload = True
+ self.multiDL = True
+ self.limitDL = 0 if self.premium else 2
+
+
+ def handleFree(self):
+ self.req.http.lastURL = self.pyfile.url
+ self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
+
+ jsvars = self.getJsonResponse("https://rapidu.net/ajax.php?a=getLoadTimeToDownload", {'_go': None})
+
+ if str(jsvars['timeToDownload']) == "stop":
+ t = (24 * 60 * 60) - (int(time()) % (24 *60 * 60)) + altzone
+
+ self.logInfo("You've reached your daily download limit")
+
+ self.retry(10, 10 if t < 1 else None, "Try tomorrow again") #@NOTE: check t in case of not synchronised clock
+
+ else:
+ self.wait(int(jsvars['timeToDownload']) - int(time()))
+
+ recaptcha = ReCaptcha(self)
+
+ for _i in xrange(10):
+ challenge, code = recaptcha.challenge(self.RECAPTCHA_KEY)
+
+ jsvars = self.getJsonResponse("https://rapidu.net/ajax.php?a=getCheckCaptcha",
+ {'_go' : None,
+ 'captcha1': challenge,
+ 'captcha2': code,
+ 'fileId' : self.info['ID']})
+ if jsvars['message'] == 'success':
+ self.download(jsvars['url'])
+ break
+
+
+ def getJsonResponse(self, url, post_data):
+ response = self.load(url, post=post_data, decode=True)
+ if not response.startswith('{'):
+ self.retry()
+
+ self.logDebug(url, response)
+
+ return json_loads(response)
+
+
+getInfo = create_getInfo(RapiduNet)
diff --git a/module/plugins/hoster/SafesharingEu.py b/module/plugins/hoster/SafesharingEu.py
new file mode 100644
index 000000000..f0936b9e8
--- /dev/null
+++ b/module/plugins/hoster/SafesharingEu.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+from module.plugins.internal.XFSHoster import XFSHoster, create_getInfo
+
+
+class SafesharingEu(XFSHoster):
+ __name__ = "SafesharingEu"
+ __type__ = "hoster"
+ __version__ = "0.05"
+
+ __pattern__ = r'https?://(?:www\.)?safesharing\.eu/\w{12}'
+
+ __description__ = """Safesharing.eu hoster plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("zapp-brannigan", "fuerst.reinje@web.de")]
+
+
+ HOSTER_DOMAIN = "safesharing.eu"
+
+ WAIT_PATTERN = r'You have to wait (\d+) minutes'
+
+ ERROR_PATTERN = r'(?:<div class="alert alert-danger">)(.+?)(?:</div>)'
+
+
+getInfo = create_getInfo(SafesharingEu)
diff --git a/module/plugins/hoster/UploadableCh.py b/module/plugins/hoster/UploadableCh.py
new file mode 100644
index 000000000..77b3d7d8a
--- /dev/null
+++ b/module/plugins/hoster/UploadableCh.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import sleep
+
+from module.plugins.internal.CaptchaService import ReCaptcha
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
+
+
+class UploadableCh(SimpleHoster):
+ __name__ = "UploadableCh"
+ __type__ = "hoster"
+ __version__ = "0.02"
+
+ __pattern__ = r'http://(?:www\.)?uploadable\.ch/file/(?P<ID>\w+)'
+
+ __description__ = """Uploadable.ch hoster plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("zapp-brannigan", "fuerst.reinje@web.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ FILE_INFO_PATTERN = r'div id=\"file_name\" title=.*>(?P<N>.+)<span class=\"filename_normal\">\((?P<S>[\d.]+) (?P<U>\w+)\)</span><'
+
+ OFFLINE_PATTERN = r'>(File not available|This file is no longer available)'
+ TEMP_OFFLINE_PATTERN = r'<div class="icon_err">'
+
+ WAIT_PATTERN = r'data-time="(\d+)" data-format'
+
+ FILE_URL_REPLACEMENTS = [(__pattern__ + ".*", r'http://www.uploadable.ch/file/\g<ID>')]
+
+
+ def setup(self):
+ self.multiDL = False
+ self.chunkLimit = 1
+
+
+ def handleFree(self):
+ # Click the "free user" button and wait
+ a = self.load(self.pyfile.url, cookies=True, post={'downloadLink': "wait"}, decode=True)
+ self.logDebug(a)
+
+ m = re.search(self.WAIT_PATTERN, a)
+ if m is not None:
+ self.wait(int(m.group(1))) #: Expected output: {"waitTime":30}
+ else:
+ self.error("WAIT_PATTERN")
+
+ # Make the recaptcha appear and show it the pyload interface
+ b = self.load(self.pyfile.url, cookies=True, post={'checkDownload': "check"}, decode=True)
+ self.logDebug(b) #: Expected output: {"success":"showCaptcha"}
+
+ recaptcha = ReCaptcha(self)
+
+ challenge, captcha = recaptcha.challenge(self.RECAPTCHA_KEY)
+
+ # Submit the captcha solution
+ self.load("http://www.uploadable.ch/checkReCaptcha.php",
+ cookies=True,
+ post={'recaptcha_challenge_field' : challenge,
+ 'recaptcha_response_field' : captcha,
+ 'recaptcha_shortencode_field': self.info['ID']},
+ decode=True)
+
+ self.wait(3)
+
+ # Get ready for downloading
+ self.load(self.pyfile.url, cookies=True, post={'downloadLink': "show"}, decode=True)
+
+ self.wait(3)
+
+ # Download the file
+ self.download(self.pyfile.url, cookies=True, post={'download': "normal"}, disposition=True)
+
+
+ def checkFile(self):
+ check = self.checkDownload({'wait_or_reconnect': re.compile("Please wait for"),
+ 'is_html' : re.compile("<head>")})
+
+ if check == "wait_or_reconnect":
+ self.logInfo("Downloadlimit reached, please wait or reconnect")
+ self.wait(60 * 60, True)
+ self.retry()
+
+ elif check == "is_html":
+ self.error("Downloaded file is an html file")
+
+
+getInfo = create_getInfo(UploadableCh)
diff --git a/pyload/plugins/account/AlldebridCom.py b/pyload/plugins/account/AlldebridCom.py
index 4f9164468..a3e8ad659 100644
--- a/pyload/plugins/account/AlldebridCom.py
+++ b/pyload/plugins/account/AlldebridCom.py
@@ -36,8 +36,8 @@ class AlldebridCom(Account):
#Get expiration date from API
except Exception:
data = self.getAccountData(user)
- page = req.load("http://www.alldebrid.com/api.php?action=info_user&login=%s&pw=%s" % (user,
- data['password']))
+ page = req.load("http://www.alldebrid.com/api.php",
+ get={'action': "info_user", 'login': user, 'pw': data['password']})
self.logDebug(page)
xml = dom.parseString(page)
exp_time = time() + int(xml.getElementsByTagName("date")[0].childNodes[0].nodeValue) * 24 * 60 * 60
diff --git a/pyload/plugins/account/DebridItaliaCom.py b/pyload/plugins/account/DebridItaliaCom.py
index 30ed9fb1c..41f796327 100644
--- a/pyload/plugins/account/DebridItaliaCom.py
+++ b/pyload/plugins/account/DebridItaliaCom.py
@@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
import re
-import time
+
+from time import mktime, strptime
from pyload.plugins.internal.Account import Account
@@ -9,32 +10,35 @@ from pyload.plugins.internal.Account import Account
class DebridItaliaCom(Account):
__name__ = "DebridItaliaCom"
__type__ = "account"
- __version__ = "0.1"
+ __version__ = "0.11"
__description__ = """Debriditalia.com account plugin"""
__license__ = "GPLv3"
- __authors__ = [("stickell", "l.stickell@yahoo.it")]
+ __authors__ = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
- WALID_UNTIL_PATTERN = r'Premium valid till: (?P<D>[^|]+) \|'
+ WALID_UNTIL_PATTERN = r'Premium valid till: (.+?) \|'
def loadAccountInfo(self, user, req):
+ info = {"premium": False, "validuntil": None, "trafficleft": None}
html = req.load("http://debriditalia.com/")
- if 'Account premium not activated' in html:
- return {"premium": False, "validuntil": None, "trafficleft": None}
+ if 'Account premium not activated' not in html:
+ m = re.search(self.WALID_UNTIL_PATTERN, html)
+ if m:
+ validuntil = int(mktime(strptime(m.group(1), "%d/%m/%Y %H:%M")))
+ info = {"premium": True, "validuntil": validuntil, "trafficleft": -1}
+ else:
+ self.logError(_("Unable to retrieve account information"))
- m = re.search(self.WALID_UNTIL_PATTERN, html)
- if m:
- validuntil = int(time.mktime(time.strptime(m.group('D'), "%d/%m/%Y %H:%M")))
- return {"premium": True, "validuntil": validuntil, "trafficleft": -1}
- else:
- self.logError(_("Unable to retrieve account information"))
+ return info
def login(self, user, data, req):
html = req.load("http://debriditalia.com/login.php",
- get={"u": user, "p": data['password']})
+ get={'u': user, 'p': data['password']})
+
if 'NO' in html:
self.wrongPassword()
diff --git a/pyload/plugins/account/EasybytezCom.py b/pyload/plugins/account/EasybytezCom.py
index 2ffcd5392..64b59413c 100644
--- a/pyload/plugins/account/EasybytezCom.py
+++ b/pyload/plugins/account/EasybytezCom.py
@@ -8,7 +8,7 @@ from pyload.plugins.internal.XFSAccount import XFSAccount
class EasybytezCom(XFSAccount):
__name__ = "EasybytezCom"
__type__ = "account"
- __version__ = "0.10"
+ __version__ = "0.12"
__description__ = """EasyBytez.com account plugin"""
__license__ = "GPLv3"
diff --git a/pyload/plugins/account/FastixRu.py b/pyload/plugins/account/FastixRu.py
index 96db443b7..829316ce3 100644
--- a/pyload/plugins/account/FastixRu.py
+++ b/pyload/plugins/account/FastixRu.py
@@ -16,11 +16,11 @@ class FastixRu(Account):
def loadAccountInfo(self, user, req):
data = self.getAccountData(user)
- page = req.load("http://fastix.ru/api_v2/?apikey=%s&sub=getaccountdetails" % (data['api']))
- page = json_loads(page)
+ page = json_loads(req.load("http://fastix.ru/api_v2/", get={'apikey': data['api'], 'sub': "getaccountdetails"}))
+
points = page['points']
- kb = float(points)
- kb = kb * 1024 ** 2 / 1000
+ kb = float(points) * 1024 ** 2 / 1000
+
if points > 0:
account_info = {"validuntil": -1, "trafficleft": kb}
else:
@@ -29,7 +29,8 @@ class FastixRu(Account):
def login(self, user, data, req):
- page = req.load("http://fastix.ru/api_v2/?sub=get_apikey&email=%s&password=%s" % (user, data['password']))
+ page = req.load("http://fastix.ru/api_v2/",
+ get={'sub': "get_apikey", 'email': user, 'password': data['password']})
api = json_loads(page)
api = api['apikey']
data['api'] = api
diff --git a/pyload/plugins/account/KingfilesNet.py b/pyload/plugins/account/KingfilesNet.py
index ebd5baec3..892027e52 100644
--- a/pyload/plugins/account/KingfilesNet.py
+++ b/pyload/plugins/account/KingfilesNet.py
@@ -9,7 +9,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class KingfilesNet(SimpleHoster):
__name__ = "KingfilesNet"
__type__ = "hoster"
- __version__ = "0.04"
+ __version__ = "0.05"
__pattern__ = r'http://(?:www\.)?kingfiles\.net/(?P<ID>\w{12})'
@@ -30,15 +30,15 @@ class KingfilesNet(SimpleHoster):
def setup(self):
- self.multiDL = True
self.resumeDownload = True
+ self.multiDL = True
def handleFree(self):
# Click the free user button
post_data = {'op': "download1",
'usr_login': "",
- 'id': self.info['ID'],
+ 'id': self.info['pattern']['ID'],
'fname': self.pyfile.name,
'referer': "",
'method_free': "+"}
@@ -57,7 +57,7 @@ class KingfilesNet(SimpleHoster):
self.logDebug("rand = ", rand)
post_data = {'op': "download2",
- 'id': self.info['ID'],
+ 'id': self.info['pattern']['ID'],
'rand': rand,
'referer': self.pyfile.url,
'method_free': "+",
diff --git a/pyload/plugins/account/NetloadIn.py b/pyload/plugins/account/NetloadIn.py
index 6e780225a..341803670 100644
--- a/pyload/plugins/account/NetloadIn.py
+++ b/pyload/plugins/account/NetloadIn.py
@@ -18,7 +18,7 @@ class NetloadIn(Account):
def loadAccountInfo(self, user, req):
- page = req.load("http://netload.in/index.php?id=2&lang=de")
+ page = req.load("http://netload.in/index.php", get={'id': 2, 'lang': "de"})
left = r'>(\d+) (Tag|Tage), (\d+) Stunden<'
left = re.search(left, page)
if left:
diff --git a/pyload/plugins/account/OboomCom.py b/pyload/plugins/account/OboomCom.py
index 4f7f476e2..5ee35f973 100644
--- a/pyload/plugins/account/OboomCom.py
+++ b/pyload/plugins/account/OboomCom.py
@@ -11,7 +11,7 @@ from pyload.plugins.internal.Account import Account
class OboomCom(Account):
__name__ = "OboomCom"
__type__ = "account"
- __version__ = "0.2"
+ __version__ = "0.21"
__description__ = """Oboom.com account plugin"""
__license__ = "GPLv3"
@@ -51,11 +51,11 @@ class OboomCom(Account):
session = accountData['session']
- return {'premium': premium,
- 'validuntil': validUntil,
- 'trafficleft': trafficLeft,
- 'maxtraffic': maxTraffic,
- 'session': session}
+ return {'premium' : premium,
+ 'validuntil' : validUntil,
+ 'trafficleft': trafficLeft / 1024, #@TODO: Remove / 1024 in 0.4.10
+ 'maxtraffic' : maxTraffic / 1024, #@TODO: Remove / 1024 in 0.4.10
+ 'session' : session}
def login(self, user, data, req):
diff --git a/pyload/plugins/account/PremiumTo.py b/pyload/plugins/account/PremiumTo.py
index f7a00e194..e94eed6fb 100644
--- a/pyload/plugins/account/PremiumTo.py
+++ b/pyload/plugins/account/PremiumTo.py
@@ -21,14 +21,14 @@ class PremiumTo(Account):
get={'username': self.username, 'password': self.password})
traffic = sum(map(int, api_r.split(';')))
- return {"trafficleft": int(traffic), "validuntil": -1}
+ return {"trafficleft": int(traffic) / 1024, "validuntil": -1} #@TODO: Remove / 1024 in 0.4.10
def login(self, user, data, req):
self.username = user
self.password = data['password']
- authcode = req.load("http://premium.to/api/getauthcode.php?username=%s&password=%s" % (
- user, self.password)).strip()
+ authcode = req.load("http://premium.to/api/getauthcode.php",
+ get={'username': user, 'password': self.password}).strip()
if "wrong username" in authcode:
self.wrongPassword()
diff --git a/pyload/plugins/account/PremiumizeMe.py b/pyload/plugins/account/PremiumizeMe.py
index 5f972ca8b..c06baf21d 100644
--- a/pyload/plugins/account/PremiumizeMe.py
+++ b/pyload/plugins/account/PremiumizeMe.py
@@ -42,7 +42,8 @@ class PremiumizeMe(Account):
def getAccountStatus(self, user, req):
# Use premiumize.me API v1 (see https://secure.premiumize.me/?show=api)
# to retrieve account info and return the parsed json answer
- answer = req.load(
- "https://api.premiumize.me/pm-api/v1.php?method=accountstatus&params[login]=%s&params[pass]=%s" % (
- user, self.accounts[user]['password']))
+ answer = req.load("https://api.premiumize.me/pm-api/v1.php",
+ get={'method' : "accountstatus",
+ 'params[login]': user,
+ 'params[pass]' : self.accounts[user]['password']})
return json_loads(answer)
diff --git a/pyload/plugins/account/RapidshareCom.py b/pyload/plugins/account/RapidshareCom.py
deleted file mode 100644
index 01adad15f..000000000
--- a/pyload/plugins/account/RapidshareCom.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugins.internal.Account import Account
-
-
-class RapidshareCom(Account):
- __name__ = "RapidshareCom"
- __type__ = "account"
- __version__ = "0.22"
-
- __description__ = """Rapidshare.com account plugin"""
- __license__ = "GPLv3"
- __authors__ = [("mkaay", "mkaay@mkaay.de")]
-
-
- def loadAccountInfo(self, user, req):
- data = self.getAccountData(user)
- api_url_base = "http://api.rapidshare.com/cgi-bin/rsapi.cgi"
- api_param_prem = {"sub": "getaccountdetails", "type": "prem", "login": user,
- "password": data['password'], "withcookie": 1}
- html = req.load(api_url_base, cookies=False, get=api_param_prem)
- if html.startswith("ERROR"):
- raise Exception(html)
- fields = html.split("\n")
- info = {}
- for t in fields:
- if not t.strip():
- continue
- k, v = t.split("=")
- info[k] = v
-
- validuntil = int(info['billeduntil'])
- premium = True if validuntil else False
-
- tmp = {"premium": premium, "validuntil": validuntil, "trafficleft": -1, "maxtraffic": -1}
-
- return tmp
-
-
- def login(self, user, data, req):
- api_url_base = "http://api.rapidshare.com/cgi-bin/rsapi.cgi"
- api_param_prem = {"sub": "getaccountdetails", "type": "prem", "login": user,
- "password": data['password'], "withcookie": 1}
- html = req.load(api_url_base, cookies=False, get=api_param_prem)
- if html.startswith("ERROR"):
- raise Exception(html + "### Note you have to use your account number for login, instead of name")
- fields = html.split("\n")
- info = {}
- for t in fields:
- if not t.strip():
- continue
- k, v = t.split("=")
- info[k] = v
- cj = self.getAccountCookies(user)
- cj.setCookie("rapidshare.com", "enc", info['cookie'])
diff --git a/pyload/plugins/account/RehostTo.py b/pyload/plugins/account/RehostTo.py
index e8ee3ba15..070cdda3a 100644
--- a/pyload/plugins/account/RehostTo.py
+++ b/pyload/plugins/account/RehostTo.py
@@ -15,12 +15,14 @@ class RehostTo(Account):
def loadAccountInfo(self, user, req):
data = self.getAccountData(user)
- page = req.load("http://rehost.to/api.php?cmd=login&user=%s&pass=%s" % (user, data['password']))
+ page = req.load("http://rehost.to/api.php",
+ get={'cmd': "login", 'user': user, 'pass': data['password']})
data = [x.split("=") for x in page.split(",")]
ses = data[0][1]
long_ses = data[1][1]
- page = req.load("http://rehost.to/api.php?cmd=get_premium_credits&long_ses=%s" % long_ses)
+ page = req.load("http://rehost.to/api.php",
+ get={'cmd': "get_premium_credits", 'long_ses': long_ses})
traffic, valid = page.split(",")
account_info = {"trafficleft": int(traffic) * 1024,
@@ -32,7 +34,8 @@ class RehostTo(Account):
def login(self, user, data, req):
- page = req.load("http://rehost.to/api.php?cmd=login&user=%s&pass=%s" % (user, data['password']))
+ page = req.load("http://rehost.to/api.php",
+ get={'cmd': "login", 'user': user, 'pass': data['password']})
if "Login failed." in page:
self.wrongPassword()
diff --git a/pyload/plugins/addon/Checksum.py b/pyload/plugins/addon/Checksum.py
index 3e1b90941..84024ce83 100644
--- a/pyload/plugins/addon/Checksum.py
+++ b/pyload/plugins/addon/Checksum.py
@@ -82,10 +82,15 @@ class Checksum(Addon):
a) if known, the exact filesize in bytes (e.g. "size": 123456789)
b) hexadecimal hash string with algorithm name as key (e.g. "md5": "d76505d0869f9f928a17d42d66326307")
"""
- if hasattr(pyfile.plugin, "check_data") and (isinstance(pyfile.plugin.check_data, dict)):
+ if hasattr(pyfile.plugin, "check_data") and isinstance(pyfile.plugin.check_data, dict):
data = pyfile.plugin.check_data.copy()
- elif hasattr(pyfile.plugin, "api_data") and (isinstance(pyfile.plugin.api_data, dict)):
+
+ elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict):
data = pyfile.plugin.api_data.copy()
+
+ # elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
+ # data = pyfile.plugin.info.copy()
+
else:
return
diff --git a/pyload/plugins/addon/ExtractArchive.py b/pyload/plugins/addon/ExtractArchive.py
index 91f477cf8..eef8f00ef 100644
--- a/pyload/plugins/addon/ExtractArchive.py
+++ b/pyload/plugins/addon/ExtractArchive.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from __future__ import with_statement
+
import os
import sys
@@ -57,7 +59,7 @@ from pyload.utils import safe_join, fs_encode
class ExtractArchive(Addon):
__name__ = "ExtractArchive"
__type__ = "addon"
- __version__ = "0.17"
+ __version__ = "0.18"
__config__ = [("activated" , "bool" , "Activated" , True ),
("fullpath" , "bool" , "Extract full path" , True ),
diff --git a/pyload/plugins/addon/HotFolder.py b/pyload/plugins/addon/HotFolder.py
index e91b9e04f..b16c02cf8 100644
--- a/pyload/plugins/addon/HotFolder.py
+++ b/pyload/plugins/addon/HotFolder.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from __future__ import with_statement
+
import time
from os import listdir, makedirs
@@ -13,10 +15,9 @@ from pyload.utils import fs_encode, safe_join
class HotFolder(Addon):
__name__ = "HotFolder"
__type__ = "addon"
- __version__ = "0.11"
+ __version__ = "0.12"
- __config__ = [("activated" , "bool", "Activated" , False ),
- ("folder" , "str" , "Folder to observe" , "container"),
+ __config__ = [("folder" , "str" , "Folder to observe" , "container"),
("watch_file", "bool", "Observe link file" , False ),
("keep" , "bool", "Keep added containers", True ),
("file" , "str" , "Link file" , "links.txt")]
diff --git a/pyload/plugins/addon/UpdateManager.py b/pyload/plugins/addon/UpdateManager.py
index 622374136..97fa4a399 100644
--- a/pyload/plugins/addon/UpdateManager.py
+++ b/pyload/plugins/addon/UpdateManager.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from __future__ import with_statement
+
import re
import sys
@@ -14,13 +16,14 @@ from pyload.utils import safe_join
class UpdateManager(Addon):
__name__ = "UpdateManager"
__type__ = "addon"
- __version__ = "0.40"
+ __version__ = "0.42"
- __config__ = [("activated" , "bool" , "Activated" , True ),
- ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
- ("interval" , "int" , "Check interval in hours" , 8 ),
- ("reloadplugins", "bool" , "Monitor plugins for code changes (debug mode only)", True ),
- ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , True )]
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
+ ("interval" , "int" , "Check interval in hours" , 8 ),
+ ("autorestart" , "bool" , "Automatically restart pyLoad when required" , True ),
+ ("reloadplugins", "bool" , "Monitor plugins for code changes in debug mode", True ),
+ ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , True )]
__description__ = """Check for updates"""
__license__ = "GPLv3"
@@ -29,8 +32,9 @@ class UpdateManager(Addon):
# event_list = ["pluginConfigChanged"]
- SERVER_URL = "http://updatemanager.pyload.org"
- MIN_INTERVAL = 6 * 60 * 60 #: 6h minimum check interval (value is in seconds)
+ SERVER_URL = "http://updatemanager.pyload.org"
+ VERSION = re.compile(r'__version__.*=.*("|\')([\d.]+)')
+ MIN_INTERVAL = 3 * 60 * 60 #: 3h minimum check interval (value is in seconds)
def pluginConfigChanged(self, plugin, name, value):
@@ -125,7 +129,7 @@ class UpdateManager(Addon):
status = self.update(onlyplugin=self.getConfig("mode") == "plugins only")
- if status == 2:
+ if status is 2 and self.getConfig("autorestart"):
self.core.api.restart()
else:
self.updating = False
@@ -173,21 +177,38 @@ class UpdateManager(Addon):
exitcode = 0
updated = []
- vre = re.compile(r'__version__.*=.*("|\')([\d.]+)')
- url = updates[0]
+ url = updates[0]
schema = updates[1].split('|')
if "BLACKLIST" in updates:
blacklist = updates[updates.index('BLACKLIST') + 1:]
- updates = updates[2:updates.index('BLACKLIST')]
+ updates = updates[2:updates.index('BLACKLIST')]
else:
blacklist = None
- updates = updates[2:]
+ updates = updates[2:]
+
+ upgradable = [dict(zip(schema, x.split('|'))) for x in updates]
+ blacklisted = [(x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]) for x in blacklist] if blacklist else []
- upgradable = sorted(map(lambda x: dict(zip(schema, x.split('|'))), updates),
- key=itemgetter("type", "name"))
+ if blacklist:
+ # Protect internal plugins against removing
+ for i, (t, n) in enumerate(blacklisted):
+ if t == "internal":
+ blacklisted.pop(i)
+ continue
+
+ for idx, plugin in enumerate(upgradable):
+ if n == plugin['name'] and t == plugin['type']:
+ upgradable.pop(idx)
+ break
+
+ for t, n in self.removePlugins(sorted(blacklisted)):
+ self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
+ 'type': t,
+ 'name': n,
+ })
- for plugin in upgradable:
+ for plugin in sorted(upgradable, key=itemgetter("type", "name")):
filename = plugin['name']
type = plugin['type']
version = plugin['version']
@@ -215,34 +236,18 @@ class UpdateManager(Addon):
'newver': newver})
try:
content = getURL(url % plugin)
- m = vre.search(content)
+ m = self.VERSION.search(content)
if m and m.group(2) == version:
- f = open(safe_join("userplugins", prefix, filename), "wb")
- f.write(content)
- f.close()
+ with open(safe_join("userplugins", prefix, filename), "wb") as f:
+ f.write(content)
+
updated.append((prefix, name))
else:
raise Exception, _("Version mismatch")
except Exception, e:
- self.logError(_("Error updating plugin %s") % filename, e)
-
- if blacklist:
- blacklisted = map(lambda x: (x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]), blacklist)
-
- # Always protect internal plugins from removing
- for i, n, t in blacklisted.enumerate():
- if t == "internal":
- del blacklisted[i]
-
- blacklisted = sorted(blacklisted)
- removed = self.removePlugins(blacklisted)
- for t, n in removed:
- self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
- 'type': t,
- 'name': n,
- })
+ self.logError(_("Error updating plugin: %s") % filename, str(e))
if updated:
reloaded = self.core.pluginManager.reloadPlugins(updated)
diff --git a/pyload/plugins/container/RSDF.py b/pyload/plugins/container/RSDF.py
index 67325f20d..41811469f 100644
--- a/pyload/plugins/container/RSDF.py
+++ b/pyload/plugins/container/RSDF.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from __future__ import with_statement
+
import base64
import binascii
import re
@@ -10,7 +12,7 @@ from pyload.utils import fs_encode
class RSDF(Container):
__name__ = "RSDF"
- __version__ = "0.23"
+ __version__ = "0.24"
__pattern__ = r'.+\.rsdf'
diff --git a/pyload/plugins/crypter/FilecryptCc.py b/pyload/plugins/crypter/FilecryptCc.py
index cb00da5e9..160253c06 100644
--- a/pyload/plugins/crypter/FilecryptCc.py
+++ b/pyload/plugins/crypter/FilecryptCc.py
@@ -121,7 +121,7 @@ class FilecryptCc(Crypter):
vjk = re.findall('<input type="hidden" name="jk" value="function f\(\){ return \'(.*)\';}">', self.siteWithLinks)
vcrypted = re.findall('<input type="hidden" name="crypted" value="(.*)">', self.siteWithLinks)
- for i in range(0, len(vcrypted)):
+ for i in xrange(len(vcrypted)):
self.links.extend(self._getLinks(vcrypted[i], vjk[i]))
except Exception, e:
diff --git a/pyload/plugins/crypter/LinkCryptWs.py b/pyload/plugins/crypter/LinkCryptWs.py
index 144bd7bb2..219d836dd 100644
--- a/pyload/plugins/crypter/LinkCryptWs.py
+++ b/pyload/plugins/crypter/LinkCryptWs.py
@@ -15,14 +15,15 @@ from pyload.utils import html_unescape
class LinkCryptWs(Crypter):
__name__ = "LinkCryptWs"
__type__ = "crypter"
- __version__ = "0.06"
+ __version__ = "0.07"
__pattern__ = r'http://(?:www\.)?linkcrypt\.ws/(dir|container)/(?P<ID>\w+)'
__description__ = """LinkCrypt.ws decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("kagenoshin", "kagenoshin[AT]gmx[DOT]ch"),
- ("glukgluk", None)]
+ ("glukgluk", None),
+ ("Gummibaer", None)]
CRYPTED_KEY = "crypted"
@@ -37,7 +38,7 @@ class LinkCryptWs(Crypter):
def prepare(self):
# Init
- self.fileid = re.match(self.__pattern__, pyfile.url).group('ID')
+ self.fileid = re.match(self.__pattern__, self.pyfile.url).group('ID')
self.req.cj.setCookie("linkcrypt.ws", "language", "en")
diff --git a/pyload/plugins/crypter/MediafireCom.py b/pyload/plugins/crypter/MediafireCom.py
index 392f59c52..28c4fa984 100644
--- a/pyload/plugins/crypter/MediafireCom.py
+++ b/pyload/plugins/crypter/MediafireCom.py
@@ -42,8 +42,10 @@ class MediafireCom(Crypter):
folder_key = m.group(1)
self.logDebug("FOLDER KEY: %s" % folder_key)
- json_resp = json_loads(self.load(
- "http://www.mediafire.com/api/folder/get_info.php?folder_key=%s&response_format=json&version=1" % folder_key))
+ json_resp = json_loads(self.load("http://www.mediafire.com/api/folder/get_info.php",
+ get={'folder_key' : folder_key,
+ 'response_format': "json",
+ 'version' : 1}))
#self.logInfo(json_resp)
if json_resp['response']['result'] == "Success":
for link in json_resp['response']['folder_info']['files']:
diff --git a/pyload/plugins/crypter/RelinkUs.py b/pyload/plugins/crypter/RelinkUs.py
index 419ce4506..c5e312f93 100644
--- a/pyload/plugins/crypter/RelinkUs.py
+++ b/pyload/plugins/crypter/RelinkUs.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from __future__ import with_statement
+
import base64
import binascii
import re
@@ -12,7 +14,7 @@ from pyload.plugins.internal.Crypter import Crypter
class RelinkUs(Crypter):
__name__ = "RelinkUs"
__type__ = "crypter"
- __version__ = "3.1"
+ __version__ = "3.11"
__pattern__ = r'http://(?:www\.)?relink\.us/(f/|((view|go)\.php\?id=))(?P<id>.+)'
__config__ = [("use_subfolder", "bool", "Save package to subfolder", True),
diff --git a/pyload/plugins/hook/AlldebridCom.py b/pyload/plugins/hook/AlldebridCom.py
index 9ed80f101..cf79af917 100644
--- a/pyload/plugins/hook/AlldebridCom.py
+++ b/pyload/plugins/hook/AlldebridCom.py
@@ -22,6 +22,6 @@ class AlldebridCom(MultiHoster):
def getHoster(self):
https = "https" if self.getConfig("https") else "http"
- page = getURL(https + "://www.alldebrid.com/api.php?action=get_host").replace("\"", "").strip()
+ page = getURL(https + "://www.alldebrid.com/api.php", get={'action': "get_host"}).replace("\"", "").strip()
return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugins/hook/Captcha9kw.py b/pyload/plugins/hook/Captcha9kw.py
index fa4710542..f01f45011 100644
--- a/pyload/plugins/hook/Captcha9kw.py
+++ b/pyload/plugins/hook/Captcha9kw.py
@@ -16,18 +16,19 @@ from pyload.plugins.internal.Addon import Hook
class Captcha9kw(Hook):
__name__ = "Captcha9kw"
__type__ = "hook"
- __version__ = "0.24"
-
- __config__ = [("ssl", "bool", "Use HTTPS", True),
- ("force", "bool", "Force captcha resolving even if client is connected", True),
- ("confirm", "bool", "Confirm Captcha (cost +6 credits)", False),
- ("captchaperhour", "int", "Captcha per hour", "9999"),
- ("prio", "int", "Priority (max 10)(cost +0 -> +10 credits)", "0"),
- ("queue", "int", "Max. Queue (max 999)", "50"),
- ("hoster_options", "string", "Hoster options (format: pluginname:prio=1:selfsolfe=1:confirm=1:timeout=900|...)", "ShareonlineBiz:prio=0:timeout=999 | UploadedTo:prio=0:timeout=999"),
- ("selfsolve", "bool", "Selfsolve (manually solve your captcha in your 9kw client if active)", "0"),
- ("passkey", "password", "API key", ""),
- ("timeout", "int", "Timeout in seconds (min 60, max 3999)", "900")]
+ __version__ = "0.25"
+
+ __config__ = [("ssl" , "bool" , "Use HTTPS" , True ),
+ ("force" , "bool" , "Force captcha resolving even if client is connected" , True ),
+ ("confirm" , "bool" , "Confirm Captcha (cost +6 credits)" , False ),
+ ("captchaperhour", "int" , "Captcha per hour" , "9999" ),
+ ("captchapermin" , "int" , "Captcha per minute" , "9999" ),
+ ("prio" , "int" , "Priority (max 10)(cost +0 -> +10 credits)" , "0" ),
+ ("queue" , "int" , "Max. Queue (max 999)" , "50" ),
+ ("hoster_options", "string" , "Hoster options (format: pluginname:prio=1:selfsolfe=1:confirm=1:timeout=900|...)", "ShareonlineBiz:prio=0:timeout=999 | UploadedTo:prio=0:timeout=999"),
+ ("selfsolve" , "bool" , "Selfsolve (manually solve your captcha in your 9kw client if active)" , "0" ),
+ ("passkey" , "password", "API key" , "" ),
+ ("timeout" , "int" , "Timeout in seconds (min 60, max 3999)" , "900" )]
__description__ = """Send captchas to 9kw.eu"""
__license__ = "GPLv3"
@@ -52,7 +53,7 @@ class Captcha9kw(Hook):
if res.isdigit():
self.logInfo(_("%s credits left") % res)
- credits = self.info["credits"] = int(res)
+ credits = self.info['credits'] = int(res)
return credits
else:
self.logError(res)
@@ -82,7 +83,8 @@ class Captcha9kw(Hook):
'confirm' : self.getConfig("confirm"),
'timeout' : min(max(self.getConfig("timeout"), 300), 3999),
'selfsolve' : self.getConfig("selfsolve"),
- 'cph' : self.getConfig("captchaperhour")}
+ 'cph' : self.getConfig("captchaperhour"),
+ 'cpm' : self.getConfig("captchapermin")}
for opt in str(self.getConfig("hoster_options").split('|')):
@@ -109,6 +111,7 @@ class Captcha9kw(Hook):
'maxtimeout' : option['timeout'],
'selfsolve' : option['selfsolve'],
'captchaperhour': option['cph'],
+ 'captchapermin' : option['cpm'],
'case-sensitive': option['case_sensitive'],
'min_len' : option['min'],
'max_len' : option['max'],
diff --git a/pyload/plugins/hook/DebridItaliaCom.py b/pyload/plugins/hook/DebridItaliaCom.py
index 9c8f866f0..01e085ad2 100644
--- a/pyload/plugins/hook/DebridItaliaCom.py
+++ b/pyload/plugins/hook/DebridItaliaCom.py
@@ -1,12 +1,15 @@
# -*- coding: utf-8 -*-
+import re
+
+from pyload.network.RequestFactory import getURL
from pyload.plugins.internal.MultiHoster import MultiHoster
class DebridItaliaCom(MultiHoster):
__name__ = "DebridItaliaCom"
__type__ = "hook"
- __version__ = "0.07"
+ __version__ = "0.08"
__config__ = [("hosterListMode", "all;listed;unlisted", "Use for hosters (if supported)", "all"),
("hosterList", "str", "Hoster list (comma separated)", ""),
@@ -15,14 +18,10 @@ class DebridItaliaCom(MultiHoster):
__description__ = """Debriditalia.com hook plugin"""
__license__ = "GPLv3"
- __authors__ = [("stickell", "l.stickell@yahoo.it")]
+ __authors__ = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
def getHoster(self):
- return ["netload.in", "hotfile.com", "rapidshare.com", "multiupload.com",
- "uploading.com", "megashares.com", "crocko.com", "filepost.com",
- "bitshare.com", "share-links.biz", "putlocker.com", "uploaded.to",
- "speedload.org", "rapidgator.net", "likeupload.net", "cyberlocker.ch",
- "depositfiles.com", "extabit.com", "filefactory.com", "sharefiles.co",
- "ryushare.com", "tusfiles.net", "nowvideo.co", "cloudzer.net", "letitbit.net",
- "easybytez.com", "uptobox.com", "ddlstorage.com"]
+ html = getURL("http://www.debriditalia.com/status.php")
+ return re.findall(r'title="(.+?)"> \1</td><td><img src="/images/(?:attivo|testing)', html)
diff --git a/pyload/plugins/hook/EasybytezCom.py b/pyload/plugins/hook/EasybytezCom.py
index 15033c8e7..9d92b96f7 100644
--- a/pyload/plugins/hook/EasybytezCom.py
+++ b/pyload/plugins/hook/EasybytezCom.py
@@ -23,14 +23,17 @@ class EasybytezCom(MultiHoster):
user = self.account.selectAccount()[0]
try:
- req = self.account.getAccountRequest(user)
+ req = self.account.getAccountRequest(user)
page = req.load("http://www.easybytez.com")
- m = re.search(r'</textarea>\s*Supported sites:(.*)', page)
- return m.group(1).split(',')
+ hosters = re.search(r'</textarea>\s*Supported sites:(.*)', page).group(1).split(',')
+
except Exception, e:
- self.logDebug(e)
self.logWarning(_("Unable to load supported hoster list, using last known"))
- return ["bitshare.com", "crocko.com", "ddlstorage.com", "depositfiles.com", "extabit.com", "hotfile.com",
- "mediafire.com", "netload.in", "rapidgator.net", "rapidshare.com", "uploading.com", "uload.to",
- "uploaded.to"]
+ self.logDebug(e)
+
+ hosters = ["bitshare.com", "crocko.com", "ddlstorage.com", "depositfiles.com", "extabit.com", "hotfile.com",
+ "mediafire.com", "netload.in", "rapidgator.net", "rapidshare.com", "uploading.com", "uload.to",
+ "uploaded.to"]
+ finally:
+ return hosters
diff --git a/pyload/plugins/hook/FastixRu.py b/pyload/plugins/hook/FastixRu.py
index a7a5e6b8c..0ae853544 100644
--- a/pyload/plugins/hook/FastixRu.py
+++ b/pyload/plugins/hook/FastixRu.py
@@ -20,8 +20,9 @@ class FastixRu(MultiHoster):
def getHoster(self):
- page = getURL(
- "http://fastix.ru/api_v2/?apikey=5182964c3f8f9a7f0b00000a_kelmFB4n1IrnCDYuIFn2y&sub=allowed_sources")
+ page = getURL("http://fastix.ru/api_v2",
+ get={'apikey': "5182964c3f8f9a7f0b00000a_kelmFB4n1IrnCDYuIFn2y",
+ 'sub' : "allowed_sources"})
host_list = json_loads(page)
host_list = host_list['allow']
return host_list
diff --git a/pyload/plugins/hook/FreeWayMe.py b/pyload/plugins/hook/FreeWayMe.py
index b9955c90a..5d955e156 100644
--- a/pyload/plugins/hook/FreeWayMe.py
+++ b/pyload/plugins/hook/FreeWayMe.py
@@ -20,6 +20,6 @@ class FreeWayMe(MultiHoster):
def getHoster(self):
- hostis = getURL("https://www.free-way.me/ajax/jd.php", get={"id": 3}).replace("\"", "").strip()
+ hostis = getURL("https://www.free-way.me/ajax/jd.php", get={'id': 3}).replace("\"", "").strip()
self.logDebug("Hosters", hostis)
return [x.strip() for x in hostis.split(",") if x.strip()]
diff --git a/pyload/plugins/hook/LinksnappyCom.py b/pyload/plugins/hook/LinksnappyCom.py
index 0957b6a91..381eb6a2a 100644
--- a/pyload/plugins/hook/LinksnappyCom.py
+++ b/pyload/plugins/hook/LinksnappyCom.py
@@ -21,7 +21,7 @@ class LinksnappyCom(MultiHoster):
def getHoster(self):
- json_data = getURL('http://gen.linksnappy.com/lseAPI.php?act=FILEHOSTS')
+ json_data = getURL("http://gen.linksnappy.com/lseAPI.php", get={'act': "FILEHOSTS"})
json_data = json_loads(json_data)
return json_data['return'].keys()
diff --git a/pyload/plugins/hook/MegaDebridEu.py b/pyload/plugins/hook/MegaDebridEu.py
index 6c3e2b03a..5a52dbf41 100644
--- a/pyload/plugins/hook/MegaDebridEu.py
+++ b/pyload/plugins/hook/MegaDebridEu.py
@@ -18,7 +18,7 @@ class MegaDebridEu(MultiHoster):
def getHoster(self):
- reponse = getURL('http://www.mega-debrid.eu/api.php?action=getHosters')
+ reponse = getURL("http://www.mega-debrid.eu/api.php", get={'action': "getHosters"})
json_data = json_loads(reponse)
if json_data['response_code'] == "ok":
diff --git a/pyload/plugins/hook/MyfastfileCom.py b/pyload/plugins/hook/MyfastfileCom.py
index affaa2261..07988d2b9 100644
--- a/pyload/plugins/hook/MyfastfileCom.py
+++ b/pyload/plugins/hook/MyfastfileCom.py
@@ -23,7 +23,7 @@ class MyfastfileCom(MultiHoster):
def getHoster(self):
- json_data = getURL('http://myfastfile.com/api.php?hosts', decode=True)
+ json_data = getURL("http://myfastfile.com/api.php", get={'hosts': ""}, decode=True)
self.logDebug("JSON data", json_data)
json_data = json_loads(json_data)
diff --git a/pyload/plugins/hook/OverLoadMe.py b/pyload/plugins/hook/OverLoadMe.py
index 2766165fd..83ce3e842 100644
--- a/pyload/plugins/hook/OverLoadMe.py
+++ b/pyload/plugins/hook/OverLoadMe.py
@@ -23,8 +23,7 @@ class OverLoadMe(MultiHoster):
def getHoster(self):
https = "https" if self.getConfig("https") else "http"
page = getURL(https + "://api.over-load.me/hoster.php",
- get={"auth": "0001-cb1f24dadb3aa487bda5afd3b76298935329be7700cd7-5329be77-00cf-1ca0135f"}
- ).replace("\"", "").strip()
+ get={'auth': "0001-cb1f24dadb3aa487bda5afd3b76298935329be7700cd7-5329be77-00cf-1ca0135f"}).replace("\"", "").strip()
self.logDebug("Hosterlist", page)
return [x.strip() for x in page.split(",") if x.strip()]
diff --git a/pyload/plugins/hook/PremiumizeMe.py b/pyload/plugins/hook/PremiumizeMe.py
index a751e7b61..5824115b6 100644
--- a/pyload/plugins/hook/PremiumizeMe.py
+++ b/pyload/plugins/hook/PremiumizeMe.py
@@ -30,8 +30,8 @@ class PremiumizeMe(MultiHoster):
# Get supported hosters list from premiumize.me using the
# json API v1 (see https://secure.premiumize.me/?show=api)
- answer = getURL("https://api.premiumize.me/pm-api/v1.php?method=hosterlist&params[login]=%s&params[pass]=%s" % (
- user, data['password']))
+ answer = getURL("https://api.premiumize.me/pm-api/v1.php",
+ get={'method': "hosterlist", 'params[login]': user, 'params[pass]': data['password']})
data = json_loads(answer)
# If account is not valid there are no hosters available
diff --git a/pyload/plugins/hook/RPNetBiz.py b/pyload/plugins/hook/RPNetBiz.py
index 3bbdcf839..dc3caf39e 100644
--- a/pyload/plugins/hook/RPNetBiz.py
+++ b/pyload/plugins/hook/RPNetBiz.py
@@ -29,7 +29,7 @@ class RPNetBiz(MultiHoster):
(user, data) = self.account.selectAccount()
res = getURL("https://premium.rpnet.biz/client_api.php",
- get={"username": user, "password": data['password'], "action": "showHosterList"})
+ get={'username': user, 'password': data['password'], 'action': "showHosterList"})
hoster_list = json_loads(res)
# If account is not valid there are no hosters available
diff --git a/pyload/plugins/hook/RehostTo.py b/pyload/plugins/hook/RehostTo.py
index 2c8739869..6c334bf06 100644
--- a/pyload/plugins/hook/RehostTo.py
+++ b/pyload/plugins/hook/RehostTo.py
@@ -20,7 +20,8 @@ class RehostTo(MultiHoster):
def getHoster(self):
- page = getURL("http://rehost.to/api.php?cmd=get_supported_och_dl&long_ses=%s" % self.long_ses)
+ page = getURL("http://rehost.to/api.php",
+ get={'cmd': "get_supported_och_dl", 'long_ses': self.long_ses})
return [x.strip() for x in page.replace("\"", "").split(",")]
diff --git a/pyload/plugins/hook/SimplyPremiumCom.py b/pyload/plugins/hook/SimplyPremiumCom.py
index 9945cce38..8c32cbf7a 100644
--- a/pyload/plugins/hook/SimplyPremiumCom.py
+++ b/pyload/plugins/hook/SimplyPremiumCom.py
@@ -21,7 +21,7 @@ class SimplyPremiumCom(MultiHoster):
def getHoster(self):
- json_data = getURL('http://www.simply-premium.com/api/hosts.php?format=json&online=1')
+ json_data = getURL("http://www.simply-premium.com/api/hosts.php", get={'format': "json", 'online': 1})
json_data = json_loads(json_data)
host_list = [element['regex'] for element in json_data['result']]
diff --git a/pyload/plugins/hook/SimplydebridCom.py b/pyload/plugins/hook/SimplydebridCom.py
index 4668da45b..89d8cb752 100644
--- a/pyload/plugins/hook/SimplydebridCom.py
+++ b/pyload/plugins/hook/SimplydebridCom.py
@@ -18,5 +18,5 @@ class SimplydebridCom(MultiHoster):
def getHoster(self):
- page = getURL("http://simply-debrid.com/api.php?list=1")
+ page = getURL("http://simply-debrid.com/api.php", get={'list': 1})
return [x.strip() for x in page.rstrip(';').replace("\"", "").split(";")]
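These hoster-list endpoints answer with one delimiter-separated string, which the surrounding list comprehensions normalise. A small self-contained sketch of that clean-up (the sample response is invented):

    # Sketch only: turning a quoted, delimiter-separated hoster list into a
    # clean Python list, dropping empty entries.
    def parse_hoster_list(page, sep=";"):
        return [x.strip() for x in page.rstrip(sep).replace('"', "").split(sep) if x.strip()]

    print parse_hoster_list('"uploaded.net"; "share-online.biz" ;;')
    # -> ['uploaded.net', 'share-online.biz']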
diff --git a/pyload/plugins/hook/UnrestrictLi.py b/pyload/plugins/hook/UnrestrictLi.py
index cfe580048..255cee43d 100644
--- a/pyload/plugins/hook/UnrestrictLi.py
+++ b/pyload/plugins/hook/UnrestrictLi.py
@@ -22,7 +22,7 @@ class UnrestrictLi(MultiHoster):
def getHoster(self):
- json_data = getURL('http://unrestrict.li/api/jdownloader/hosts.php?format=json')
+ json_data = getURL("http://unrestrict.li/api/jdownloader/hosts.php", get={'format': "json"})
json_data = json_loads(json_data)
host_list = [element['host'] for element in json_data['result']]
diff --git a/pyload/plugins/hook/XFileSharingPro.py b/pyload/plugins/hook/XFileSharingPro.py
index 268e91909..42c4c6264 100644
--- a/pyload/plugins/hook/XFileSharingPro.py
+++ b/pyload/plugins/hook/XFileSharingPro.py
@@ -8,11 +8,11 @@ from pyload.plugins.internal.Addon import Hook
class XFileSharingPro(Hook):
__name__ = "XFileSharingPro"
__type__ = "hook"
- __version__ = "0.24"
+ __version__ = "0.25"
__config__ = [("activated", "bool", "Activated", True),
("use_hoster_list", "bool", "Load listed hosters only", True),
- ("use_crypter_list", "bool", "Load listed crypters only", True),
+ ("use_crypter_list", "bool", "Load listed crypters only", False),
("use_builtin_list", "bool", "Load built-in plugin list", True),
("hoster_list", "str", "Hoster list (comma separated)", ""),
("crypter_list", "str", "Crypter list (comma separated)", "")]
@@ -23,9 +23,9 @@ class XFileSharingPro(Hook):
# event_list = ["pluginConfigChanged"]
- regexp = {'hoster' : (r'https?://(?:www\.)?([\w^_]+(?:\.[a-zA-Z]{2,})+(?:\:\d+)?)/(?:embed-)?\w{12}(?:\W|$)',
+ regexp = {'hoster' : (r'https?://(?:www\.)?([\w.^_]+(?:\.[a-zA-Z]{2,})(?:\:\d+)?)/(?:embed-)?\w{12}(?:\W|$)',
r'https?://(?:[^/]+\.)?(%s)/(?:embed-)?\w+'),
- 'crypter': (r'https?://(?:www\.)?([\w^_]+(?:\.[a-zA-Z]{2,})+(?:\:\d+)?)/(?:user|folder)s?/\w+',
+ 'crypter': (r'https?://(?:www\.)?([\w.^_]+(?:\.[a-zA-Z]{2,})(?:\:\d+)?)/(?:user|folder)s?/\w+',
r'https?://(?:[^/]+\.)?(%s)/(?:user|folder)s?/\w+')}
HOSTER_LIST = [#WORKING HOSTERS:
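The widened character class in the hoster regexp above lets the first group capture sub-domains along with the bare domain. A quick sketch with two invented links:

    import re

    # Sketch only: the dot added to the first character class lets the hoster
    # group swallow sub-domains too.
    pattern = r'https?://(?:www\.)?([\w.^_]+(?:\.[a-zA-Z]{2,})(?:\:\d+)?)/(?:embed-)?\w{12}(?:\W|$)'

    for link in ("http://uploadhost.com/abcdefghij12",
                 "http://dl3.uploadhost.com/abcdefghij12"):
        print re.match(pattern, link).group(1)
    # -> uploadhost.com
    # -> dl3.uploadhost.com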
diff --git a/pyload/plugins/hook/ZeveraCom.py b/pyload/plugins/hook/ZeveraCom.py
index 09e3953a2..21fdf6c92 100644
--- a/pyload/plugins/hook/ZeveraCom.py
+++ b/pyload/plugins/hook/ZeveraCom.py
@@ -18,5 +18,5 @@ class ZeveraCom(MultiHoster):
def getHoster(self):
- page = getURL("http://www.zevera.com/jDownloader.ashx?cmd=gethosters")
+ page = getURL("http://www.zevera.com/jDownloader.ashx", get={'cmd': "gethosters"})
return [x.strip() for x in page.replace("\"", "").split(",")]
diff --git a/pyload/plugins/hoster/AlldebridCom.py b/pyload/plugins/hoster/AlldebridCom.py
index 7e5adf8ba..d575c305e 100644
--- a/pyload/plugins/hoster/AlldebridCom.py
+++ b/pyload/plugins/hoster/AlldebridCom.py
@@ -45,12 +45,10 @@ class AlldebridCom(Hoster):
self.fail(_("No AllDebrid account provided"))
else:
self.logDebug("Old URL: %s" % pyfile.url)
- password = self.getPassword().splitlines()
- password = "" if not password else password[0]
+            password = (self.getPassword().splitlines() or [""])[0]
- url = "http://www.alldebrid.com/service.php?link=%s&json=true&pw=%s" % (pyfile.url, password)
- page = self.load(url)
- data = json_loads(page)
+ data = json_loads(self.load("http://www.alldebrid.com/service.php",
+ get={'link': pyfile.url, 'json': "true", 'pw': password}))
self.logDebug("Json data", data)
diff --git a/pyload/plugins/hoster/BezvadataCz.py b/pyload/plugins/hoster/BezvadataCz.py
index 2f2afc6ac..431fbfbeb 100644
--- a/pyload/plugins/hoster/BezvadataCz.py
+++ b/pyload/plugins/hoster/BezvadataCz.py
@@ -23,7 +23,8 @@ class BezvadataCz(SimpleHoster):
def setup(self):
- self.multiDL = self.resumeDownload = True
+ self.resumeDownload = True
+ self.multiDL = True
def handleFree(self):
@@ -83,6 +84,8 @@ class BezvadataCz(SimpleHoster):
elif '<div class="infobox' in self.html:
self.tempOffline()
+ self.info.pop('error', None)
+
def loadcaptcha(self, data, *args, **kwargs):
return data.decode("base64")
diff --git a/pyload/plugins/hoster/BitshareCom.py b/pyload/plugins/hoster/BitshareCom.py
index a557a43b0..02aa23036 100644
--- a/pyload/plugins/hoster/BitshareCom.py
+++ b/pyload/plugins/hoster/BitshareCom.py
@@ -31,7 +31,7 @@ class BitshareCom(SimpleHoster):
def setup(self):
- self.multiDL = self.premium
+ self.multiDL = self.premium
self.chunkLimit = 1
diff --git a/pyload/plugins/hoster/CatShareNet.py b/pyload/plugins/hoster/CatShareNet.py
index c6600f4b4..089e137a0 100644
--- a/pyload/plugins/hoster/CatShareNet.py
+++ b/pyload/plugins/hoster/CatShareNet.py
@@ -31,7 +31,7 @@ class CatShareNet(SimpleHoster):
def setup(self):
- self.multiDL = self.premium
+ self.multiDL = self.premium
self.resumeDownload = True
diff --git a/pyload/plugins/hoster/CrockoCom.py b/pyload/plugins/hoster/CrockoCom.py
index 012fb7f0b..6d86741d2 100644
--- a/pyload/plugins/hoster/CrockoCom.py
+++ b/pyload/plugins/hoster/CrockoCom.py
@@ -37,7 +37,7 @@ class CrockoCom(SimpleHoster):
for _i in xrange(5):
m = re.search(self.CAPTCHA_PATTERN, self.html)
if m:
- url, wait_time = 'http://crocko.com' + m.group(1), m.group(2)
+ url, wait_time = 'http://crocko.com' + m.group(1), int(m.group(2))
self.wait(wait_time)
self.html = self.load(url)
else:
diff --git a/pyload/plugins/hoster/DailymotionCom.py b/pyload/plugins/hoster/DailymotionCom.py
index cd66e6e21..9e665912a 100644
--- a/pyload/plugins/hoster/DailymotionCom.py
+++ b/pyload/plugins/hoster/DailymotionCom.py
@@ -9,19 +9,17 @@ from pyload.plugins.internal.Hoster import Hoster
def getInfo(urls):
- result = [] #: [ .. (name, size, status, url) .. ]
- regex = re.compile(DailymotionCom.__pattern__)
- apiurl = "https://api.dailymotion.com/video/"
+ result = []
+ regex = re.compile(DailymotionCom.__pattern__)
+ apiurl = "https://api.dailymotion.com/video/%s"
request = {"fields": "access_error,status,title"}
+
for url in urls:
- id = regex.search(url).group("ID")
- page = getURL(apiurl + id, get=request)
+ id = regex.match(url).group("ID")
+ page = getURL(apiurl % id, get=request)
info = json_loads(page)
- if "title" in info:
- name = info['title'] + ".mp4"
- else:
- name = url
+ name = info['title'] + ".mp4" if "title" in info else url
if "error" in info or info['access_error']:
status = "offline"
@@ -35,6 +33,7 @@ def getInfo(urls):
status = "offline"
result.append((name, 0, statusMap[status], url))
+
return result
@@ -43,8 +42,8 @@ class DailymotionCom(Hoster):
__type__ = "hoster"
__version__ = "0.2"
- __pattern__ = r'https?://(?:www\.)?dailymotion\.com/.*?video/(?P<ID>[\w^_]+)'
- __config__ = [("quality", "Lowest;LD 144p;LD 240p;SD 384p;HQ 480p;HD 720p;HD 1080p;Highest", "Quality", "Highest")]
+ __pattern__ = r'https?://(?:www\.)?dailymotion\.com/.*video/(?P<ID>[\w^_]+)'
+ __config__ = [("quality", "Lowest;LD 144p;LD 240p;SD 384p;HQ 480p;HD 720p;HD 1080p;Highest", "Quality", "Highest")]
__description__ = """Dailymotion.com hoster plugin"""
__license__ = "GPLv3"
@@ -52,29 +51,36 @@ class DailymotionCom(Hoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
+ self.resumeDownload = True
+ self.multiDL = True
def getStreams(self):
streams = []
+
for result in re.finditer(r"\"(?P<URL>http:\\/\\/www.dailymotion.com\\/cdn\\/H264-(?P<QF>.*?)\\.*?)\"",
self.html):
url = result.group("URL")
- qf = result.group("QF")
- link = url.replace("\\", "")
+ qf = result.group("QF")
+
+ link = url.replace("\\", "")
quality = tuple(int(x) for x in qf.split("x"))
+
streams.append((quality, link))
+
return sorted(streams, key=lambda x: x[0][::-1])
def getQuality(self):
q = self.getConfig("quality")
+
if q == "Lowest":
quality = 0
elif q == "Highest":
quality = -1
else:
quality = int(q.rsplit(" ")[1][:-1])
+
return quality
@@ -91,14 +97,18 @@ class DailymotionCom(Hoster):
idx = quality
s = streams[idx]
+
self.logInfo(_("Download video quality %sx%s") % s[0])
+
return s[1]
def checkInfo(self, pyfile):
pyfile.name, pyfile.size, pyfile.status, pyfile.url = getInfo([pyfile.url])[0]
+
if pyfile.status == 1:
self.offline()
+
elif pyfile.status == 6:
self.tempOffline()
@@ -111,6 +121,5 @@ class DailymotionCom(Hoster):
streams = self.getStreams()
quality = self.getQuality()
- link = self.getLink(streams, quality)
- self.download(link)
+ self.download(self.getLink(streams, quality))
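getStreams() collects (width, height) tuples and sorts on the reversed tuple, so index 0 is the lowest and -1 the highest quality. A short sketch with made-up CDN links:

    # Sketch only: how the stream list is ordered and indexed. Sorting on the
    # reversed quality tuple ranks streams by height first.
    streams = [((1280, 720), "http://cdn/hd720.mp4"),
               ((640, 360), "http://cdn/sd360.mp4"),
               ((1920, 1080), "http://cdn/hd1080.mp4")]

    streams = sorted(streams, key=lambda x: x[0][::-1])

    print streams[0][1]    # quality  0 -> lowest  (sd360)
    print streams[-1][1]   # quality -1 -> highest (hd1080)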
diff --git a/pyload/plugins/hoster/DataHu.py b/pyload/plugins/hoster/DataHu.py
index adadbfe5d..6210f18f4 100644
--- a/pyload/plugins/hoster/DataHu.py
+++ b/pyload/plugins/hoster/DataHu.py
@@ -28,18 +28,15 @@ class DataHu(SimpleHoster):
def setup(self):
self.resumeDownload = True
- self.multiDL = self.premium
+ self.multiDL = self.premium
def handleFree(self):
m = re.search(self.LINK_PATTERN, self.html)
- if m:
- url = m.group(1)
- self.logDebug("Direct link: " + url)
- else:
+ if m is None:
self.error(_("LINK_PATTERN not found"))
- self.download(url, disposition=True)
+ self.download(m.group(1), disposition=True)
getInfo = create_getInfo(DataHu)
diff --git a/pyload/plugins/hoster/DateiTo.py b/pyload/plugins/hoster/DateiTo.py
index 2f83960e6..d22a0a3ce 100644
--- a/pyload/plugins/hoster/DateiTo.py
+++ b/pyload/plugins/hoster/DateiTo.py
@@ -9,7 +9,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class DateiTo(SimpleHoster):
__name__ = "DateiTo"
__type__ = "hoster"
- __version__ = "0.04"
+ __version__ = "0.05"
__pattern__ = r'http://(?:www\.)?datei\.to/datei/(?P<ID>\w+)\.html'
@@ -18,18 +18,19 @@ class DateiTo(SimpleHoster):
__authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
- NAME_PATTERN = r'Dateiname:</td>\s*<td colspan="2"><strong>(?P<N>.*?)</'
- SIZE_PATTERN = r'Dateigr&ouml;&szlig;e:</td>\s*<td colspan="2">(?P<S>.*?)</'
+ NAME_PATTERN = r'Dateiname:</td>\s*<td colspan="2"><strong>(?P<N>.*?)</'
+ SIZE_PATTERN = r'Dateigr&ouml;&szlig;e:</td>\s*<td colspan="2">(?P<S>.*?)</'
    OFFLINE_PATTERN = r'>Datei wurde nicht gefunden<|>Bitte wähle deine Datei aus... <'
-    PARALELL_PATTERN = r'>Du lädst bereits eine Datei herunter<'
- WAIT_PATTERN = r'countdown\({seconds: (\d+)'
+ WAIT_PATTERN = r'countdown\({seconds: (\d+)'
+    MULTIDL_PATTERN = r'>Du lädst bereits eine Datei herunter<'
+
DATA_PATTERN = r'url: "(.*?)", data: "(.*?)",'
def handleFree(self):
url = 'http://datei.to/ajax/download.php'
- data = {'P': 'I', 'ID': self.info['ID']}
+ data = {'P': 'I', 'ID': self.info['pattern']['ID']}
recaptcha = ReCaptcha(self)
for _i in xrange(10):
@@ -55,16 +56,19 @@ class DateiTo(SimpleHoster):
else:
self.fail(_("Too bad..."))
- download_url = self.html
- self.download(download_url)
+ self.download(self.html)
def checkErrors(self):
- m = re.search(self.PARALELL_PATTERN, self.html)
+ m = re.search(self.MULTIDL_PATTERN, self.html)
if m:
m = re.search(self.WAIT_PATTERN, self.html)
wait_time = int(m.group(1)) if m else 30
- self.retry(wait_time=wait_time)
+
+ errmsg = self.info['error'] = _("Parallel downloads")
+ self.retry(wait_time=wait_time, reason=errmsg)
+
+ self.info.pop('error', None)
def doWait(self):
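The reworked checkErrors() methods in this merge share one convention: store the message in self.info['error'] so status queries can see it, let retry() abort the attempt, and pop the key again on a clean pass. A self-contained sketch of that flow (Retry stands in for the exception retry() raises):

    # Sketch only (standalone): the error-reporting convention used by the
    # new checkErrors() methods.
    class Retry(Exception):
        pass

    def check_errors(info, blocked):
        if blocked:
            info['error'] = "Parallel downloads"   # visible to status queries
            raise Retry(info['error'])             # stands in for self.retry()
        info.pop('error', None)                    # clean run: drop stale errors

    info = {'error': "old message"}
    check_errors(info, blocked=False)
    print info   # -> {}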
diff --git a/pyload/plugins/hoster/DebridItaliaCom.py b/pyload/plugins/hoster/DebridItaliaCom.py
index 81cf0b830..a629b06a9 100644
--- a/pyload/plugins/hoster/DebridItaliaCom.py
+++ b/pyload/plugins/hoster/DebridItaliaCom.py
@@ -3,48 +3,51 @@
import re
from pyload.plugins.internal.Hoster import Hoster
+from pyload.plugins.internal.SimpleHoster import replace_patterns
class DebridItaliaCom(Hoster):
__name__ = "DebridItaliaCom"
__type__ = "hoster"
- __version__ = "0.05"
+ __version__ = "0.07"
- __pattern__ = r'https?://(?:[^/]*\.)?debriditalia\.com'
+ __pattern__ = r'http://s\d+\.debriditalia\.com/dl/\d+'
__description__ = """Debriditalia.com hoster plugin"""
__license__ = "GPLv3"
- __authors__ = [("stickell", "l.stickell@yahoo.it")]
+ __authors__ = [("stickell", "l.stickell@yahoo.it"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+    URL_REPLACEMENTS = [(r'(/dl/\d+)$', r'\1/')]
def setup(self):
- self.chunkLimit = -1
+ self.chunkLimit = -1
self.resumeDownload = True
def process(self, pyfile):
+        pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
+
if re.match(self.__pattern__, pyfile.url):
- new_url = pyfile.url
+ link = pyfile.url
+
elif not self.account:
self.logError(_("Please enter your %s account or deactivate this plugin") % "DebridItalia")
self.fail(_("No DebridItalia account provided"))
+
else:
- self.logDebug("Old URL: %s" % pyfile.url)
- url = "http://debriditalia.com/linkgen2.php?xjxfun=convertiLink&xjxargs[]=S<![CDATA[%s]]>" % pyfile.url
- page = self.load(url)
- self.logDebug("XML data: %s" % page)
+ html = self.load("http://www.debriditalia.com/api.php", get={'generate': "", 'link': pyfile.url})
- if 'File not available' in page:
- self.fail(_("File not available"))
- else:
- new_url = re.search(r'<a href="(?:[^"]+)">(?P<direct>[^<]+)</a>', page).group('direct')
+ if "ERROR" in html:
+                self.fail(re.search(r'ERROR:(.*)', html).group(1).strip())
- if new_url != pyfile.url:
- self.logDebug("New URL: %s" % new_url)
+ link = html.strip()
- self.download(new_url, disposition=True)
+ self.download(link, disposition=True)
- check = self.checkDownload({"empty": re.compile(r"^$")})
+ check = self.checkDownload({'empty': re.compile(r'^$')})
if check == "empty":
self.retry(5, 2 * 60, "Empty file downloaded")
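URL_REPLACEMENTS is a list of (pattern, replacement) pairs which SimpleHoster's replace_patterns() is assumed to feed through re.sub; note the raw string for the backreference, since a plain '\1' is the control character \x01. A minimal sketch:

    import re

    # Sketch only: what replace_patterns() boils down to for this rule set.
    URL_REPLACEMENTS = [(r'(/dl/\d+)$', r'\1/')]

    def replace_patterns(url, rules):
        for pattern, repl in rules:
            url = re.sub(pattern, repl, url)
        return url

    print replace_patterns("http://s1.debriditalia.com/dl/12345", URL_REPLACEMENTS)
    # -> http://s1.debriditalia.com/dl/12345/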
diff --git a/pyload/plugins/hoster/DlFreeFr.py b/pyload/plugins/hoster/DlFreeFr.py
index 2bfd88c22..d9950f0a4 100644
--- a/pyload/plugins/hoster/DlFreeFr.py
+++ b/pyload/plugins/hoster/DlFreeFr.py
@@ -59,10 +59,10 @@ class AdYouLike(object):
# "all":{"element_id":"ayl_private_cap_92300","lang":"fr","env":"prod"}}
ayl_data = json_loads(adyoulike_data_string)
- res = self.plugin.load(
- r'http://api-ayl.appspot.com/challenge?key=%(ayl_key)s&env=%(ayl_env)s&callback=%(callback)s' % {
- "ayl_key": ayl_data[self.engine]['key'], "ayl_env": ayl_data['all']['env'],
- "callback": self.ADYOULIKE_CALLBACK})
+ res = self.plugin.load("http://api-ayl.appspot.com/challenge",
+ get={'key' : ayl_data[self.engine]['key'],
+ 'env' : ayl_data['all']['env'],
+ 'callback': self.ADYOULIKE_CALLBACK})
m = re.search(self.ADYOULIKE_CHALLENGE_PATTERN, res)
challenge_string = None
@@ -130,9 +130,10 @@ class DlFreeFr(SimpleHoster):
def setup(self):
- self.multiDL = self.resumeDownload = True
- self.limitDL = 5
- self.chunkLimit = 1
+ self.resumeDownload = True
+ self.multiDL = True
+ self.limitDL = 5
+ self.chunkLimit = 1
def init(self):
diff --git a/pyload/plugins/hoster/FastixRu.py b/pyload/plugins/hoster/FastixRu.py
index 0e353f362..42be856ed 100644
--- a/pyload/plugins/hoster/FastixRu.py
+++ b/pyload/plugins/hoster/FastixRu.py
@@ -46,10 +46,13 @@ class FastixRu(Hoster):
self.logDebug("Old URL: %s" % pyfile.url)
api_key = self.account.getAccountData(self.user)
api_key = api_key['api']
- url = "http://fastix.ru/api_v2/?apikey=%s&sub=getdirectlink&link=%s" % (api_key, pyfile.url)
- page = self.load(url)
+
+ page = self.load("http://fastix.ru/api_v2/",
+ get={'apikey': api_key, 'sub': "getdirectlink", 'link': pyfile.url})
data = json_loads(page)
+
self.logDebug("Json data", data)
+
if "error\":true" in page:
self.offline()
else:
diff --git a/pyload/plugins/hoster/FastshareCz.py b/pyload/plugins/hoster/FastshareCz.py
index 17ba2add4..11ae4ca42 100644
--- a/pyload/plugins/hoster/FastshareCz.py
+++ b/pyload/plugins/hoster/FastshareCz.py
@@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
-#
-# Test links:
-# http://www.fastshare.cz/2141189/random.bin
import re
@@ -13,33 +10,43 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class FastshareCz(SimpleHoster):
__name__ = "FastshareCz"
__type__ = "hoster"
- __version__ = "0.23"
+ __version__ = "0.24"
__pattern__ = r'http://(?:www\.)?fastshare\.cz/\d+/.+'
__description__ = """FastShare.cz hoster plugin"""
__license__ = "GPLv3"
- __authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it"),
- ("Walter Purcaro", "vuolter@gmail.com")]
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+ URL_REPLACEMENTS = [("#.*", "")]
+
+ COOKIES = [("fastshare.cz", "lang", "en")]
+ CONTENT_DISPOSITION = True
- INFO_PATTERN = r'<h1 class="dwp">(?P<N>[^<]+)</h1>\s*<div class="fileinfo">\s*Size\s*: (?P<S>\d+) (?P<U>[\w^_]+),'
+ INFO_PATTERN = r'<h1 class="dwp">(?P<N>[^<]+)</h1>\s*<div class="fileinfo">\s*Size\s*: (?P<S>\d+) (?P<U>[\w^_]+),'
OFFLINE_PATTERN = r'>(The file has been deleted|Requested page not found)'
- URL_REPLACEMENTS = [("#.*", "")]
+ LINK_FREE_PATTERN = r'action=(/free/.*?)>\s*<img src="([^"]*)"><br'
+ LINK_PREMIUM_PATTERN = r'(http://data\d+\.fastshare\.cz/download\.php\?id=\d+&)'
- COOKIES = [("fastshare.cz", "lang", "en")]
+ SLOT_ERROR = "> 100% of FREE slots are full"
+ CREDIT_ERROR = " credit for "
- FREE_URL_PATTERN = r'action=(/free/.*?)>\s*<img src="([^"]*)"><br'
- PREMIUM_URL_PATTERN = r'(http://data\d+\.fastshare\.cz/download\.php\?id=\d+&)'
- CREDIT_PATTERN = r' credit for '
+ def checkErrors(self):
+ if self.SLOT_ERROR in self.html:
+ errmsg = self.info['error'] = _("No free slots")
+ self.retry(12, 60, errmsg)
- def handleFree(self):
- if "> 100% of FREE slots are full" in self.html:
- self.retry(12, 60, _("No free slots"))
+ if self.CREDIT_ERROR in self.html:
+ errmsg = self.info['error'] = _("Not enough traffic left")
+ self.logWarning(errmsg)
+ self.resetAccount()
+
+ self.info.pop('error', None)
+
+ def handleFree(self):
        m = re.search(self.LINK_FREE_PATTERN, self.html)
if m:
action, captcha_src = m.groups()
@@ -48,38 +55,23 @@ class FastshareCz(SimpleHoster):
baseurl = "http://www.fastshare.cz"
captcha = self.decryptCaptcha(urljoin(baseurl, captcha_src))
- self.download(urljoin(baseurl, action), post={"code": captcha, "btn.x": 77, "btn.y": 18})
+ self.download(urljoin(baseurl, action), post={'code': captcha, 'btn.x': 77, 'btn.y': 18})
+
+ def checkFile(self):
check = self.checkDownload({
- 'paralell_dl': "<title>FastShare.cz</title>|<script>alert\('Pres FREE muzete stahovat jen jeden soubor najednou.'\)",
- 'wrong_captcha': "Download for FREE"
+ 'paralell_dl' : re.compile(r"<title>FastShare.cz</title>|<script>alert\('Pres FREE muzete stahovat jen jeden soubor najednou.'\)"),
+ 'wrong_captcha': re.compile(r'Download for FREE'),
+ 'credit' : re.compile(self.CREDIT_ERROR)
})
if check == "paralell_dl":
self.retry(6, 10 * 60, _("Paralell download"))
+
elif check == "wrong_captcha":
self.retry(max_tries=5, reason=_("Wrong captcha"))
-
- def handlePremium(self):
- header = self.load(self.pyfile.url, just_header=True)
- if "location" in header:
- url = header['location']
- elif self.CREDIT_PATTERN in self.html:
- self.logWarning(_("Not enough traffic left"))
- self.resetAccount()
- else:
- m = re.search(self.PREMIUM_URL_PATTERN, self.html)
- if m:
- url = m.group(1)
- else:
- self.error(_("PREMIUM_URL_PATTERN not found"))
-
- self.logDebug("PREMIUM URL: " + url)
- self.download(url, disposition=True)
-
- check = self.checkDownload({"credit": re.compile(self.CREDIT_PATTERN)})
- if check == "credit":
+ elif check == "credit":
self.resetAccount()
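checkDownload() takes a dict of named rules, scans the saved file and hands back the name of the first rule that matches, which is what the captcha and credit branches above key on. A rough standalone sketch:

    import re

    # Sketch only: matching a downloaded page against named rules and
    # returning the name of the first hit.
    def check_download(content, rules):
        for name, rule in rules.items():
            if rule.search(content):
                return name
        return None

    rules = {'wrong_captcha': re.compile(r'Download for FREE'),
             'credit'       : re.compile(r' credit for ')}

    print check_download("<html>Download for FREE</html>", rules)
    # -> wrong_captcha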
diff --git a/pyload/plugins/hoster/FileSharkPl.py b/pyload/plugins/hoster/FileSharkPl.py
index 99cb4b51b..ad8321d2d 100644
--- a/pyload/plugins/hoster/FileSharkPl.py
+++ b/pyload/plugins/hoster/FileSharkPl.py
@@ -10,7 +10,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class FileSharkPl(SimpleHoster):
__name__ = "FileSharkPl"
__type__ = "hoster"
- __version__ = "0.01"
+ __version__ = "0.03"
__pattern__ = r'http://(?:www\.)?fileshark\.pl/pobierz/\d{6}/\w{5}'
@@ -20,22 +20,23 @@ class FileSharkPl(SimpleHoster):
("Walter Purcaro", "vuolter@gmail.com")]
+ CONTENT_DISPOSITION = True
+
NAME_PATTERN = r'<h2 class="name-file">(?P<N>.+)</h2>'
SIZE_PATTERN = r'<p class="size-file">(.*?)<strong>(?P<S>\d+\.?\d*)\s(?P<U>\w+)</strong></p>'
OFFLINE_PATTERN = '(P|p)lik zosta. (usuni.ty|przeniesiony)'
- DOWNLOAD_ALERT = r'<p class="lead text-center alert alert-warning">(.*?)</p>'
- IP_BLOCKED_PATTERN = 'Strona jest dost.pna wy..cznie dla u.ytkownik.w znajduj.cych si. na terenie Polski'
- DOWNLOAD_SLOTS_ERROR_PATTERN = r'Osi.gni.to maksymaln. liczb. .ci.ganych jednocze.nie plik.w\.'
-
- DOWNLOAD_URL_FREE = r'<a href="(.*?)" class="btn-upload-free">'
- DOWNLOAD_URL_PREMIUM = r'<a href="(.*?)" class="btn-upload-premium">'
+ LINK_FREE_PATTERN = r'<a href="(.*?)" class="btn-upload-free">'
+ LINK_PREMIUM_PATTERN = r'<a href="(.*?)" class="btn-upload-premium">'
- SECONDS_PATTERN = r'var timeToDownload = (\d+);'
+ WAIT_PATTERN = r'var timeToDownload = (\d+);'
+ ERROR_PATTERN = r'<p class="lead text-center alert alert-warning">(.*?)</p>'
+ IP_ERROR_PATTERN = r'Strona jest dost.pna wy..cznie dla u.ytkownik.w znajduj.cych si. na terenie Polski'
+ SLOT_ERROR_PATTERN = r'Osi.gni.to maksymaln. liczb. .ci.ganych jednocze.nie plik.w\.'
- CAPTCHA_IMG_PATTERN = '<img src="data:image/jpeg;base64,(.*?)" title="captcha"'
- CAPTCHA_TOKEN_PATTERN = r'name="form\[_token\]" value="(.*?)" />'
+ CAPTCHA_PATTERN = '<img src="data:image/jpeg;base64,(.*?)" title="captcha"'
+ TOKEN_PATTERN = r'name="form\[_token\]" value="(.*?)" />'
def setup(self):
@@ -47,77 +48,79 @@ class FileSharkPl(SimpleHoster):
self.multiDL = False
- def prepare(self):
- super(FileSharkPl, self).prepare()
+ def checkErrors(self):
+        # check if another download is already running (-> wait time can be found in html body)
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ errmsg = self.info['error'] = _("Another download already run")
+ self.retry(15, int(m.group(1)), errmsg)
- m = re.search(self.DOWNLOAD_ALERT, self.html):
+        m = re.search(self.ERROR_PATTERN, self.html)
if m:
- return
+ alert = m.group(1)
- alert = m.group(1)
+ if re.match(self.IP_ERROR_PATTERN, alert):
+ self.fail(_("Only connections from Polish IP are allowed"))
- if re.match(self.IP_BLOCKED_PATTERN, alert):
- self.fail(_("Only connections from Polish IP are allowed"))
- elif re.match(self.DOWNLOAD_SLOTS_ERROR_PATTERN, alert):
- self.logInfo(_("No free download slots available"))
- self.retry(10, 30 * 60, _("Still no free download slots available"))
- else:
- self.logInfo(alert)
- self.retry(10, 10 * 60, _("Try again later"))
+ elif re.match(self.SLOT_ERROR_PATTERN, alert):
+ errmsg = self.info['error'] = _("No free download slots available")
+ self.logWarning(errmsg)
+ self.retry(10, 30 * 60, _("Still no free download slots available"))
+
+ else:
+ self.info['error'] = alert
+ self.retry(10, 10 * 60, _("Try again later"))
+
+ self.info.pop('error', None)
#@NOTE: handlePremium method was never been tested
def handlePremium(self):
- self.logDebug("Premium accounts support in experimental modus!")
- m = re.search(self.DOWNLOAD_URL_PREMIUM, self.html)
- file_url = urljoin("http://fileshark.pl", m.group(1))
-
- self.download(file_url, disposition=True)
- self.checkDownload()
+        super(FileSharkPl, self).handlePremium()
+ if self.link:
+ self.link = urljoin("http://fileshark.pl/", self.link)
def handleFree(self):
- m = re.search(self.DOWNLOAD_URL_FREE, self.html)
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
if m is None:
self.error(_("Download url not found"))
- file_url = urljoin("http://fileshark.pl", m.group(1))
+ link = urljoin("http://fileshark.pl", m.group(1))
- m = re.search(self.SECONDS_PATTERN, self.html)
+ m = re.search(self.WAIT_PATTERN, self.html)
if m:
seconds = int(m.group(1))
self.logDebug("Wait %s seconds" % seconds)
- self.wait(seconds + 2)
+ self.wait(seconds)
action, inputs = self.parseHtmlForm('action=""')
- m = re.search(self.CAPTCHA_TOKEN_PATTERN, self.html)
+
+ m = re.search(self.TOKEN_PATTERN, self.html)
if m is None:
self.retry(reason=_("Captcha form not found"))
inputs['form[_token]'] = m.group(1)
- m = re.search(self.CAPTCHA_IMG_PATTERN, self.html)
+ m = re.search(self.CAPTCHA_PATTERN, self.html)
if m is None:
self.retry(reason=_("Captcha image not found"))
- tmp_load = self.load
- self.load = self.decode64 #: injects decode64 inside decryptCaptcha
+ tmp_load = self.load
+ self.load = self._decode64 #: work-around: injects decode64 inside decryptCaptcha
inputs['form[captcha]'] = self.decryptCaptcha(m.group(1), imgtype='jpeg')
inputs['form[start]'] = ""
self.load = tmp_load
- self.download(file_url, post=inputs, cookies=True, disposition=True)
- self.checkDownload()
+ self.download(link, post=inputs, cookies=True, disposition=True)
- def checkDownload(self):
- check = super(FileSharkPl, self).checkDownload({
- 'wrong_captcha': re.compile(r'<label for="form_captcha" generated="true" class="error">(.*?)</label>'),
- 'wait_pattern': re.compile(self.SECONDS_PATTERN),
- 'DL-found': re.compile('<a href="(.*)">')
- })
+ def checkFile(self):
+ check = self.checkDownload({'wrong_captcha': re.compile(r'<label for="form_captcha" generated="true" class="error">(.*?)</label>'),
+                                   'wait_pattern' : re.compile(self.WAIT_PATTERN),
+ 'DL-found' : re.compile('<a href="(.*)">')})
if check == "DL-found":
self.correctCaptcha()
@@ -130,7 +133,7 @@ class FileSharkPl(SimpleHoster):
self.retry()
- def decode64(self, data, *args, **kwargs):
+ def _decode64(self, data, *args, **kwargs):
return data.decode("base64")
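The temporary reassignment of self.load is what lets decryptCaptcha() consume the inline base64 captcha: while the decoder is injected, the "download" step simply decodes the string it is handed. A compact sketch of the trick (FakePlugin is invented for illustration):

    # Sketch only: swapping the loader for a decoder around a captcha call.
    class FakePlugin(object):
        def load(self, url, *args, **kwargs):
            return "<html>normal page</html>"

        def _decode64(self, data, *args, **kwargs):
            return data.decode("base64")

        def decrypt_captcha(self, src):
            return self.load(src)   # normally this would fetch the image

    plugin = FakePlugin()
    tmp_load = plugin.load                    # keep the real loader
    plugin.load = plugin._decode64            # inject the decoder
    print plugin.decrypt_captcha("aGVsbG8=")  # -> hello
    plugin.load = tmp_load                    # restore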
diff --git a/pyload/plugins/hoster/FileStoreTo.py b/pyload/plugins/hoster/FileStoreTo.py
index f1425d3d6..f08474d28 100644
--- a/pyload/plugins/hoster/FileStoreTo.py
+++ b/pyload/plugins/hoster/FileStoreTo.py
@@ -23,7 +23,8 @@ class FileStoreTo(SimpleHoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
+ self.resumeDownload = True
+ self.multiDL = True
def handleFree(self):
diff --git a/pyload/plugins/hoster/FilecloudIo.py b/pyload/plugins/hoster/FilecloudIo.py
index 7dc9a3a16..0c9f1b5ee 100644
--- a/pyload/plugins/hoster/FilecloudIo.py
+++ b/pyload/plugins/hoster/FilecloudIo.py
@@ -10,7 +10,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class FilecloudIo(SimpleHoster):
__name__ = "FilecloudIo"
__type__ = "hoster"
- __version__ = "0.04"
+ __version__ = "0.05"
__pattern__ = r'http://(?:www\.)?(?:filecloud\.io|ifile\.it|mihd\.net)/(?P<ID>\w+).*'
@@ -34,12 +34,13 @@ class FilecloudIo(SimpleHoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
- self.chunkLimit = 1
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = 1
def handleFree(self):
- data = {"ukey": self.info['ID']}
+ data = {"ukey": self.info['pattern']['ID']}
m = re.search(self.AB1_PATTERN, self.html)
if m is None:
@@ -94,7 +95,7 @@ class FilecloudIo(SimpleHoster):
if res['dl']:
self.html = self.load('http://filecloud.io/download.html')
- m = re.search(self.LINK_PATTERN % self.info['ID'], self.html)
+ m = re.search(self.LINK_PATTERN % self.info['pattern']['ID'], self.html)
if m is None:
self.error(_("LINK_PATTERN not found"))
@@ -109,7 +110,7 @@ class FilecloudIo(SimpleHoster):
def handlePremium(self):
akey = self.account.getAccountData(self.user)['akey']
- ukey = self.info['ID']
+ ukey = self.info['pattern']['ID']
self.logDebug("Akey: %s | Ukey: %s" % (akey, ukey))
rep = self.load("http://api.filecloud.io/api-fetch_download_url.api",
post={"akey": akey, "ukey": ukey})
diff --git a/pyload/plugins/hoster/FilefactoryCom.py b/pyload/plugins/hoster/FilefactoryCom.py
index 969802703..ac7899ec5 100644
--- a/pyload/plugins/hoster/FilefactoryCom.py
+++ b/pyload/plugins/hoster/FilefactoryCom.py
@@ -56,7 +56,7 @@ class FilefactoryCom(SimpleHoster):
m = re.search(self.WAIT_PATTERN, self.html)
if m:
- self.wait(m.group(1))
+ self.wait(int(m.group(1)))
self.download(dl_link, disposition=True)
diff --git a/pyload/plugins/hoster/FilepostCom.py b/pyload/plugins/hoster/FilepostCom.py
index 97fdd6c67..5995b4aba 100644
--- a/pyload/plugins/hoster/FilepostCom.py
+++ b/pyload/plugins/hoster/FilepostCom.py
@@ -12,9 +12,9 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class FilepostCom(SimpleHoster):
__name__ = "FilepostCom"
__type__ = "hoster"
- __version__ = "0.29"
+ __version__ = "0.30"
- __pattern__ = r'https?://(?:www\.)?(?:filepost\.com/files|fp\.io)/([^/]+).*'
+ __pattern__ = r'https?://(?:www\.)?(?:filepost\.com/files|fp\.io)/(?P<ID>[^/]+)'
__description__ = """Filepost.com hoster plugin"""
__license__ = "GPLv3"
@@ -30,9 +30,6 @@ class FilepostCom(SimpleHoster):
def handleFree(self):
- # Find token and captcha key
- file_id = re.match(self.__pattern__, self.pyfile.url).group(1)
-
m = re.search(self.FLP_TOKEN_PATTERN, self.html)
if m is None:
self.error(_("Token"))
@@ -45,13 +42,13 @@ class FilepostCom(SimpleHoster):
# Get wait time
get_dict = {'SID': self.req.cj.getCookie('SID'), 'JsHttpRequest': str(int(time() * 10000)) + '-xml'}
- post_dict = {'action': 'set_download', 'token': flp_token, 'code': file_id}
+ post_dict = {'action': 'set_download', 'token': flp_token, 'code': self.info['pattern']['ID']}
wait_time = int(self.getJsonResponse(get_dict, post_dict, 'wait_time'))
if wait_time > 0:
self.wait(wait_time)
- post_dict = {"token": flp_token, "code": file_id, "file_pass": ''}
+ post_dict = {"token": flp_token, "code": self.info['pattern']['ID'], "file_pass": ''}
if 'var is_pass_exists = true;' in self.html:
# Solve password
diff --git a/pyload/plugins/hoster/FilerNet.py b/pyload/plugins/hoster/FilerNet.py
index e34a5799e..d73467947 100644
--- a/pyload/plugins/hoster/FilerNet.py
+++ b/pyload/plugins/hoster/FilerNet.py
@@ -4,7 +4,6 @@
# http://filer.net/get/ivgf5ztw53et3ogd
# http://filer.net/get/hgo14gzcng3scbvv
-import pycurl
import re
from urlparse import urljoin
@@ -16,84 +15,67 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class FilerNet(SimpleHoster):
__name__ = "FilerNet"
__type__ = "hoster"
- __version__ = "0.07"
+ __version__ = "0.09"
__pattern__ = r'https?://(?:www\.)?filer\.net/get/\w+'
__description__ = """Filer.net hoster plugin"""
__license__ = "GPLv3"
- __authors__ = [("stickell", "l.stickell@yahoo.it")]
+ __authors__ = [("stickell", "l.stickell@yahoo.it")
+ ("Walter Purcaro", "vuolter@gmail.com")]
- INFO_PATTERN = r'<h1 class="page-header">Free Download (?P<N>\S+) <small>(?P<S>[\w.]+) (?P<U>[\w^_]+)</small></h1>'
+ CONTENT_DISPOSITION = True
+
+ INFO_PATTERN = r'<h1 class="page-header">Free Download (?P<N>\S+) <small>(?P<S>[\w.]+) (?P<U>[\w^_]+)</small></h1>'
OFFLINE_PATTERN = r'Nicht gefunden'
- LINK_PATTERN = r'href="([^"]+)">Get download</a>'
+ LINK_FREE_PATTERN = LINK_PREMIUM_PATTERN = r'href="([^"]+)">Get download</a>'
- def handleFree(self):
+ def checkErrors(self):
# Wait between downloads
m = re.search(r'musst du <span id="time">(\d+)</span> Sekunden warten', self.html)
if m:
- self.retry(wait_time=int(m.group(1)), reason=_("Wait between free downloads"))
+ errmsg = self.info['error'] = _("Wait between free downloads")
+ self.retry(wait_time=int(m.group(1)), reason=errmsg)
- self.html = self.load(self.pyfile.url, decode=True)
+ self.info.pop('error', None)
- inputs = self.parseHtmlForm(input_names='token')[1]
+
+ def handleFree(self):
+ inputs = self.parseHtmlForm(input_names={'token': re.compile(r'.+')})[1]
if 'token' not in inputs:
self.error(_("Unable to detect token"))
- token = inputs['token']
- self.logDebug("Token: " + token)
- self.html = self.load(self.pyfile.url, post={'token': token}, decode=True)
+ self.html = self.load(self.pyfile.url, post={'token': inputs['token']}, decode=True)
- inputs = self.parseHtmlForm(input_names='hash')[1]
+ inputs = self.parseHtmlForm(input_names={'hash': re.compile(r'.+')})[1]
if 'hash' not in inputs:
self.error(_("Unable to detect hash"))
- hash_data = inputs['hash']
- self.logDebug("Hash: " + hash_data)
- downloadURL = r''
recaptcha = ReCaptcha(self)
for _i in xrange(5):
challenge, response = recaptcha.challenge()
- post_data = {'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field': response,
- 'hash': hash_data}
-
- # Workaround for 0.4.9 just_header issue. In 0.5 clean the code using just_header
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
- self.load(self.pyfile.url, post=post_data)
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
-
- if 'location' in self.req.http.header.lower():
- location = re.search(r'location: (\S+)', self.req.http.header, re.I).group(1)
- downloadURL = urljoin('http://filer.net', location)
+
+ header = self.load(self.pyfile.url,
+ post={'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response,
+                                     'hash' : inputs['hash']}, just_header=True)
+
+ if 'location' in header and header['location']:
self.correctCaptcha()
- break
+ self.link = urljoin('http://filer.net', header['location'])
+ return
else:
self.invalidCaptcha()
- if not downloadURL:
- self.fail(_("No Download url retrieved/all captcha attempts failed"))
-
- self.download(downloadURL, disposition=True)
-
def handlePremium(self):
- header = self.load(self.pyfile.url, just_header=True)
- if 'location' in header: # Direct Download ON
- dl = self.pyfile.url
- else: # Direct Download OFF
- html = self.load(self.pyfile.url)
- m = re.search(self.LINK_PATTERN, html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
- dl = 'http://filer.net' + m.group(1)
-
- self.logDebug("Direct link: " + dl)
- self.download(dl, disposition=True)
+ super(FilerNet, self).handlePremium()
+ if self.link:
+ self.link = urljoin("http://filer.net/", self.link)
getInfo = create_getInfo(FilerNet)
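With header-based loading the pycurl FOLLOWLOCATION workaround is no longer needed: the redirect target is read from the returned header mapping and resolved against the site root. A small sketch (the header dict and the token are invented):

    from urlparse import urljoin

    # Sketch only: resolving a relative Location header against the site root.
    def resolve_download(header, base="http://filer.net"):
        if 'location' in header and header['location']:
            return urljoin(base, header['location'])
        return None

    print resolve_download({'location': "/get/dl/abc123"})
    # -> http://filer.net/get/dl/abc123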
diff --git a/pyload/plugins/hoster/FileserveCom.py b/pyload/plugins/hoster/FileserveCom.py
index f486d9f56..0d0833038 100644
--- a/pyload/plugins/hoster/FileserveCom.py
+++ b/pyload/plugins/hoster/FileserveCom.py
@@ -59,9 +59,9 @@ class FileserveCom(Hoster):
def setup(self):
self.resumeDownload = self.multiDL = self.premium
-
self.file_id = re.match(self.__pattern__, self.pyfile.url).group('id')
- self.url = "%s%s" % (self.URLS[0], self.file_id)
+ self.url = "%s%s" % (self.URLS[0], self.file_id)
+
self.logDebug("File ID: %s URL: %s" % (self.file_id, self.url))
diff --git a/pyload/plugins/hoster/FreakshareCom.py b/pyload/plugins/hoster/FreakshareCom.py
index bd31a5752..c298e5a24 100644
--- a/pyload/plugins/hoster/FreakshareCom.py
+++ b/pyload/plugins/hoster/FreakshareCom.py
@@ -163,10 +163,6 @@ class FreakshareCom(Hoster):
herewego = self.load(self.pyfile.url, None, request_options) # the actual download-Page
- # comment this in, when it doesnt work
- # with open("DUMP__FS_.HTML", "w") as fp:
- # fp.write(herewego)
-
to_sort = re.findall(r"<input\stype=\".*?\"\svalue=\"(\S*?)\".*?name=\"(\S*?)\"\s.*?\/>", herewego)
request_options = dict((n, v) for (v, n) in to_sort)
diff --git a/pyload/plugins/hoster/FreeWayMe.py b/pyload/plugins/hoster/FreeWayMe.py
index 128f54958..153b41b32 100644
--- a/pyload/plugins/hoster/FreeWayMe.py
+++ b/pyload/plugins/hoster/FreeWayMe.py
@@ -17,8 +17,8 @@ class FreeWayMe(Hoster):
def setup(self):
self.resumeDownload = False
- self.chunkLimit = 1
- self.multiDL = self.premium
+ self.multiDL = self.premium
+ self.chunkLimit = 1
def process(self, pyfile):
diff --git a/pyload/plugins/hoster/FshareVn.py b/pyload/plugins/hoster/FshareVn.py
index 92f7c659a..d851209a2 100644
--- a/pyload/plugins/hoster/FshareVn.py
+++ b/pyload/plugins/hoster/FshareVn.py
@@ -112,6 +112,8 @@ class FshareVn(SimpleHoster):
self.logError(msg)
self.retry(30, 2 * 60, msg)
+ self.info.pop('error', None)
+
def checkDownloadedFile(self):
# check download
diff --git a/pyload/plugins/hoster/GamefrontCom.py b/pyload/plugins/hoster/GamefrontCom.py
index 195c6037e..50a5be554 100644
--- a/pyload/plugins/hoster/GamefrontCom.py
+++ b/pyload/plugins/hoster/GamefrontCom.py
@@ -25,8 +25,9 @@ class GamefrontCom(Hoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
- self.chunkLimit = -1
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = -1
def process(self, pyfile):
diff --git a/pyload/plugins/hoster/GigapetaCom.py b/pyload/plugins/hoster/GigapetaCom.py
index 491fcad01..419a97558 100644
--- a/pyload/plugins/hoster/GigapetaCom.py
+++ b/pyload/plugins/hoster/GigapetaCom.py
@@ -61,5 +61,7 @@ class GigapetaCom(SimpleHoster):
self.wait(5 * 60, True)
self.retry()
+ self.info.pop('error', None)
+
getInfo = create_getInfo(GigapetaCom)
diff --git a/pyload/plugins/hoster/GooIm.py b/pyload/plugins/hoster/GooIm.py
index 28f50661b..f3626cc57 100644
--- a/pyload/plugins/hoster/GooIm.py
+++ b/pyload/plugins/hoster/GooIm.py
@@ -25,7 +25,8 @@ class GooIm(SimpleHoster):
def setup(self):
- self.multiDL = self.resumeDownload = True
+ self.resumeDownload = True
+ self.multiDL = True
def handleFree(self):
diff --git a/pyload/plugins/hoster/JumbofilesCom.py b/pyload/plugins/hoster/JumbofilesCom.py
index e39bbcc20..6b8611a45 100644
--- a/pyload/plugins/hoster/JumbofilesCom.py
+++ b/pyload/plugins/hoster/JumbofilesCom.py
@@ -23,7 +23,8 @@ class JumbofilesCom(SimpleHoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
+ self.resumeDownload = True
+ self.multiDL = True
def handleFree(self):
diff --git a/pyload/plugins/hoster/JunocloudMe.py b/pyload/plugins/hoster/JunocloudMe.py
index 908402775..cef475c1b 100644
--- a/pyload/plugins/hoster/JunocloudMe.py
+++ b/pyload/plugins/hoster/JunocloudMe.py
@@ -6,7 +6,7 @@ from pyload.plugins.internal.XFSHoster import XFSHoster, create_getInfo
class JunocloudMe(XFSHoster):
__name__ = "JunocloudMe"
__type__ = "hoster"
- __version__ = "0.04"
+ __version__ = "0.05"
__pattern__ = r'http://(?:\w+\.)?junocloud\.me/\w{12}'
@@ -17,9 +17,8 @@ class JunocloudMe(XFSHoster):
HOSTER_DOMAIN = "junocloud.me"
- URL_REPLACEMENTS = [(r'//www\.', "//dl3.")]
+ URL_REPLACEMENTS = [(r'//(www\.)?junocloud', "//dl3.junocloud")]
- NAME_PATTERN = r'<p class="request_file">http://junocloud.me/w{12}/(?P<N>.+?)</p>'
SIZE_PATTERN = r'<p class="request_filesize">Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)</p>'
OFFLINE_PATTERN = r'>No such file with this filename<'
diff --git a/pyload/plugins/hoster/Keep2shareCc.py b/pyload/plugins/hoster/Keep2shareCc.py
index 6fc521107..6fef901d8 100644
--- a/pyload/plugins/hoster/Keep2shareCc.py
+++ b/pyload/plugins/hoster/Keep2shareCc.py
@@ -2,16 +2,16 @@
import re
-from urlparse import urlparse, urljoin
+from urlparse import urljoin, urlparse
from pyload.plugins.internal.CaptchaService import ReCaptcha
-from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
+from pyload.plugins.internal.SimpleHoster import _isDirectLink, SimpleHoster, create_getInfo
class Keep2shareCc(SimpleHoster):
__name__ = "Keep2shareCc"
__type__ = "hoster"
- __version__ = "0.15"
+ __version__ = "0.16"
__pattern__ = r'https?://(?:www\.)?(keep2share|k2s|keep2s)\.cc/file/(?P<ID>\w+)'
@@ -21,75 +21,98 @@ class Keep2shareCc(SimpleHoster):
("Walter Purcaro", "vuolter@gmail.com")]
+ URL_REPLACEMENTS = [(__pattern__ + ".*", "http://k2s.cc/file/\g<ID>")]
+
+ CONTENT_DISPOSITION = True
+
NAME_PATTERN = r'File: <span>(?P<N>.+)</span>'
SIZE_PATTERN = r'Size: (?P<S>[^<]+)</div>'
- OFFLINE_PATTERN = r'File not found or deleted|Sorry, this file is blocked or deleted|Error 404'
- LINK_PATTERN = r'To download this file with slow speed, use <a href="([^"]+)">this link</a>'
+ OFFLINE_PATTERN = r'File not found or deleted|Sorry, this file is blocked or deleted|Error 404'
+ TEMP_OFFLINE_PATTERN = r'Downloading blocked due to'
+
+ LINK_FREE_PATTERN = LINK_PREMIUM_PATTERN = r'"([^"]+url.html?file=.+?)"|window\.location\.href = \'(.+?)\';'
+
CAPTCHA_PATTERN = r'src="(/file/captcha\.html.+?)"'
- WAIT_PATTERN = r'Please wait ([\d:]+) to download this file'
- MULTIDL_ERROR = r'Free account does not allow to download more than one file at the same time'
+ WAIT_PATTERN = r'Please wait ([\d:]+) to download this file'
+ TEMP_ERROR_PATTERN = r'>\s*(Download count files exceed|Traffic limit exceed|Free account does not allow to download more than one file at the same time)'
+ ERROR_PATTERN = r'>\s*(Free user can\'t download large files|You no can access to this file|This download available only for premium users|This is private file)'
- def handleFree(self):
- self.sanitize_url()
- self.html = self.load(self.pyfile.url)
- self.fid = re.search(r'<input type="hidden" name="slow_id" value="([^"]+)">', self.html).group(1)
+ def checkErrors(self):
+ m = re.search(self.TEMP_ERROR_PATTERN, self.html)
+ if m:
+ self.info['error'] = m.group(1)
+ self.wantReconnect = True
+ self.retry(wait_time=30 * 60, reason=m.group(0))
+
+ m = re.search(self.ERROR_PATTERN, self.html)
+ if m:
+ errmsg = self.info['error'] = m.group(1)
+ self.error(errmsg)
+
+ m = re.search(self.WAIT_PATTERN, self.html)
+ if m:
+ self.logDebug("Hoster told us to wait for %s" % m.group(1))
+
+ # string to time convert courtesy of https://stackoverflow.com/questions/10663720
+ ftr = [3600, 60, 1]
+ wait_time = sum([a * b for a, b in zip(ftr, map(int, m.group(1).split(':')))])
+
+ self.wantReconnect = True
+ self.retry(wait_time=wait_time, reason="Please wait to download this file")
+
+ self.info.pop('error', None)
+
+
+ def handleFree(self):
+ self.fid = re.search(r'<input type="hidden" name="slow_id" value="([^"]+)">', self.html).group(1)
self.html = self.load(self.pyfile.url, post={'yt0': '', 'slow_id': self.fid})
- if ">Downloading is not possible" in self.html:
- self.fail("Free user can't download large files")
+ self.checkErrors()
- m = re.search(r"function download\(\){.*window\.location\.href = '([^']+)';", self.html, re.S)
- if m: # Direct mode
- self.startDownload(m.group(1))
- else:
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
+
+ if m is None:
self.handleCaptcha()
self.wait(30)
self.html = self.load(self.pyfile.url, post={'uniqueId': self.fid, 'free': 1})
- m = re.search(self.WAIT_PATTERN, self.html)
- if m:
- self.logDebug("Hoster told us to wait for %s" % m.group(1))
- # string to time convert courtesy of https://stackoverflow.com/questions/10663720
- ftr = [3600, 60, 1]
- wait_time = sum([a * b for a, b in zip(ftr, map(int, m.group(1).split(':')))])
- self.wait(wait_time, True)
- self.retry()
-
- m = re.search(self.MULTIDL_ERROR, self.html)
- if m:
- # if someone is already downloading on our line, wait 30min and retry
- self.logDebug("Already downloading, waiting for 30 minutes")
- self.wait(30 * 60, True)
- self.retry()
+ self.checkErrors()
- m = re.search(self.LINK_PATTERN, self.html)
+ m = re.search(self.LINK_FREE_PATTERN, self.html)
if m is None:
- self.error(_("LINK_PATTERN not found"))
- self.startDownload(m.group(1))
+ self.error(_("LINK_FREE_PATTERN not found"))
+
+ self.link = self._getDownloadLink(m.group(1))
+
+
+ def handlePremium(self):
+ super(Keep2shareCc, self).handlePremium()
+ if self.link:
+ self.link = self._getDownloadLink(self.link)
def handleCaptcha(self):
recaptcha = ReCaptcha(self)
for _i in xrange(5):
- post_data = {'free': 1,
+ post_data = {'free' : 1,
'freeDownloadRequest': 1,
- 'uniqueId': self.fid,
- 'yt0': ''}
+ 'uniqueId' : self.fid,
+ 'yt0' : ''}
m = re.search(self.CAPTCHA_PATTERN, self.html)
if m:
- captcha_url = urljoin(self.base_url, m.group(1))
+ captcha_url = urljoin(self.base, m.group(1))
post_data['CaptchaForm[code]'] = self.decryptCaptcha(captcha_url)
else:
challenge, response = recaptcha.challenge()
post_data.update({'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field': response})
+ 'recaptcha_response_field' : response})
self.html = self.load(self.pyfile.url, post=post_data)
@@ -102,17 +125,11 @@ class Keep2shareCc(SimpleHoster):
self.fail(_("All captcha attempts failed"))
- def startDownload(self, url):
- d = urljoin(self.base_url, url)
- self.download(d, disposition=True)
-
-
- def sanitize_url(self):
- header = self.load(self.pyfile.url, just_header=True)
- if 'location' in header:
- self.pyfile.url = header['location']
+ def _getDownloadLink(self, url):
p = urlparse(self.pyfile.url)
- self.base_url = "%s://%s" % (p.scheme, p.hostname)
+ base = "%s://%s" % (p.scheme, p.netloc)
+ link = _isDirectLink(self, url, self.premium)
+ return urljoin(base, link) if link else ""
getInfo = create_getInfo(Keep2shareCc)
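The zip/sum idiom borrowed from the Stack Overflow answer converts the hh:mm:ss wait string captured by WAIT_PATTERN into seconds; a tiny sketch:

    # Sketch only: assumes the wait string always has the hh:mm:ss form.
    def to_seconds(timestr):
        factors = [3600, 60, 1]
        return sum(a * b for a, b in zip(factors, map(int, timestr.split(':'))))

    print to_seconds("01:30:00")   # -> 5400
    print to_seconds("00:02:30")   # -> 150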
diff --git a/pyload/plugins/hoster/LetitbitNet.py b/pyload/plugins/hoster/LetitbitNet.py
index 16f01bf06..cdd339eb9 100644
--- a/pyload/plugins/hoster/LetitbitNet.py
+++ b/pyload/plugins/hoster/LetitbitNet.py
@@ -139,7 +139,4 @@ class LetitbitNet(SimpleHoster):
if api_rep['status'] == 'FAIL':
self.fail(api_rep['data'])
- direct_link = api_rep['data'][0][0]
- self.logDebug("Direct Link: " + direct_link)
-
- self.download(direct_link, disposition=True)
+ self.download(api_rep['data'][0][0], disposition=True)
diff --git a/pyload/plugins/hoster/LoadTo.py b/pyload/plugins/hoster/LoadTo.py
index 974a27d29..3b7229a0b 100644
--- a/pyload/plugins/hoster/LoadTo.py
+++ b/pyload/plugins/hoster/LoadTo.py
@@ -49,7 +49,7 @@ class LoadTo(SimpleHoster):
# Set Timer - may be obsolete
m = re.search(self.WAIT_PATTERN, self.html)
if m:
- self.wait(m.group(1))
+ self.wait(int(m.group(1)))
# Load.to is using solvemedia captchas since ~july 2014:
solvemedia = SolveMedia(self)
diff --git a/pyload/plugins/hoster/LuckyShareNet.py b/pyload/plugins/hoster/LuckyShareNet.py
index 31de417b7..9b418ccd4 100644
--- a/pyload/plugins/hoster/LuckyShareNet.py
+++ b/pyload/plugins/hoster/LuckyShareNet.py
@@ -11,7 +11,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class LuckyShareNet(SimpleHoster):
__name__ = "LuckyShareNet"
__type__ = "hoster"
- __version__ = "0.03"
+ __version__ = "0.04"
__pattern__ = r'https?://(?:www\.)?luckyshare\.net/(?P<ID>\d{10,})'
@@ -42,9 +42,7 @@ class LuckyShareNet(SimpleHoster):
# TODO: There should be a filesize limit for free downloads
# TODO: Some files could not be downloaded in free mode
def handleFree(self):
- file_id = re.match(self.__pattern__, self.pyfile.url).group('ID')
- self.logDebug("File ID: " + file_id)
- rep = self.load(r"http://luckyshare.net/download/request/type/time/file/" + file_id, decode=True)
+ rep = self.load(r"http://luckyshare.net/download/request/type/time/file/" + self.info['pattern']['ID'], decode=True)
self.logDebug("JSON: " + rep)
json = self.parseJson(rep)
@@ -69,7 +67,6 @@ class LuckyShareNet(SimpleHoster):
if not json['link']:
self.fail(_("No Download url retrieved/all captcha attempts failed"))
- self.logDebug("Direct URL: " + json['link'])
self.download(json['link'])
diff --git a/pyload/plugins/hoster/MegaCoNz.py b/pyload/plugins/hoster/MegaCoNz.py
index f09a5cdd5..182c0c9b9 100644
--- a/pyload/plugins/hoster/MegaCoNz.py
+++ b/pyload/plugins/hoster/MegaCoNz.py
@@ -14,6 +14,34 @@ from pycurl import SSL_CIPHER_LIST
from pyload.utils import json_loads, json_dumps
from pyload.plugins.internal.Hoster import Hoster
+############################ General errors ###################################
+# EINTERNAL (-1): An internal error has occurred. Please submit a bug report, detailing the exact circumstances in which this error occurred
+# EARGS (-2): You have passed invalid arguments to this command
+# EAGAIN (-3): (always at the request level) A temporary congestion or server malfunction prevented your request from being processed. No data was altered. Retry. Retries must be spaced with exponential backoff
+# ERATELIMIT (-4): You have exceeded your command weight per time quota. Please wait a few seconds, then try again (this should never happen in sane real-life applications)
+#
+############################ Upload errors ####################################
+# EFAILED (-5): The upload failed. Please restart it from scratch
+# ETOOMANY (-6): Too many concurrent IP addresses are accessing this upload target URL
+# ERANGE (-7): The upload file packet is out of range or not starting and ending on a chunk boundary
+# EEXPIRED (-8): The upload target URL you are trying to access has expired. Please request a fresh one
+#
+############################ Stream/System errors #############################
+# ENOENT (-9): Object (typically, node or user) not found
+# ECIRCULAR (-10): Circular linkage attempted
+# EACCESS (-11): Access violation (e.g., trying to write to a read-only share)
+# EEXIST (-12): Trying to create an object that already exists
+# EINCOMPLETE (-13): Trying to access an incomplete resource
+# EKEY (-14): A decryption operation failed (never returned by the API)
+# ESID (-15): Invalid or expired user session, please relogin
+# EBLOCKED (-16): User blocked
+# EOVERQUOTA (-17): Request over quota
+# ETEMPUNAVAIL (-18): Resource temporarily not available, please try again later
+# ETOOMANYCONNECTIONS (-19): Too many connections on this resource
+# EWRITE (-20): Write failed
+# EREAD (-21): Read failed
+# EAPPKEY (-22): Invalid application key; request not processed
+
class MegaCoNz(Hoster):
__name__ = "MegaCoNz"
@@ -26,8 +54,7 @@ class MegaCoNz(Hoster):
__license__ = "GPLv3"
__authors__ = [("RaNaN", "ranan@pyload.org")]
-
- API_URL = "https://g.api.mega.co.nz/cs?id=%d"
+ API_URL = "https://g.api.mega.co.nz/cs"
FILE_SUFFIX = ".crypted"
@@ -48,7 +75,7 @@ class MegaCoNz(Hoster):
# generate a session id, no idea where to obtain elsewhere
uid = random.randint(10 << 9, 10 ** 10)
- res = self.load(self.API_URL % uid, post=json_dumps([kwargs]))
+ res = self.load(self.API_URL, get={'id': uid}, post=json_dumps([kwargs]))
self.logDebug("Api Response: " + res)
return json_loads(res)
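After the change a Mega API call keeps the random session id in the ?id= query parameter while the command objects travel as a JSON array in the POST body; a sketch of the request shape only (the command payload is a placeholder, not a working Mega command):

    import json
    import random

    # Sketch only: request shape of an API call after the refactor.
    API_URL = "https://g.api.mega.co.nz/cs"

    uid = random.randint(10 << 9, 10 ** 10)
    payload = json.dumps([{'a': "COMMAND_NAME"}])   # placeholder command object

    print "POST %s?id=%d" % (API_URL, uid)
    print payload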
diff --git a/pyload/plugins/hoster/MegasharesCom.py b/pyload/plugins/hoster/MegasharesCom.py
index fcb53a486..45bd01ffd 100644
--- a/pyload/plugins/hoster/MegasharesCom.py
+++ b/pyload/plugins/hoster/MegasharesCom.py
@@ -36,7 +36,7 @@ class MegasharesCom(SimpleHoster):
def setup(self):
self.resumeDownload = True
- self.multiDL = self.premium
+ self.multiDL = self.premium
def handlePremium(self):
@@ -55,15 +55,18 @@ class MegasharesCom(SimpleHoster):
for _i in xrange(5):
random_num = re.search(self.REACTIVATE_NUM_PATTERN, self.html).group(1)
- verifyinput = self.decryptCaptcha(
- "http://d01.megashares.com/index.php?secgfx=gfx&random_num=%s" % random_num)
+ verifyinput = self.decryptCaptcha("http://d01.megashares.com/index.php",
+ get={'secgfx': "gfx", 'random_num': random_num})
+
self.logInfo(_("Reactivating passport %s: %s %s") % (passport_num, random_num, verifyinput))
- url = ("http://d01.megashares.com%s&rs=check_passport_renewal" % request_uri +
- "&rsargs[]=%s&rsargs[]=%s&rsargs[]=%s" % (verifyinput, random_num, passport_num) +
- "&rsargs[]=replace_sec_pprenewal&rsrnd=%s" % str(int(time() * 1000)))
- self.logDebug(url)
- res = self.load(url)
+ res = self.load("http://d01.megashares.com%s" % request_uri,
+ get={'rs' : "check_passport_renewal",
+ 'rsargs[]': verifyinput,
+ 'rsargs[]': random_num,
+ 'rsargs[]': passport_num,
+ 'rsargs[]': "replace_sec_pprenewal",
+ 'rsrnd[]' : str(int(time() * 1000))})
if 'Thank you for reactivating your passport.' in res:
self.correctCaptcha()
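One caveat with the new get= dict above: a Python dict literal keeps only one value per key, so the four rsargs[] entries collapse into a single parameter. Repeated query keys are normally encoded from a sequence of pairs, as in this standalone sketch (the values are placeholders):

    from urllib import urlencode

    # Sketch only: encoding repeated query keys from a list of pairs.
    pairs = [('rs', "check_passport_renewal"),
             ('rsargs[]', "VERIFY"),
             ('rsargs[]', "RANDOM"),
             ('rsargs[]', "PASSPORT"),
             ('rsargs[]', "replace_sec_pprenewal")]

    print urlencode(pairs)
    # -> rs=check_passport_renewal&rsargs%5B%5D=VERIFY&rsargs%5B%5D=RANDOM&...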
diff --git a/pyload/plugins/hoster/MultishareCz.py b/pyload/plugins/hoster/MultishareCz.py
index 60d02b6e0..28a536089 100644
--- a/pyload/plugins/hoster/MultishareCz.py
+++ b/pyload/plugins/hoster/MultishareCz.py
@@ -10,7 +10,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class MultishareCz(SimpleHoster):
__name__ = "MultishareCz"
__type__ = "hoster"
- __version__ = "0.34"
+ __version__ = "0.35"
__pattern__ = r'http://(?:www\.)?multishare\.cz/stahnout/(?P<ID>\d+).*'
@@ -19,10 +19,13 @@ class MultishareCz(SimpleHoster):
__authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
- INFO_PATTERN = ur'(?:<li>Název|Soubor): <strong>(?P<N>[^<]+)</strong><(?:/li><li|br)>Velikost: <strong>(?P<S>[^<]+)</strong>'
-    OFFLINE_PATTERN = ur'<h1>Stáhnout soubor</h1><p><strong>Požadovaný soubor neexistuje.</strong></p>'
SIZE_REPLACEMENTS = [('&nbsp;', '')]
+ MULTI_HOSTER = True
+
+ INFO_PATTERN = ur'(?:<li>Název|Soubor): <strong>(?P<N>[^<]+)</strong><(?:/li><li|br)>Velikost: <strong>(?P<S>[^<]+)</strong>'
+    OFFLINE_PATTERN = ur'<h1>Stáhnout soubor</h1><p><strong>Požadovaný soubor neexistuje.</strong></p>'
+
def process(self, pyfile):
msurl = re.match(self.__pattern__, pyfile.url)
diff --git a/pyload/plugins/hoster/NetloadIn.py b/pyload/plugins/hoster/NetloadIn.py
index 07aeb48ca..0aabc8f57 100644
--- a/pyload/plugins/hoster/NetloadIn.py
+++ b/pyload/plugins/hoster/NetloadIn.py
@@ -2,17 +2,19 @@
import re
+from urlparse import urljoin
from time import sleep, time
from pyload.network.RequestFactory import getURL
from pyload.plugins.internal.Hoster import Hoster
from pyload.plugins.Plugin import chunks
+from pyload.plugins.captcha import ReCaptcha
def getInfo(urls):
## returns list of tupels (name, size (in bytes), status (see FileDatabase), url)
- apiurl = "http://api.netload.in/info.php?auth=Zf9SnQh9WiReEsb18akjvQGqT0I830e8&bz=1&md5=1&file_id="
+ apiurl = "http://api.netload.in/info.php"
id_regex = re.compile(NetloadIn.__pattern__)
urls_per_query = 80
@@ -23,13 +25,18 @@ def getInfo(urls):
if match:
ids = ids + match.group(1) + ";"
- api = getURL(apiurl + ids, decode=True)
+ api = getURL(apiurl,
+ get={'auth' : "Zf9SnQh9WiReEsb18akjvQGqT0I830e8",
+ 'bz' : 1,
+ 'md5' : 1,
+ 'file_id': ids},
+ decode=True)
if api is None or len(api) < 10:
self.logDebug("Prefetch failed")
return
+
if api.find("unknown_auth") >= 0:
- print
self.logDebug("Outdated auth code")
return
@@ -38,11 +45,14 @@ def getInfo(urls):
for i, r in enumerate(api.splitlines()):
try:
tmp = r.split(";")
+
try:
size = int(tmp[2])
except Exception:
size = 0
- result.append((tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i]))
+
+ result.append((tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i] ))
+
except Exception:
self.logDebug("Error while processing response: %s" % r)
@@ -52,7 +62,7 @@ def getInfo(urls):
class NetloadIn(Hoster):
__name__ = "NetloadIn"
__type__ = "hoster"
- __version__ = "0.45"
+ __version__ = "0.47"
__pattern__ = r'https?://(?:[^/]*\.)?netload\.in/(?:datei(.*?)(?:\.htm|/)|index\.php?id=10&file_id=)'
@@ -69,8 +79,11 @@ class NetloadIn(Hoster):
def process(self, pyfile):
self.url = pyfile.url
+
self.prepare()
+
pyfile.setStatus("downloading")
+
self.proceed(self.url)
@@ -82,7 +95,9 @@ class NetloadIn(Hoster):
if self.premium:
self.logDebug("Use Premium Account")
- settings = self.load("http://www.netload.in/index.php?id=2&lang=en")
+
+ settings = self.load("http://www.netload.in/index.php", get={'id': 2, 'lang': "en"})
+
if '<option value="2" selected="selected">Direkter Download' in settings:
self.logDebug("Using direct download")
return True
@@ -97,9 +112,9 @@ class NetloadIn(Hoster):
def download_api_data(self, n=0):
- url = self.url
+ url = self.url
id_regex = re.compile(self.__pattern__)
- match = id_regex.search(url)
+ match = id_regex.search(url)
if match:
#normalize url
@@ -119,14 +134,17 @@ class NetloadIn(Hoster):
return
self.logDebug("APIDATA: " + html)
+
self.api_data = {}
+
if html and ";" in html and html not in ("unknown file_data", "unknown_server_data", "No input file specified."):
lines = html.split(";")
- self.api_data['exists'] = True
- self.api_data['fileid'] = lines[0]
+ self.api_data['exists'] = True
+ self.api_data['fileid'] = lines[0]
self.api_data['filename'] = lines[1]
- self.api_data['size'] = lines[2]
- self.api_data['status'] = lines[3]
+ self.api_data['size'] = lines[2]
+ self.api_data['status'] = lines[3]
+
if self.api_data['status'] == "online":
self.api_data['checksum'] = lines[4].strip()
else:
@@ -140,16 +158,28 @@ class NetloadIn(Hoster):
def final_wait(self, page):
wait_time = self.get_wait_time(page)
+
self.setWait(wait_time)
+
self.logDebug("Final wait %d seconds" % wait_time)
+
self.wait()
+
self.url = self.get_file_url(page)
+    def check_free_wait(self, page):
+ if ">An access request has been made from IP address <" in page:
+ self.wantReconnect = True
+ self.setWait(self.get_wait_time(page) or 30)
+ self.wait()
+ return True
+ else:
+ return False
+
+
def download_html(self):
- self.logDebug("Entering download_html")
page = self.load(self.url, decode=True)
- t = time() + 30
if "/share/templates/download_hddcrash.tpl" in page:
self.logError(_("Netload HDD Crash"))
@@ -169,8 +199,8 @@ class NetloadIn(Hoster):
self.pyfile.name = name
captchawaited = False
- for i in xrange(10):
+ for i in xrange(5):
if not page:
page = self.load(self.url)
t = time() + 30
@@ -185,51 +215,49 @@ class NetloadIn(Hoster):
self.logDebug("We will prepare your download")
self.final_wait(page)
return True
- if ">An access request has been made from IP address <" in page:
- wait = self.get_wait_time(page)
- if not wait:
- self.logDebug("Wait was 0 setting 30")
- wait = 30 * 60
- self.logInfo(_("Waiting between downloads %d seconds") % wait)
- self.setWait(wait, True)
- self.wait()
-
- return self.download_html()
self.logDebug("Trying to find captcha")
try:
- url_captcha_html = "http://netload.in/" + re.search('(index.php\?id=10&amp;.*&amp;captcha=1)',
- page).group(1).replace("amp;", "")
- except Exception:
+ url_captcha_html = re.search(r'(index.php\?id=10&amp;.*&amp;captcha=1)', page).group(1).replace("amp;", "")
+
+ except Exception, e:
+ self.logDebug("Exception during Captcha regex: %s" % e.message)
page = None
- continue
- try:
- page = self.load(url_captcha_html, cookies=True)
- captcha_url = "http://netload.in/" + re.search('(share/includes/captcha.php\?t=\d*)', page).group(1)
- except Exception:
- self.logDebug("Could not find captcha, try again from beginning")
- captchawaited = False
- continue
-
- file_id = re.search('<input name="file_id" type="hidden" value="(.*)" />', page).group(1)
- if not captchawaited:
- wait = self.get_wait_time(page)
- if i == 0:
- self.pyfile.waitUntil = time() # dont wait contrary to time on website
- else:
- self.pyfile.waitUntil = t
- self.logInfo(_("Waiting for captcha %d seconds") % (self.pyfile.waitUntil - time()))
- #self.setWait(wait)
- self.wait()
- captchawaited = True
+ else:
+ url_captcha_html = urljoin("http://netload.in/", url_captcha_html)
+ break
+
+ self.html = self.load(url_captcha_html)
- captcha = self.decryptCaptcha(captcha_url)
- page = self.load("http://netload.in/index.php?id=10", post={"file_id": file_id, "captcha_check": captcha},
- cookies=True)
+ recaptcha = ReCaptcha(self)
- return False
+ for _i in xrange(5):
+ challenge, response = recaptcha.challenge()
+
+ response_page = self.load("http://www.netload.in/index.php?id=10",
+ post={'captcha_check' : '1',
+ 'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response,
+ 'file_id' : self.api_data['fileid'],
+ 'Download_Next' : ''})
+ if "Orange_Link" in response_page:
+ break
+
+ if self.check_free_wait(response_page):
+ self.logDebug("Had to wait for next free slot, trying again")
+ return self.download_html()
+
+ else:
+ download_url = self.get_file_url(response_page)
+ self.logDebug("Download URL after get_file: " + download_url)
+ if not download_url.startswith("http://"):
+ self.error("download url: %s" % download_url)
+ self.wait()
+
+ self.url = download_url
+ return True
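
The rewritten free-download path above retries the ReCaptcha round trip up to five times and breaks out as soon as the response page contains "Orange_Link"; a failed slot check falls back to download_html(). A schematic of that loop with the captcha service and page loader stubbed out, since recaptcha.challenge() and self.load() are pyLoad internals:

    # Schematic retry loop; solve_captcha() and post_form() are hypothetical stand-ins.
    def solve_captcha():
        return "challenge-token", "user-response"        # hypothetical values

    def post_form(data):
        return "<html>... Orange_Link ...</html>"        # hypothetical success page

    for _i in xrange(5):
        challenge, response = solve_captcha()
        page = post_form({'captcha_check'            : '1',
                          'recaptcha_challenge_field': challenge,
                          'recaptcha_response_field' : response,
                          'file_id'                  : "12345",   # hypothetical file id
                          'Download_Next'            : ''})
        if "Orange_Link" in page:
            break    # captcha accepted; the page now carries the final download link
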
def get_file_url(self, page):
@@ -243,25 +271,24 @@ class NetloadIn(Hoster):
file_url_pattern = r'<a href="(.+)" class="Orange_Link">Click here'
attempt = re.search(file_url_pattern, page)
return "http://netload.in/" + attempt.group(1)
- except Exception:
- self.logDebug("Getting final link failed")
+
+ except Exception, e:
+ self.logDebug("Getting final link failed", e.message)
return None
def get_wait_time(self, page):
- wait_seconds = int(re.search(r"countdown\((.+),'change\(\)'\)", page).group(1)) / 100
- return wait_seconds
+ return int(re.search(r"countdown\((.+),'change\(\)'\)", page).group(1)) / 100
def proceed(self, url):
- self.logDebug("Downloading..")
-
self.download(url, disposition=True)
- check = self.checkDownload({"empty": re.compile(r"^$"), "offline": re.compile("The file was deleted")})
-
+ check = self.checkDownload({'empty' : re.compile(r'^$'),
+ 'offline': re.compile("The file was deleted")})
if check == "empty":
self.logInfo(_("Downloaded File was empty"))
self.retry()
+
elif check == "offline":
self.offline()
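
proceed() now feeds checkDownload() a dict of rule names mapped to regexes and branches on the returned name. The helper itself lives in pyLoad's Plugin class; the sketch below only mimics the dispatch so the retry/offline branches above are easier to follow:

    # Not pyLoad's implementation, just the shape of the check: return the name of
    # the first rule whose regex matches the (here hypothetical) downloaded content.
    import re

    def check_download(content, rules):
        for name, rule in rules.iteritems():
            if rule.search(content):
                return name
        return None

    rules = {'empty'  : re.compile(r'^$'),
             'offline': re.compile("The file was deleted")}

    print check_download("", rules)                      # -> empty
    print check_download("The file was deleted", rules)  # -> offline
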
diff --git a/pyload/plugins/hoster/OneFichierCom.py b/pyload/plugins/hoster/OneFichierCom.py
index 6e04776b5..0e1016b0a 100644
--- a/pyload/plugins/hoster/OneFichierCom.py
+++ b/pyload/plugins/hoster/OneFichierCom.py
@@ -8,7 +8,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class OneFichierCom(SimpleHoster):
__name__ = "OneFichierCom"
__type__ = "hoster"
- __version__ = "0.73"
+ __version__ = "0.74"
__pattern__ = r'https?://(?:www\.)?(?:(?P<ID1>\w+)\.)?(?P<HOST>1fichier\.com|alterupload\.com|cjoint\.net|d(es)?fichiers\.com|dl4free\.com|megadl\.fr|mesfichiers\.org|piecejointe\.net|pjointe\.com|tenvoi\.com)(?:/\?(?P<ID2>\w+))?'
@@ -34,7 +34,7 @@ class OneFichierCom(SimpleHoster):
def setup(self):
- self.multiDL = self.premium
+ self.multiDL = self.premium
self.resumeDownload = True
@@ -46,7 +46,7 @@ class OneFichierCom(SimpleHoster):
self.wait(wait_time, reconnect)
self.retry(reason="You have to wait been each free download")
- id = self.info['ID1'] or self.info['ID2']
+ id = self.info['pattern']['ID1'] or self.info['pattern']['ID2']
url, inputs = self.parseHtmlForm('action="https://1fichier.com/\?%s' % id)
if not url:
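
This hunk, like several others in the merge, switches from self.info['ID1'] to self.info['pattern']['ID1']: SimpleHoster.getInfo() now stores the __pattern__ group dict under info['pattern'] instead of merging the groups into the top-level info dict. A small sketch of where those keys come from, using a simplified pattern and a made-up URL:

    # Simplified pattern and made-up URL, just to show the info['pattern'] layout.
    import re

    pattern = r'https?://(?:www\.)?(?:(?P<ID1>\w+)\.)?(?P<HOST>1fichier\.com)(?:/\?(?P<ID2>\w+))?'
    url     = "https://1fichier.com/?abcdef"

    info = {'pattern': re.match(pattern, url).groupdict()}
    id   = info['pattern']['ID1'] or info['pattern']['ID2']
    print id    # -> abcdef
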
diff --git a/pyload/plugins/hoster/PremiumTo.py b/pyload/plugins/hoster/PremiumTo.py
index 03ac37599..04e00849a 100644
--- a/pyload/plugins/hoster/PremiumTo.py
+++ b/pyload/plugins/hoster/PremiumTo.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from __future__ import with_statement
+
from os import remove
from os.path import exists
from urllib import quote
@@ -11,7 +13,7 @@ from pyload.utils import fs_encode
class PremiumTo(Hoster):
__name__ = "PremiumTo"
__type__ = "hoster"
- __version__ = "0.10"
+ __version__ = "0.11"
__pattern__ = r'https?://(?:www\.)?premium\.to/.+'
@@ -39,9 +41,11 @@ class PremiumTo(Hoster):
#raise timeout to 2min
self.req.setOption("timeout", 120)
- self.download(
- "http://premium.to/api/getfile.php?username=%s&password=%s&link=%s" % (self.account.username, self.account.password, quote(pyfile.url, "")),
- disposition=True)
+ self.download("http://premium.to/api/getfile.php",
+ get={'username': self.account.username,
+ 'password': self.account.password,
+ 'link' : quote(pyfile.url, "")},
+ disposition=True)
check = self.checkDownload({"nopremium": "No premium account available"})
diff --git a/pyload/plugins/hoster/PremiumizeMe.py b/pyload/plugins/hoster/PremiumizeMe.py
index 177edb1a0..ff56adf79 100644
--- a/pyload/plugins/hoster/PremiumizeMe.py
+++ b/pyload/plugins/hoster/PremiumizeMe.py
@@ -36,10 +36,11 @@ class PremiumizeMe(Hoster):
(user, data) = self.account.selectAccount()
# Get rewritten link using the premiumize.me api v1 (see https://secure.premiumize.me/?show=api)
- answer = self.load(
- "https://api.premiumize.me/pm-api/v1.php?method=directdownloadlink&params[login]=%s&params[pass]=%s&params[link]=%s" % (
- user, data['password'], pyfile.url))
- data = json_loads(answer)
+ data = json_loads(self.load("https://api.premiumize.me/pm-api/v1.php",
+ get={'method' : "directdownloadlink",
+ 'params[login]': user,
+ 'params[pass]' : data['password'],
+ 'params[link]' : pyfile.url}))
# Check status and decide what to do
status = data['status']
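
The premiumize.me request is now built from a get= dict and decoded with json_loads() in a single expression. A stdlib-only sketch of the same request/parse flow, with placeholder credentials and a made-up response body:

    # Placeholder credentials and a made-up API answer; json_loads() in pyLoad
    # wraps json.loads() in roughly this way.
    import json
    from urllib import urlencode

    params = {'method'       : "directdownloadlink",
              'params[login]': "user",
              'params[pass]' : "secret",
              'params[link]' : "http://example.com/file.bin"}
    url = "https://api.premiumize.me/pm-api/v1.php?" + urlencode(params)

    answer = '{"status": 200, "result": {"location": "http://dl.example.com/file.bin"}}'
    data   = json.loads(answer)
    print url
    print data['status'], data['result']['location']
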
diff --git a/pyload/plugins/hoster/PromptfileCom.py b/pyload/plugins/hoster/PromptfileCom.py
index 73324e6ab..027f57505 100644
--- a/pyload/plugins/hoster/PromptfileCom.py
+++ b/pyload/plugins/hoster/PromptfileCom.py
@@ -38,9 +38,8 @@ class PromptfileCom(SimpleHoster):
m = re.search(self.LINK_PATTERN, self.html)
if m is None:
self.error(_("LINK_PATTERN not found"))
- direct = m.group(1)
- self.logDebug("Found direct link: " + direct)
- self.download(direct, disposition=True)
+
+ self.download(m.group(1), disposition=True)
getInfo = create_getInfo(PromptfileCom)
diff --git a/pyload/plugins/hoster/QuickshareCz.py b/pyload/plugins/hoster/QuickshareCz.py
index 5123e5aa5..21dc7aa11 100644
--- a/pyload/plugins/hoster/QuickshareCz.py
+++ b/pyload/plugins/hoster/QuickshareCz.py
@@ -88,7 +88,6 @@ class QuickshareCz(SimpleHoster):
def handlePremium(self):
download_url = '%s/download_premium.php' % self.jsvars['server']
data = dict((x, self.jsvars[x]) for x in self.jsvars if x in ("ID1", "ID2", "ID4", "ID5"))
- self.logDebug("PREMIUM URL:" + download_url, data)
self.download(download_url, get=data)
diff --git a/pyload/plugins/hoster/RapidgatorNet.py b/pyload/plugins/hoster/RapidgatorNet.py
index 99fec9b20..5deca9ddb 100644
--- a/pyload/plugins/hoster/RapidgatorNet.py
+++ b/pyload/plugins/hoster/RapidgatorNet.py
@@ -55,7 +55,7 @@ class RapidgatorNet(SimpleHoster):
self.premium = True
self.resumeDownload = self.multiDL = self.premium
- self.chunkLimit = 1
+ self.chunkLimit = 1
def api_response(self, cmd):
diff --git a/pyload/plugins/hoster/RapidshareCom.py b/pyload/plugins/hoster/RapidshareCom.py
deleted file mode 100644
index 97823ba96..000000000
--- a/pyload/plugins/hoster/RapidshareCom.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.Hoster import Hoster
-
-
-def getInfo(urls):
- ids = ""
- names = ""
-
- p = re.compile(RapidshareCom.__pattern__)
-
- for url in urls:
- r = p.search(url)
- if r.group("name"):
- ids += "," + r.group("id")
- names += "," + r.group("name")
- elif r.group("name_new"):
- ids += "," + r.group("id_new")
- names += "," + r.group("name_new")
-
- url = "http://api.rapidshare.com/cgi-bin/rsapi.cgi?sub=checkfiles&files=%s&filenames=%s" % (ids[1:], names[1:])
-
- api = getURL(url)
- result = []
- i = 0
- for res in api.split():
- tmp = res.split(",")
- if tmp[4] in ("0", "4", "5"):
- status = 1
- elif tmp[4] == "1":
- status = 2
- else:
- status = 3
-
- result.append((tmp[1], tmp[2], status, urls[i]))
- i += 1
-
- yield result
-
-
-class RapidshareCom(Hoster):
- __name__ = "RapidshareCom"
- __type__ = "hoster"
- __version__ = "1.40"
-
- __pattern__ = r'https?://(?:www\.)?rapidshare\.com/(?:files/(?P<id>\d+)/(?P<name>[^?]+)|#!download\|(?:\w+)\|(?P<id_new>\d+)\|(?P<name_new>[^|]+))'
-
- __description__ = """Rapidshare.com hoster plugin"""
- __license__ = "GPLv3"
- __authors__ = [("spoob", "spoob@pyload.org"),
- ("RaNaN", "ranan@pyload.org"),
- ("mkaay", "mkaay@mkaay.de")]
-
-
- def setup(self):
- self.no_download = True
- self.api_data = None
- self.offset = 0
- self.dl_dict = {}
-
- self.id = None
- self.name = None
-
- self.chunkLimit = -1 if self.premium else 1
- self.multiDL = self.resumeDownload = self.premium
-
-
- def process(self, pyfile):
- self.url = pyfile.url
- self.prepare()
-
-
- def prepare(self):
- m = re.match(self.__pattern__, self.url)
-
- if m.group("name"):
- self.id = m.group("id")
- self.name = m.group("name")
- else:
- self.id = m.group("id_new")
- self.name = m.group("name_new")
-
- self.download_api_data()
- if self.api_data['status'] == "1":
- self.pyfile.name = self.get_file_name()
-
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
-
- elif self.api_data['status'] == "2":
- self.logInfo(_("Rapidshare: Traffic Share (direct download)"))
- self.pyfile.name = self.get_file_name()
-
- self.download(self.pyfile.url, get={"directstart": 1})
-
- elif self.api_data['status'] in ("0", "4", "5"):
- self.offline()
- elif self.api_data['status'] == "3":
- self.tempOffline()
- else:
- self.error(_("Unknown response code"))
-
-
- def handleFree(self):
- while self.no_download:
- self.dl_dict = self.freeWait()
-
- #tmp = "#!download|%(server)s|%(id)s|%(name)s|%(size)s"
- download = "http://%(host)s/cgi-bin/rsapi.cgi?sub=download&editparentlocation=0&bin=1&fileid=%(id)s&filename=%(name)s&dlauth=%(auth)s" % self.dl_dict
-
- self.logDebug("RS API Request: %s" % download)
- self.download(download, ref=False)
-
- check = self.checkDownload({"ip": "You need RapidPro to download more files from your IP address",
- "auth": "Download auth invalid"})
- if check == "ip":
- self.setWait(60)
- self.logInfo(_("Already downloading from this ip address, waiting 60 seconds"))
- self.wait()
- self.handleFree()
- elif check == "auth":
- self.logInfo(_("Invalid Auth Code, download will be restarted"))
- self.offset += 5
- self.handleFree()
-
-
- def handlePremium(self):
- info = self.account.getAccountInfo(self.user, True)
- self.logDebug("Use Premium Account")
- url = self.api_data['mirror']
- self.download(url, get={"directstart": 1})
-
-
- def download_api_data(self, force=False):
- """
- http://images.rapidshare.com/apidoc.txt
- """
- if self.api_data and not force:
- return
- api_url_base = "http://api.rapidshare.com/cgi-bin/rsapi.cgi"
- api_param_file = {"sub": "checkfiles", "incmd5": "1", "files": self.id, "filenames": self.name}
- html = self.load(api_url_base, cookies=False, get=api_param_file).strip()
- self.logDebug("RS INFO API: %s" % html)
- if html.startswith("ERROR"):
- return
- fields = html.split(",")
-
- # status codes:
- # 0=File not found
- # 1=File OK (Anonymous downloading)
- # 3=Server down
- # 4=File marked as illegal
- # 5=Anonymous file locked, because it has more than 10 downloads already
- # 50+n=File OK (TrafficShare direct download type "n" without any logging.)
- # 100+n=File OK (TrafficShare direct download type "n" with logging.
- # Read our privacy policy to see what is logged.)
-
- self.api_data = {"fileid": fields[0], "filename": fields[1], "size": int(fields[2]), "serverid": fields[3],
- "status": fields[4], "shorthost": fields[5], "checksum": fields[6].strip().lower()}
-
- if int(self.api_data['status']) > 100:
- self.api_data['status'] = str(int(self.api_data['status']) - 100)
- elif int(self.api_data['status']) > 50:
- self.api_data['status'] = str(int(self.api_data['status']) - 50)
-
- self.api_data['mirror'] = "http://rs%(serverid)s%(shorthost)s.rapidshare.com/files/%(fileid)s/%(filename)s" % self.api_data
-
-
- def freeWait(self):
- """downloads html with the important information
- """
- self.no_download = True
-
- id = self.id
- name = self.name
-
- prepare = "https://api.rapidshare.com/cgi-bin/rsapi.cgi?sub=download&fileid=%(id)s&filename=%(name)s&try=1&cbf=RSAPIDispatcher&cbid=1" % {
- "name": name, "id": id}
-
- self.logDebug("RS API Request: %s" % prepare)
- result = self.load(prepare, ref=False)
- self.logDebug("RS API Result: %s" % result)
-
- between_wait = re.search("You need to wait (\d+) seconds", result)
-
- if "You need RapidPro to download more files from your IP address" in result:
- self.setWait(60)
- self.logInfo(_("Already downloading from this ip address, waiting 60 seconds"))
- self.wait()
- elif ("Too many users downloading from this server right now" in result or
- "All free download slots are full" in result):
- self.setWait(120)
- self.logInfo(_("RapidShareCom: No free slots"))
- self.wait()
- elif "This file is too big to download it for free" in result:
- self.fail(_("You need a premium account for this file"))
- elif "Filename invalid." in result:
- self.fail(_("Filename reported invalid"))
- elif between_wait:
- self.setWait(int(between_wait.group(1)), True)
- self.wait()
- else:
- self.no_download = False
-
- tmp, info = result.split(":")
- data = info.split(",")
-
- dl_dict = {"id": id,
- "name": name,
- "host": data[0],
- "auth": data[1],
- "server": self.api_data['serverid'],
- "size": self.api_data['size']}
- self.setWait(int(data[2]) + 2 + self.offset)
- self.wait()
-
- return dl_dict
-
-
- def get_file_name(self):
- if self.api_data['filename']:
- return self.api_data['filename']
- return self.url.split("/")[-1]
diff --git a/pyload/plugins/hoster/RealdebridCom.py b/pyload/plugins/hoster/RealdebridCom.py
index 2ca9970e0..cc4731e3f 100644
--- a/pyload/plugins/hoster/RealdebridCom.py
+++ b/pyload/plugins/hoster/RealdebridCom.py
@@ -52,10 +52,11 @@ class RealdebridCom(Hoster):
else:
password = password[0]
- url = "https://real-debrid.com/ajax/unrestrict.php?lang=en&link=%s&password=%s&time=%s" % (
- quote(pyfile.url, ""), password, int(time() * 1000))
- page = self.load(url)
- data = json_loads(page)
+ data = json_loads(self.load("https://real-debrid.com/ajax/unrestrict.php",
+ get={'lang' : "en",
+ 'link' : quote(pyfile.url, ""),
+ 'password': password,
+ 'time' : int(time() * 1000)}))
self.logDebug("Returned Data: %s" % data)
diff --git a/pyload/plugins/hoster/RehostTo.py b/pyload/plugins/hoster/RehostTo.py
index d9855c796..99d44b1aa 100644
--- a/pyload/plugins/hoster/RehostTo.py
+++ b/pyload/plugins/hoster/RehostTo.py
@@ -35,9 +35,10 @@ class RehostTo(Hoster):
long_ses = data['long_ses']
self.logDebug("Rehost.to: Old URL: %s" % pyfile.url)
- new_url = "http://rehost.to/process_download.php?user=cookie&pass=%s&dl=%s" % (long_ses, quote(pyfile.url, ""))
#raise timeout to 2min
self.req.setOption("timeout", 120)
- self.download(new_url, disposition=True)
+ self.download("http://rehost.to/process_download.php",
+ get={'user': "cookie", 'pass': long_ses, 'dl': quote(pyfile.url, "")},
+ disposition=True)
diff --git a/pyload/plugins/hoster/RemixshareCom.py b/pyload/plugins/hoster/RemixshareCom.py
index fee898654..b3aaee1e8 100644
--- a/pyload/plugins/hoster/RemixshareCom.py
+++ b/pyload/plugins/hoster/RemixshareCom.py
@@ -52,7 +52,7 @@ class RemixshareCom(SimpleHoster):
seconds = re.search(self.WAIT_PATTERN, self.html)
if seconds:
self.logDebug("Wait " + seconds.group(1))
- self.wait(seconds.group(1))
+ self.wait(int(seconds.group(1)))
# Finally start downloading...
self.download(dl_url, disposition=True)
diff --git a/pyload/plugins/hoster/RgHostNet.py b/pyload/plugins/hoster/RgHostNet.py
index 82a5b88c5..353e62696 100644
--- a/pyload/plugins/hoster/RgHostNet.py
+++ b/pyload/plugins/hoster/RgHostNet.py
@@ -8,7 +8,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class RgHostNet(SimpleHoster):
__name__ = "RgHostNet"
__type__ = "hoster"
- __version__ = "0.02"
+ __version__ = "0.03"
__pattern__ = r'http://(?:www\.)?rghost\.net/\d+(?:r=\d+)?'
@@ -17,17 +17,10 @@ class RgHostNet(SimpleHoster):
__authors__ = [("z00nx", "z00nx0@gmail.com")]
- INFO_PATTERN = r'<h1>\s+(<a[^>]+>)?(?P<N>[^<]+)(</a>)?\s+<small[^>]+>\s+\((?P<S>[^)]+)\)\s+</small>\s+</h1>'
+ INFO_PATTERN = r'<h1>\s+(<a[^>]+>)?(?P<N>[^<]+)(</a>)?\s+<small[^>]+>\s+\((?P<S>[^)]+)\)\s+</small>\s+</h1>'
OFFLINE_PATTERN = r'File is deleted|this page is not found'
- LINK_PATTERN = r'''<a\s+href="([^"]+)"\s+class="btn\s+large\s+download"[^>]+>Download</a>'''
-
- def handleFree(self):
- m = re.search(self.LINK_PATTERN, self.html)
- if m is None:
- self.error(_("LINK_PATTERN not found"))
- download_link = m.group(1)
- self.download(download_link, disposition=True)
+ LINK_FREE_PATTERN = r'<a\s+href="([^"]+)"\s+class="btn\s+large\s+download"[^>]+>Download</a>'
getInfo = create_getInfo(RgHostNet)
diff --git a/pyload/plugins/hoster/ShareonlineBiz.py b/pyload/plugins/hoster/ShareonlineBiz.py
index 59204eb2e..1cb651b12 100644
--- a/pyload/plugins/hoster/ShareonlineBiz.py
+++ b/pyload/plugins/hoster/ShareonlineBiz.py
@@ -3,43 +3,18 @@
import re
from time import time
+from urllib import unquote
+from urlparse import urlparse
-from pyload.network.RequestFactory import getURL
-from pyload.plugins.internal.Hoster import Hoster
-from pyload.plugins.Plugin import chunks
-from pyload.plugins.internal.CaptchaService import ReCaptcha
+from module.network.RequestFactory import getURL
+from module.plugins.internal.CaptchaService import ReCaptcha
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-def getInfo(urls):
- api_url_base = "http://api.share-online.biz/linkcheck.php"
-
- urls = [url.replace("https://", "http://") for url in urls]
-
- for chunk in chunks(urls, 90):
- api_param_file = {"links": "\n".join(x.replace("http://www.share-online.biz/dl/", "").rstrip("/") for x in
- chunk)} # api only supports old style links
- html = getURL(api_url_base, post=api_param_file, decode=True)
- result = []
- for i, res in enumerate(html.split("\n")):
- if not res:
- continue
- fields = res.split(";")
-
- if fields[1] == "OK":
- status = 2
- elif fields[1] in ("DELETED", "NOT FOUND"):
- status = 1
- else:
- status = 3
-
- result.append((fields[2], int(fields[3]), status, chunk[i]))
- yield result
-
-
-class ShareonlineBiz(Hoster):
+class ShareonlineBiz(SimpleHoster):
__name__ = "ShareonlineBiz"
__type__ = "hoster"
- __version__ = "0.41"
+ __version__ = "0.44"
__pattern__ = r'https?://(?:www\.)?(share-online\.biz|egoshare\.com)/(download\.php\?id=|dl/)(?P<ID>\w+)'
@@ -51,110 +26,122 @@ class ShareonlineBiz(Hoster):
("Walter Purcaro", "vuolter@gmail.com")]
- ERROR_INFO_PATTERN = r'<p class="b">Information:</p>\s*<div>\s*<strong>(.*?)</strong>'
+ URL_REPLACEMENTS = [(__pattern__ + ".*", "http://www.share-online.biz/dl/\g<ID>")]
+ RECAPTCHA_KEY = "6LdatrsSAAAAAHZrB70txiV5p-8Iv8BtVxlTtjKX"
- def setup(self):
- self.file_id = re.match(self.__pattern__, self.pyfile.url).group("ID")
- self.pyfile.url = "http://www.share-online.biz/dl/" + self.file_id
+ ERROR_INFO_PATTERN = r'<p class="b">Information:</p>\s*<div>\s*<strong>(.*?)</strong>'
- self.resumeDownload = self.premium
- self.multiDL = False
- self.check_data = None
+ @classmethod
+ def getInfo(cls, url="", html=""):
+ info = {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3 if url else 1, 'url': url}
+ if url:
+ info['pattern'] = re.match(cls.__pattern__, url).groupdict()
- def process(self, pyfile):
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
+ field = getURL("http://api.share-online.biz/linkcheck.php",
+ get={'md5': "1"},
+ post={'links': info['pattern']['ID']},
+ decode=True).split(";")
- if self.api_data:
- self.check_data = {"size": int(self.api_data['size']), "md5": self.api_data['md5']}
+ if field[1] == "OK":
+ info['fileid'] = field[0]
+ info['status'] = 2
+ info['name'] = field[2]
+ info['size'] = field[3] #: in bytes
+ info['md5'] = field[4].strip().lower().replace("\n\n", "") #: md5
+ elif field[1] in ("DELETED", "NOT FOUND"):
+ info['status'] = 1
- def loadAPIData(self):
- api_url_base = "http://api.share-online.biz/linkcheck.php?md5=1"
- api_param_file = {"links": self.file_id} #: api only supports old style links
- html = self.load(api_url_base, cookies=False, post=api_param_file, decode=True)
-
- fields = html.split(";")
- self.api_data = {"fileid": fields[0],
- "status": fields[1]}
- if not self.api_data['status'] == "OK":
- self.offline()
- else:
- self.api_data['filename'] = fields[2]
- self.api_data['size'] = fields[3] #: in bytes
- self.api_data['md5'] = fields[4].strip().lower().replace("\n\n", "") #: md5
+ return info
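
The new classmethod getInfo() queries the linkcheck API for a single ID and unpacks the semicolon-separated answer into fileid, status, name, size and md5. A sketch of that parsing step against a made-up response line; the field order follows the code above:

    # Made-up API answer; real responses come from http://api.share-online.biz/linkcheck.php
    field = "ABC123;OK;example.bin;1048576;d41d8cd98f00b204e9800998ecf8427e\n\n".split(";")

    info = {'status': 3}
    if field[1] == "OK":
        info['fileid'] = field[0]
        info['status'] = 2
        info['name']   = field[2]
        info['size']   = field[3]                                   #: in bytes, still a string
        info['md5']    = field[4].strip().lower().replace("\n\n", "")
    elif field[1] in ("DELETED", "NOT FOUND"):
        info['status'] = 1

    print info
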
- def handleFree(self):
- self.loadAPIData()
- self.pyfile.name = self.api_data['filename']
- self.pyfile.size = int(self.api_data['size'])
-
- self.html = self.load(self.pyfile.url, cookies=True) #: refer, stuff
- self.setWait(3)
- self.wait()
-
- self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free": "1", "choice": "free"}, decode=True)
- self.checkErrors()
+ def setup(self):
+ self.resumeDownload = self.premium
+ self.multiDL = False
- m = re.search(r'var wait=(\d+);', self.html)
+ def handleCaptcha(self):
recaptcha = ReCaptcha(self)
+
for _i in xrange(5):
- challenge, response = recaptcha.challenge("6LdatrsSAAAAAHZrB70txiV5p-8Iv8BtVxlTtjKX")
+ challenge, response = recaptcha.challenge(self.RECAPTCHA_KEY)
+
+ m = re.search(r'var wait=(\d+);', self.html)
self.setWait(int(m.group(1)) if m else 30)
+
res = self.load("%s/free/captcha/%d" % (self.pyfile.url, int(time() * 1000)),
- post={'dl_free': '1',
+ post={'dl_free' : "1",
'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field': response})
-
+ 'recaptcha_response_field' : response})
if not res == '0':
self.correctCaptcha()
- break
+ return res
else:
self.invalidCaptcha()
else:
self.invalidCaptcha()
self.fail(_("No valid captcha solution received"))
+
+ def handleFree(self):
+ self.html = self.load(self.pyfile.url, cookies=True) #: refer, stuff
+
+ self.wait(3)
+
+ self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free": "1", "choice": "free"}, decode=True)
+
+ self.checkErrors()
+
+ res = self.handleCaptcha()
+
download_url = res.decode("base64")
+
if not download_url.startswith("http://"):
self.error(_("Wrong download url"))
self.wait()
+
self.download(download_url)
+
+
+ def checkFile(self):
# check download
check = self.checkDownload({
- "cookie": re.compile(r'<div id="dl_failure"'),
- "fail": re.compile(r"<title>Share-Online")
+ 'empty' : re.compile(r"^$"),
+ 'cookie': re.compile(r'<div id="dl_failure"'),
+ 'fail' : re.compile(r"<title>Share-Online")
})
- if check == "cookie":
+
+ if check == "empty":
+ self.fail(_("Empty file"))
+
+ elif check == "cookie":
self.invalidCaptcha()
- self.retry(5, 60, "Cookie failure")
+ self.retry(5, 60, _("Cookie failure"))
+
elif check == "fail":
self.invalidCaptcha()
- self.retry(5, 5 * 60, "Download failed")
- else:
- self.correctCaptcha()
+ self.retry(5, 5 * 60, _("Download failed"))
def handlePremium(self): #: should be working better loading (account) api internally
self.account.getAccountInfo(self.user, True)
+
html = self.load("http://api.share-online.biz/account.php",
{"username": self.user, "password": self.account.accounts[self.user]['password'],
- "act": "download", "lid": self.file_id})
+ "act": "download", "lid": self.info['fileid']})
self.api_data = dlinfo = {}
+
for line in html.splitlines():
key, value = line.split(": ")
dlinfo[key.lower()] = value
self.logDebug(dlinfo)
+
if not dlinfo['status'] == "online":
self.offline()
else:
@@ -162,6 +149,7 @@ class ShareonlineBiz(Hoster):
self.pyfile.size = int(dlinfo['size'])
dlLink = dlinfo['url']
+
if dlLink == "server_under_maintenance":
self.tempOffline()
else:
@@ -172,25 +160,32 @@ class ShareonlineBiz(Hoster):
def checkErrors(self):
m = re.search(r"/failure/(.*?)/1", self.req.lastEffectiveURL)
if m is None:
+ self.info.pop('error', None)
return
- err = m.group(1)
+ errmsg = m.group(1).lower()
+
try:
- self.logError(err, re.search(self.ERROR_INFO_PATTERN, self.html).group(1))
- except Exception:
- self.logError(err, "Unknown error occurred")
+ self.logError(errmsg, re.search(self.ERROR_INFO_PATTERN, self.html).group(1))
+ except:
+ self.logError("Unknown error occurred", errmsg)
- if err == "invalid":
+        if errmsg == "invalid":
self.fail(_("File not available"))
- elif err in ("freelimit", "size", "proxy"):
+
+ elif errmsg in ("freelimit", "size", "proxy"):
self.fail(_("Premium account needed"))
+
+ elif errmsg in ("expired", "server"):
+ self.retry(wait_time=600, reason=errmsg)
+
+ elif 'slot' in errmsg:
+ self.wantReconnect = True
+ self.retry(24, 3600, errmsg)
+
else:
- if err in 'server':
- self.setWait(600, False)
- elif err in 'expired':
- self.setWait(30, False)
- else:
- self.setWait(300, True)
+ self.wantReconnect = True
+ self.retry(wait_time=60, reason=errmsg)
+
- self.wait()
- self.retry(max_tries=25, reason=err)
+getInfo = create_getInfo(ShareonlineBiz)
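
The reworked checkErrors() lower-cases the failure code taken from the redirect URL and maps it onto actions: "invalid" fails outright, the premium-only codes fail with a hint, "expired" and "server" retry after ten minutes, slot errors force a reconnect, and everything else retries after a minute. A compact sketch of that mapping as a plain function; the codes are the ones named in the diff, and the return values are simplified placeholders for fail/retry:

    # Simplified dispatch over the error codes handled above; returns a description
    # instead of calling self.fail()/self.retry() like the plugin does.
    def map_error(errmsg):
        errmsg = errmsg.lower()
        if errmsg == "invalid":
            return "fail: File not available"
        elif errmsg in ("freelimit", "size", "proxy"):
            return "fail: Premium account needed"
        elif errmsg in ("expired", "server"):
            return "retry after 600s"
        elif "slot" in errmsg:
            return "reconnect, retry up to 24 times, 3600s apart"
        else:
            return "reconnect, retry after 60s"

    for code in ("invalid", "freelimit", "server", "no_slot_free", "cookie"):
        print code, "->", map_error(code)
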
diff --git a/pyload/plugins/hoster/SimplyPremiumCom.py b/pyload/plugins/hoster/SimplyPremiumCom.py
index bb431a5dd..b0ea9f90f 100644
--- a/pyload/plugins/hoster/SimplyPremiumCom.py
+++ b/pyload/plugins/hoster/SimplyPremiumCom.py
@@ -34,7 +34,7 @@ class SimplyPremiumCom(Hoster):
else:
self.logDebug("Old URL: %s" % pyfile.url)
for i in xrange(5):
- page = self.load('http://www.simply-premium.com/premium.php?info&link=' + pyfile.url)
+ page = self.load("http://www.simply-premium.com/premium.php", get={'info': "", 'link': pyfile.url})
self.logDebug("JSON data: " + page)
if page != '':
break
diff --git a/pyload/plugins/hoster/SimplydebridCom.py b/pyload/plugins/hoster/SimplydebridCom.py
index 5092be32a..c68c6bdd0 100644
--- a/pyload/plugins/hoster/SimplydebridCom.py
+++ b/pyload/plugins/hoster/SimplydebridCom.py
@@ -18,8 +18,9 @@ class SimplydebridCom(Hoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
- self.chunkLimit = 1
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = 1
def process(self, pyfile):
@@ -46,7 +47,7 @@ class SimplydebridCom(Hoster):
self.logDebug("New URL: %s" % new_url)
if not re.match(self.__pattern__, new_url):
- page = self.load('http://simply-debrid.com/api.php', get={'dl': new_url}) # +'&u='+self.user+'&p='+self.account.getAccountData(self.user)['password'])
+ page = self.load("http://simply-debrid.com/api.php", get={'dl': new_url}) # +'&u='+self.user+'&p='+self.account.getAccountData(self.user)['password'])
if 'tiger Link' in page or 'Invalid Link' in page or ('API' in page and 'ERROR' in page):
self.fail(_("Unable to unrestrict link"))
new_url = page
diff --git a/pyload/plugins/hoster/StreamCz.py b/pyload/plugins/hoster/StreamCz.py
index c9d00863e..fcd69ead5 100644
--- a/pyload/plugins/hoster/StreamCz.py
+++ b/pyload/plugins/hoster/StreamCz.py
@@ -39,8 +39,8 @@ class StreamCz(Hoster):
def setup(self):
- self.multiDL = True
self.resumeDownload = True
+ self.multiDL = True
def process(self, pyfile):
diff --git a/pyload/plugins/hoster/TurbobitNet.py b/pyload/plugins/hoster/TurbobitNet.py
index e0691942c..70844cadb 100644
--- a/pyload/plugins/hoster/TurbobitNet.py
+++ b/pyload/plugins/hoster/TurbobitNet.py
@@ -17,7 +17,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, t
class TurbobitNet(SimpleHoster):
__name__ = "TurbobitNet"
__type__ = "hoster"
- __version__ = "0.15"
+ __version__ = "0.16"
__pattern__ = r'http://(?:www\.)?turbobit\.net/(?:download/free/)?(?P<ID>\w+)'
@@ -42,7 +42,7 @@ class TurbobitNet(SimpleHoster):
def handleFree(self):
- self.url = "http://turbobit.net/download/free/%s" % self.info['ID']
+ self.url = "http://turbobit.net/download/free/%s" % self.info['pattern']['ID']
self.html = self.load(self.url, ref=True, decode=True)
rtUpdate = self.getRtUpdate()
@@ -130,7 +130,7 @@ class TurbobitNet(SimpleHoster):
for b in [1, 3]:
self.jscode = "var id = \'%s\';var b = %d;var inn = \'%s\';%sout" % (
- self.info['ID'], b, quote(fun), rtUpdate)
+ self.info['pattern']['ID'], b, quote(fun), rtUpdate)
try:
out = self.js.eval(self.jscode)
diff --git a/pyload/plugins/hoster/TwoSharedCom.py b/pyload/plugins/hoster/TwoSharedCom.py
index 24dd92895..ee50c8712 100644
--- a/pyload/plugins/hoster/TwoSharedCom.py
+++ b/pyload/plugins/hoster/TwoSharedCom.py
@@ -25,7 +25,8 @@ class TwoSharedCom(SimpleHoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
+ self.resumeDownload = True
+ self.multiDL = True
def handleFree(self):
diff --git a/pyload/plugins/hoster/UlozTo.py b/pyload/plugins/hoster/UlozTo.py
index 402a5e3e6..b331dd4f1 100644
--- a/pyload/plugins/hoster/UlozTo.py
+++ b/pyload/plugins/hoster/UlozTo.py
@@ -41,7 +41,7 @@ class UlozTo(SimpleHoster):
def setup(self):
- self.multiDL = self.premium
+ self.multiDL = self.premium
self.resumeDownload = True
diff --git a/pyload/plugins/hoster/UnrestrictLi.py b/pyload/plugins/hoster/UnrestrictLi.py
index 583a9f4a9..998c097fd 100644
--- a/pyload/plugins/hoster/UnrestrictLi.py
+++ b/pyload/plugins/hoster/UnrestrictLi.py
@@ -80,7 +80,7 @@ class UnrestrictLi(Hoster):
self.download(new_url, disposition=True)
if self.getConfig("history"):
- self.load("https://unrestrict.li/history/&delete=all")
+ self.load("https://unrestrict.li/history/", get={'delete': "all"})
self.logInfo(_("Download history deleted"))
diff --git a/pyload/plugins/hoster/UpleaCom.py b/pyload/plugins/hoster/UpleaCom.py
index 395f71bbf..ca639a954 100644
--- a/pyload/plugins/hoster/UpleaCom.py
+++ b/pyload/plugins/hoster/UpleaCom.py
@@ -46,7 +46,7 @@ class UpleaCom(XFSHoster):
m = re.search(self.WAIT_PATTERN, self.html)
if m:
- self.wait(m.group(1), True)
+ self.wait(int(m.group(1)), True)
self.retry()
m = re.search(self.LINK_PATTERN, self.html)
diff --git a/pyload/plugins/hoster/UploadedTo.py b/pyload/plugins/hoster/UploadedTo.py
index c39df66ea..ea55c3398 100644
--- a/pyload/plugins/hoster/UploadedTo.py
+++ b/pyload/plugins/hoster/UploadedTo.py
@@ -113,7 +113,7 @@ class UploadedTo(Hoster):
def setup(self):
- self.multiDL = self.resumeDownload = self.premium
+ self.multiDL = self.resumeDownload = self.premium
self.chunkLimit = 1 # critical problems with more chunks
self.fileID = getID(self.pyfile.url)
diff --git a/pyload/plugins/hoster/UploadheroCom.py b/pyload/plugins/hoster/UploadheroCom.py
index 97100b17d..857cf066d 100644
--- a/pyload/plugins/hoster/UploadheroCom.py
+++ b/pyload/plugins/hoster/UploadheroCom.py
@@ -75,5 +75,7 @@ class UploadheroCom(SimpleHoster):
self.wait(wait_time, True)
self.retry()
+ self.info.pop('error', None)
+
getInfo = create_getInfo(UploadheroCom)
diff --git a/pyload/plugins/hoster/UploadingCom.py b/pyload/plugins/hoster/UploadingCom.py
index 3c0bc7ff9..bc409e1cb 100644
--- a/pyload/plugins/hoster/UploadingCom.py
+++ b/pyload/plugins/hoster/UploadingCom.py
@@ -11,7 +11,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, t
class UploadingCom(SimpleHoster):
__name__ = "UploadingCom"
__type__ = "hoster"
- __version__ = "0.38"
+ __version__ = "0.39"
__pattern__ = r'http://(?:www\.)?uploading\.com/files/(?:get/)?(?P<ID>\w+)'
@@ -47,7 +47,7 @@ class UploadingCom(SimpleHoster):
def handlePremium(self):
postData = {'action': 'get_link',
- 'code': self.info['ID'],
+ 'code': self.info['pattern']['ID'],
'pass': 'undefined'}
self.html = self.load('http://uploading.com/files/get/?JsHttpRequest=%d-xml' % timestamp(), post=postData)
@@ -70,7 +70,7 @@ class UploadingCom(SimpleHoster):
self.req.http.c.setopt(HTTPHEADER, ["X-Requested-With: XMLHttpRequest"])
self.req.http.lastURL = self.pyfile.url
- res = json_loads(self.load(ajax_url, post={'action': 'second_page', 'code': self.info['ID']}))
+ res = json_loads(self.load(ajax_url, post={'action': 'second_page', 'code': self.info['pattern']['ID']}))
if 'answer' in res and 'wait_time' in res['answer']:
wait_time = int(res['answer']['wait_time'])
@@ -79,7 +79,7 @@ class UploadingCom(SimpleHoster):
else:
self.error(_("No AJAX/WAIT"))
- res = json_loads(self.load(ajax_url, post={'action': 'get_link', 'code': self.info['ID'], 'pass': 'false'}))
+ res = json_loads(self.load(ajax_url, post={'action': 'get_link', 'code': self.info['pattern']['ID'], 'pass': 'false'}))
if 'answer' in res and 'link' in res['answer']:
url = res['answer']['link']
diff --git a/pyload/plugins/hoster/UpstoreNet.py b/pyload/plugins/hoster/UpstoreNet.py
index 255526aa2..328b42b12 100644
--- a/pyload/plugins/hoster/UpstoreNet.py
+++ b/pyload/plugins/hoster/UpstoreNet.py
@@ -46,7 +46,7 @@ class UpstoreNet(SimpleHoster):
m = re.search(self.WAIT_PATTERN, self.html)
if m is None:
self.error(_("Wait pattern not found"))
- wait_time = m.group(1)
+ wait_time = int(m.group(1))
# then, do the waiting
self.wait(wait_time)
diff --git a/pyload/plugins/hoster/VeohCom.py b/pyload/plugins/hoster/VeohCom.py
index a1aa9896f..b2f8c69ed 100644
--- a/pyload/plugins/hoster/VeohCom.py
+++ b/pyload/plugins/hoster/VeohCom.py
@@ -27,8 +27,9 @@ class VeohCom(SimpleHoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
- self.chunkLimit = -1
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = -1
def handleFree(self):
diff --git a/pyload/plugins/hoster/VimeoCom.py b/pyload/plugins/hoster/VimeoCom.py
index a24eedee2..8b4d5bafb 100644
--- a/pyload/plugins/hoster/VimeoCom.py
+++ b/pyload/plugins/hoster/VimeoCom.py
@@ -29,8 +29,9 @@ class VimeoCom(SimpleHoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
- self.chunkLimit = -1
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = -1
def handleFree(self):
diff --git a/pyload/plugins/hoster/WebshareCz.py b/pyload/plugins/hoster/WebshareCz.py
index bb41fbd26..0a6063062 100644
--- a/pyload/plugins/hoster/WebshareCz.py
+++ b/pyload/plugins/hoster/WebshareCz.py
@@ -35,13 +35,14 @@ class WebshareCz(SimpleHoster):
def handleFree(self):
api_data = self.load('https://webshare.cz/api/file_link/', post={'ident': self.fid})
+
self.logDebug("API data: " + api_data)
+
m = re.search('<link>(.+)</link>', api_data)
if m is None:
self.error(_("Unable to detect direct link"))
- direct = m.group(1)
- self.logDebug("Direct link: " + direct)
- self.download(direct, disposition=True)
+
+ self.download(m.group(1), disposition=True)
def getFileInfo(self):
diff --git a/pyload/plugins/hoster/YoutubeCom.py b/pyload/plugins/hoster/YoutubeCom.py
index 7fdf848c1..7570ddc1d 100644
--- a/pyload/plugins/hoster/YoutubeCom.py
+++ b/pyload/plugins/hoster/YoutubeCom.py
@@ -16,11 +16,11 @@ def which(program):
Courtesy of http://stackoverflow.com/a/377028/675646"""
-
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
+
if fpath:
if is_exe(program):
return program
@@ -60,31 +60,32 @@ class YoutubeCom(Hoster):
invalidChars = u'\u2605:?><"|\\'
# name, width, height, quality ranking, 3D
- formats = {5: (".flv", 400, 240, 1, False),
- 6: (".flv", 640, 400, 4, False),
- 17: (".3gp", 176, 144, 0, False),
- 18: (".mp4", 480, 360, 2, False),
- 22: (".mp4", 1280, 720, 8, False),
- 43: (".webm", 640, 360, 3, False),
- 34: (".flv", 640, 360, 4, False),
- 35: (".flv", 854, 480, 6, False),
- 36: (".3gp", 400, 240, 1, False),
- 37: (".mp4", 1920, 1080, 9, False),
- 38: (".mp4", 4096, 3072, 10, False),
- 44: (".webm", 854, 480, 5, False),
- 45: (".webm", 1280, 720, 7, False),
- 46: (".webm", 1920, 1080, 9, False),
- 82: (".mp4", 640, 360, 3, True),
- 83: (".mp4", 400, 240, 1, True),
- 84: (".mp4", 1280, 720, 8, True),
- 85: (".mp4", 1920, 1080, 9, True),
- 100: (".webm", 640, 360, 3, True),
- 101: (".webm", 640, 360, 4, True),
- 102: (".webm", 1280, 720, 8, True)}
+ formats = {5 : (".flv" , 400 , 240 , 1 , False),
+ 6 : (".flv" , 640 , 400 , 4 , False),
+ 17 : (".3gp" , 176 , 144 , 0 , False),
+ 18 : (".mp4" , 480 , 360 , 2 , False),
+ 22 : (".mp4" , 1280, 720 , 8 , False),
+ 43 : (".webm", 640 , 360 , 3 , False),
+ 34 : (".flv" , 640 , 360 , 4 , False),
+ 35 : (".flv" , 854 , 480 , 6 , False),
+ 36 : (".3gp" , 400 , 240 , 1 , False),
+ 37 : (".mp4" , 1920, 1080, 9 , False),
+ 38 : (".mp4" , 4096, 3072, 10, False),
+ 44 : (".webm", 854 , 480 , 5 , False),
+ 45 : (".webm", 1280, 720 , 7 , False),
+ 46 : (".webm", 1920, 1080, 9 , False),
+ 82 : (".mp4" , 640 , 360 , 3 , True ),
+ 83 : (".mp4" , 400 , 240 , 1 , True ),
+ 84 : (".mp4" , 1280, 720 , 8 , True ),
+ 85 : (".mp4" , 1920, 1080, 9 , True ),
+ 100: (".webm", 640 , 360 , 3 , True ),
+ 101: (".webm", 640 , 360 , 4 , True ),
+ 102: (".webm", 1280, 720 , 8 , True )}
def setup(self):
- self.resumeDownload = self.multiDL = True
+ self.resumeDownload = True
+ self.multiDL = True
def process(self, pyfile):
diff --git a/pyload/plugins/hoster/ZeveraCom.py b/pyload/plugins/hoster/ZeveraCom.py
index c0c10215d..d298d42f0 100644
--- a/pyload/plugins/hoster/ZeveraCom.py
+++ b/pyload/plugins/hoster/ZeveraCom.py
@@ -16,8 +16,9 @@ class ZeveraCom(Hoster):
def setup(self):
- self.resumeDownload = self.multiDL = True
- self.chunkLimit = 1
+ self.resumeDownload = True
+ self.multiDL = True
+ self.chunkLimit = 1
def process(self, pyfile):
diff --git a/pyload/plugins/hoster/ZippyshareCom.py b/pyload/plugins/hoster/ZippyshareCom.py
index 53b93d928..5850a6a6a 100644
--- a/pyload/plugins/hoster/ZippyshareCom.py
+++ b/pyload/plugins/hoster/ZippyshareCom.py
@@ -2,8 +2,7 @@
import re
-from os import path
-from urllib import unquote
+from os.path import join
from urlparse import urljoin
from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
@@ -12,7 +11,7 @@ from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class ZippyshareCom(SimpleHoster):
__name__ = "ZippyshareCom"
__type__ = "hoster"
- __version__ = "0.60"
+ __version__ = "0.62"
__pattern__ = r'(?P<HOST>http://www\d{0,2}\.zippyshare\.com)/v(?:/|iew\.jsp.*key=)(?P<KEY>\d+)'
@@ -40,12 +39,6 @@ class ZippyshareCom(SimpleHoster):
self.download(url)
- def getFileInfo(self):
- info = super(ZippyshareCom, self).getFileInfo()
- self.pyfile.name = info['name'] = unquote(info['name'])
- return info
-
-
def get_checksum(self):
try:
m = re.search(r'\+[ ]*\((\d+)[ ]*\%[ ]*(\d+)[ ]*\+[ ]*(\d+)[ ]*\%[ ]*(\d+)\)[ ]*\+', self.html)
@@ -64,8 +57,8 @@ class ZippyshareCom(SimpleHoster):
def get_link(self):
checksum = self.get_checksum()
- p_url = path.join("d", self.info['KEY'], str(checksum), self.pyfile.name)
- dl_link = urljoin(self.info['HOST'], p_url)
+ p_url = join("d", self.info['pattern']['KEY'], str(checksum), self.pyfile.name)
+ dl_link = urljoin(self.info['pattern']['HOST'], p_url)
return dl_link
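
get_link() now builds the relative path with os.path.join and resolves it against the HOST pattern group with urljoin. A small sketch with made-up values; as a side note on the design choice, os.path.join uses the platform separator, so on Windows it yields backslashes that urljoin will not treat as path separators, whereas "/".join would be separator-safe:

    # Made-up KEY/HOST/checksum values; join() here is os.path.join as in the diff.
    from os.path import join
    from urlparse import urljoin

    info = {'pattern': {'KEY': "123456789", 'HOST': "http://www40.zippyshare.com"}}
    checksum  = 865
    file_name = "example.bin"

    p_url   = join("d", info['pattern']['KEY'], str(checksum), file_name)
    dl_link = urljoin(info['pattern']['HOST'], p_url)
    print dl_link   # -> http://www40.zippyshare.com/d/123456789/865/example.bin (on POSIX)
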
diff --git a/pyload/plugins/internal/BasePlugin.py b/pyload/plugins/internal/BasePlugin.py
index f4abc1a15..a9d81d079 100644
--- a/pyload/plugins/internal/BasePlugin.py
+++ b/pyload/plugins/internal/BasePlugin.py
@@ -3,7 +3,7 @@
import re
from urllib import unquote
-from urlparse import urlparse
+from urlparse import urljoin, urlparse
from pyload.network.HTTPRequest import BadHeader
from pyload.plugins.internal.SimpleHoster import create_getInfo
@@ -13,7 +13,7 @@ from pyload.plugins.internal.Hoster import Hoster
class BasePlugin(Hoster):
__name__ = "BasePlugin"
__type__ = "hoster"
- __version__ = "0.23"
+ __version__ = "0.25"
__pattern__ = r'^unmatchable$'
@@ -25,7 +25,7 @@ class BasePlugin(Hoster):
@classmethod
def getInfo(cls, url="", html=""): #@TODO: Move to hoster class in 0.4.10
- return {'name': urlparse(url).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3, 'url': url or ""}
+ return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3 if url else 1, 'url': unquote(url) or ""}
def setup(self):
@@ -38,69 +38,69 @@ class BasePlugin(Hoster):
pyfile.name = self.getInfo(pyfile.url)['name']
- if pyfile.url.startswith("http"):
- for _i in xrange(2):
- try:
- self.downloadFile(pyfile)
+ if not pyfile.url.startswith("http"):
+ self.fail(_("No plugin matched"))
+
+ for _i in xrange(5):
+ try:
+ self.downloadFile(pyfile)
- except BadHeader, e:
- if e.code is 404:
- self.offline()
+ except BadHeader, e:
+ if e.code is 404:
+ self.offline()
- elif e.code in (401, 403):
- self.logDebug("Auth required")
+ elif e.code in (401, 403):
+ self.logDebug("Auth required", "Received HTTP status code: %d" % e.code)
- account = self.core.accountManager.getAccountPlugin('Http')
- servers = [x['login'] for x in account.getAllAccounts()]
- server = urlparse(pyfile.url).netloc
+ account = self.core.accountManager.getAccountPlugin('Http')
+ servers = [x['login'] for x in account.getAllAccounts()]
+ server = urlparse(pyfile.url).netloc
- if server in servers:
- self.logDebug("Logging on to %s" % server)
- self.req.addAuth(account.accounts[server]['password'])
- else:
- for pwd in pyfile.package().password.splitlines():
- if ":" in pwd:
- self.req.addAuth(pwd.strip())
- break
- else:
- self.fail(_("Authorization required (username:password)"))
+ if server in servers:
+ self.logDebug("Logging on to %s" % server)
+ self.req.addAuth(account.accounts[server]['password'])
else:
- self.fail(e)
+ for pwd in self.getPassword().splitlines():
+ if ":" in pwd:
+ self.req.addAuth(pwd.strip())
+ break
+ else:
+ self.fail(_("Authorization required"))
else:
- break
+ self.fail(e)
else:
- self.fail(_("No file downloaded")) #@TODO: Move to hoster class (check if self.lastDownload) in 0.4.10
+ break
else:
- self.fail(_("No plugin matched"))
+ self.fail(_("No file downloaded")) #@TODO: Move to hoster class in 0.4.10
- # if self.checkDownload({'empty': re.compile(r"^$")}) is "empty":
- # self.fail(_("Empty file"))
+ if self.checkDownload({'empty': re.compile(r"^$")}) is "empty": #@TODO: Move to hoster in 0.4.10
+ self.fail(_("Empty file"))
def downloadFile(self, pyfile):
url = pyfile.url
- for _i in xrange(5):
- header = self.load(url, just_header=True)
-
- # self.load does not raise a BadHeader on 404 responses, do it here
- if 'code' in header and header['code'] == 404:
- raise BadHeader(404)
+ for i in xrange(1, 7): #@TODO: retrieve the pycurl.MAXREDIRS value set by req
+ header = self.load(url, ref=True, cookies=True, just_header=True, decode=True)
- if 'location' in header:
- self.logDebug("Location: " + header['location'])
-
- base = re.match(r'https?://[^/]+', url).group(0)
+ if 'location' not in header or not header['location']:
+ if 'code' in header and header['code'] not in (200, 201, 203, 206):
+ self.logDebug("Received HTTP status code: %d" % header['code'])
+ self.fail(_("File not found"))
+ else:
+ break
- if header['location'].startswith("http"):
- url = header['location']
+ location = header['location']
- elif header['location'].startswith("/"):
- url = base + unquote(header['location'])
+ self.logDebug("Redirect #%d to: %s" % (i, location))
- else:
- url = '%s/%s' % (base, unquote(header['location']))
+ if urlparse(location).scheme:
+ url = location
else:
- break
+ p = urlparse(url)
+ base = "%s://%s" % (p.scheme, p.netloc)
+ url = urljoin(base, location)
+ else:
+ self.fail(_("Too many redirects"))
- self.download(url, disposition=True)
+ self.download(unquote(url), disposition=True)
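
downloadFile() now follows redirects by hand for a handful of hops (six in the loop above): it stops at the first response without a Location header, fails on unexpected status codes, and resolves relative Location values against the current URL with urljoin. A stdlib-only sketch of that resolution step over a hypothetical redirect chain, with header fetching faked by a dict lookup:

    # Fake redirect chain instead of real HTTP; the resolution logic mirrors the loop above.
    from urlparse import urljoin, urlparse

    redirects = {"http://example.com/start": "/files/abc",                          # relative location
                 "http://example.com/files/abc": "http://cdn.example.com/abc"}      # absolute location

    url = "http://example.com/start"
    for i in xrange(1, 7):
        location = redirects.get(url)          # stands in for header.get('location')
        if not location:
            break                              # no redirect left -> download from url
        if urlparse(location).scheme:
            url = location
        else:
            p   = urlparse(url)
            url = urljoin("%s://%s" % (p.scheme, p.netloc), location)
        print "Redirect #%d to: %s" % (i, url)
    else:
        print "Too many redirects"

    print "final:", url
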
diff --git a/pyload/plugins/internal/DeadCrypter.py b/pyload/plugins/internal/DeadCrypter.py
index 3510e1466..81b68e00a 100644
--- a/pyload/plugins/internal/DeadCrypter.py
+++ b/pyload/plugins/internal/DeadCrypter.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
+from urllib import unquote
from urlparse import urlparse
from pyload.plugins.internal.Crypter import Crypter as _Crypter
@@ -9,7 +10,7 @@ from pyload.plugins.internal.SimpleCrypter import create_getInfo
class DeadCrypter(_Crypter):
__name__ = "DeadCrypter"
__type__ = "crypter"
- __version__ = "0.03"
+ __version__ = "0.04"
__pattern__ = r'^unmatchable$'
@@ -20,7 +21,7 @@ class DeadCrypter(_Crypter):
@classmethod
def getInfo(cls, url="", html=""):
- return {'name': urlparse(url).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 1, 'url': url or ""}
+ return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 1, 'url': url}
def setup(self):
diff --git a/pyload/plugins/internal/DeadHoster.py b/pyload/plugins/internal/DeadHoster.py
index a7e5093d3..f066883c6 100644
--- a/pyload/plugins/internal/DeadHoster.py
+++ b/pyload/plugins/internal/DeadHoster.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
+from urllib import unquote
from urlparse import urlparse
from pyload.plugins.internal.Hoster import Hoster as _Hoster
@@ -9,7 +10,7 @@ from pyload.plugins.internal.SimpleHoster import create_getInfo
class DeadHoster(_Hoster):
__name__ = "DeadHoster"
__type__ = "hoster"
- __version__ = "0.13"
+ __version__ = "0.14"
__pattern__ = r'^unmatchable$'
@@ -20,7 +21,7 @@ class DeadHoster(_Hoster):
@classmethod
def getInfo(cls, url="", html=""):
- return {'name': urlparse(url).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 1, 'url': url or ""}
+ return {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 1, 'url': url}
def setup(self):
diff --git a/pyload/plugins/internal/MultiHoster.py b/pyload/plugins/internal/MultiHoster.py
index 4eb4a6f31..4d466d350 100644
--- a/pyload/plugins/internal/MultiHoster.py
+++ b/pyload/plugins/internal/MultiHoster.py
@@ -16,18 +16,21 @@ class MultiHoster(Addon):
__authors__ = [("pyLoad Team", "admin@pyload.org")]
- interval = 24 * 60 * 60 #: reload hosters daily
-
- HOSTER_REPLACEMENTS = [("2shared.com", "twoshared.com"), ("4shared.com", "fourshared.com"), ("cloudnator.com", "shragle.com"),
- ("ifile.it", "filecloud.io"), ("easy-share.com", "crocko.com"), ("freakshare.net", "freakshare.com"),
- ("hellshare.com", "hellshare.cz"), ("share-rapid.cz", "sharerapid.com"), ("sharerapid.cz", "sharerapid.com"),
- ("ul.to", "uploaded.to"), ("uploaded.net", "uploaded.to"), ("1fichier.com", "onefichier.com")]
+ interval = 12 * 60 * 60 #: reload hosters every 12h
+
+ HOSTER_REPLACEMENTS = [("1fichier.com", "onefichier.com"), ("2shared.com", "twoshared.com"),
+ ("4shared.com", "fourshared.com"), ("cloudnator.com", "shragle.com"),
+ ("easy-share.com", "crocko.com"), ("freakshare.net", "freakshare.com"),
+ ("hellshare.com", "hellshare.cz"), ("ifile.it", "filecloud.io"),
+ ("putlocker.com", "firedrive.com"), ("share-rapid.cz", "multishare.cz"),
+ ("sharerapid.cz", "multishare.cz"), ("ul.to", "uploaded.to"),
+ ("uploaded.net", "uploaded.to")]
HOSTER_EXCLUDED = []
def setup(self):
- self.hosters = []
- self.supported = []
+ self.hosters = []
+ self.supported = []
self.new_supported = []
@@ -110,8 +113,10 @@ class MultiHoster(Addon):
"""reload hoster list periodically"""
self.logInfo(_("Reloading supported hoster list"))
- old_supported = self.supported
- self.supported, self.new_supported, self.hosters = [], [], []
+ old_supported = self.supported
+ self.supported = []
+ self.new_supported = []
+ self.hosters = []
self.overridePlugins()
@@ -123,11 +128,8 @@ class MultiHoster(Addon):
def overridePlugins(self):
- pluginMap = {}
- for name in self.core.pluginManager.hosterPlugins.keys():
- pluginMap[name.lower()] = name
-
- accountList = [name.lower() for name, data in self.core.accountManager.accounts.iteritems() if data]
+ pluginMap = dict((name.lower(), name) for name in self.core.pluginManager.hosterPlugins.keys())
+ accountList = [name.lower() for name, data in self.core.accountManager.accounts.iteritems() if data]
excludedList = []
for hoster in self.getHosterCached():
@@ -146,14 +148,14 @@ class MultiHoster(Addon):
return
module = self.core.pluginManager.getPlugin(self.__type__, self.__name__)
- klass = getattr(module, self.__name__)
+ klass = getattr(module, self.__name__)
# inject plugin plugin
self.logDebug("Overwritten Hosters", ", ".join(sorted(self.supported)))
for hoster in self.supported:
dict = self.core.pluginManager.hosterPlugins[hoster]
dict['new_module'] = module
- dict['new_name'] = self.__name__
+ dict['new_name'] = self.__name__
if excludedList:
self.logInfo(_("The following hosters were not overwritten - account exists"), ", ".join(sorted(excludedList)))
@@ -162,7 +164,7 @@ class MultiHoster(Addon):
self.logDebug("New Hosters", ", ".join(sorted(self.new_supported)))
# create new regexp
- regexp = r'.*(%s).*' % "|".join([x.replace(".", "\\.") for x in self.new_supported])
+ regexp = r'.*(%s).*' % "|".join([x.replace(".", "\.") for x in self.new_supported])
if hasattr(klass, "__pattern__") and isinstance(klass.__pattern__, basestring) and '://' in klass.__pattern__:
regexp = r'%s|%s' % (klass.__pattern__, regexp)
@@ -170,7 +172,7 @@ class MultiHoster(Addon):
dict = self.core.pluginManager.hosterPlugins[self.__name__]
dict['pattern'] = regexp
- dict['re'] = re.compile(regexp)
+ dict['re'] = re.compile(regexp)
def unloadHoster(self, hoster):
@@ -190,9 +192,9 @@ class MultiHoster(Addon):
# reset pattern
klass = getattr(self.core.pluginManager.getPlugin(self.__type__, self.__name__), self.__name__)
- dict = self.core.pluginManager.hosterPlugins[self.__name__]
+ dict = self.core.pluginManager.hosterPlugins[self.__name__]
dict['pattern'] = getattr(klass, "__pattern__", r'^unmatchable$')
- dict['re'] = re.compile(dict['pattern'])
+ dict['re'] = re.compile(dict['pattern'])
def downloadFailed(self, pyfile):
diff --git a/pyload/plugins/internal/SimpleHoster.py b/pyload/plugins/internal/SimpleHoster.py
index 922361b30..ce1ddd2b6 100644
--- a/pyload/plugins/internal/SimpleHoster.py
+++ b/pyload/plugins/internal/SimpleHoster.py
@@ -3,9 +3,8 @@
import re
from time import time
-from urlparse import urlparse
-
-from pycurl import FOLLOWLOCATION
+from urllib import unquote
+from urlparse import urljoin, urlparse
from pyload.datatype.PyFile import statusMap as _statusMap
from pyload.network.CookieJar import CookieJar
@@ -91,22 +90,37 @@ def timestamp():
#@TODO: Move to hoster class in 0.4.10
-def _getDirectLink(self, url):
+def _isDirectLink(self, url, resumable=True):
header = self.load(url, ref=True, just_header=True, decode=True)
if not 'location' in header or not header['location']:
return ""
- if header['code'] != 302 or 'content-type' not in header or header['content-type'] != "text/plain":
- return ""
+ location = header['location']
+
+ resumable = False #@NOTE: Testing...
+
+ if resumable: #: sometimes http code may be wrong...
+ if 'location' in self.load(location, ref=True, cookies=True, just_header=True, decode=True):
+ return ""
+ else:
+ if not 'code' in header or header['code'] != 302:
+ return ""
+
+ if urlparse(location).scheme:
+ link = location
+ else:
+ p = urlparse(url)
+ base = "%s://%s" % (p.scheme, p.netloc)
+ link = urljoin(base, location)
- return header['location']
+ return link
class SimpleHoster(Hoster):
__name__ = "SimpleHoster"
__type__ = "hoster"
- __version__ = "0.62"
+ __version__ = "0.71"
__pattern__ = r'^unmatchable$'
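
_getDirectLink() has become _isDirectLink(): instead of requiring a 302 with a text/plain content type, it accepts a response carrying a Location header (the resumable branch is disabled in the diff and marked as testing) and resolves relative locations the same way BasePlugin does. A sketch of just the decision part, with hypothetical header dicts in place of self.load(..., just_header=True):

    # Hypothetical header dicts stand in for self.load(url, just_header=True).
    from urlparse import urljoin, urlparse

    def is_direct_link(url, header):
        if not header.get('location'):
            return ""
        if header.get('code') != 302:          # resumable branch left out, as in the diff
            return ""
        location = header['location']
        if urlparse(location).scheme:
            return location
        p = urlparse(url)
        return urljoin("%s://%s" % (p.scheme, p.netloc), location)

    print repr(is_direct_link("http://example.com/file",
                              {'code': 302, 'location': "/dl/file.bin"}))   # -> direct link
    print repr(is_direct_link("http://example.com/file",
                              {'code': 200}))                               # -> ""
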
@@ -128,6 +142,9 @@ class SimpleHoster(Hoster):
SIZE_PATTERN: (optional) Size that will be checked for the file
example: SIZE_PATTERN = r'(?P<S>file_size) (?P<U>size_unit)'
+ HASHSUM_PATTERN: (optional) Hash code and type of the file
+ example: HASHSUM_PATTERN = r'(?P<H>hash_code) (?P<T>MD5)'
+
OFFLINE_PATTERN: (optional) Check if the file is yet available online
example: OFFLINE_PATTERN = r'File (deleted|not found)'
@@ -163,9 +180,9 @@ class SimpleHoster(Hoster):
TEXT_ENCODING = False #: Set to True or encoding name if encoding value in http header is not correct
COOKIES = True #: or False or list of tuples [(domain, name, value)]
FORCE_CHECK_TRAFFIC = False #: Set to True to force checking traffic left for premium account
- CHECK_DIRECT_LINK = None #: when None self-set to True if self.account else False
- MULTI_HOSTER = False #: Set to True to leech other hoster link
- CONTENT_DISPOSITION = False #: Set to True to replace file name with content-disposition value in http header
+ CHECK_DIRECT_LINK = None #: Set to True to check for direct link, set to None to do it only if self.account is True
+ MULTI_HOSTER = False #: Set to True to leech other hoster link (according its multihoster hook if available)
+ CONTENT_DISPOSITION = False #: Set to True to replace file name with content-disposition value from http header
@classmethod
@@ -177,14 +194,32 @@ class SimpleHoster(Hoster):
@classmethod
def getInfo(cls, url="", html=""):
- info = {'name': urlparse(url).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3, 'url': url or ""}
+ info = {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3, 'url': url}
if not html:
- if url:
- html = getURL(url, cookies=cls.COOKIES, decode=not cls.TEXT_ENCODING)
- if isinstance(cls.TEXT_ENCODING, basestring):
- html = unicode(html, cls.TEXT_ENCODING)
- else:
+ try:
+ if not url:
+ info['error'] = "missing url"
+ info['status'] = 1
+ raise
+
+ try:
+ html = getURL(url, cookies=cls.COOKIES, decode=not cls.TEXT_ENCODING)
+
+ if isinstance(cls.TEXT_ENCODING, basestring):
+ html = unicode(html, cls.TEXT_ENCODING)
+
+ except BadHeader, e:
+ info['error'] = "%d: %s" % (e.code, e.content)
+
+ if e.code is 404:
+ info['status'] = 1
+ raise
+
+ if e.code is 503:
+ info['status'] = 6
+ raise
+ except:
return info
online = False
@@ -197,33 +232,43 @@ class SimpleHoster(Hoster):
else:
try:
- info.update(re.match(cls.__pattern__, url).groupdict())
+ info['pattern'] = re.match(cls.__pattern__, url).groupdict() #: pattern groups will be saved here, please save api stuff to info['api']
except Exception:
pass
- for pattern in ("INFO_PATTERN", "NAME_PATTERN", "SIZE_PATTERN"):
+ for pattern in ("INFO_PATTERN", "NAME_PATTERN", "SIZE_PATTERN", "HASHSUM_PATTERN"):
try:
attr = getattr(cls, pattern)
- info.update(re.search(attr, html).groupdict())
+ dict = re.search(attr, html).groupdict()
+
+                if all(k not in info['pattern'] for k in dict):
+ info['pattern'].update(dict)
+
except AttributeError:
continue
+
else:
online = True
if online:
info['status'] = 2
- if 'N' in info:
- info['name'] = replace_patterns(info['N'].strip(), cls.NAME_REPLACEMENTS)
+ if 'N' in info['pattern']:
+ info['name'] = replace_patterns(unquote(info['pattern']['N'].strip()), cls.NAME_REPLACEMENTS)
- if 'S' in info:
- size = replace_patterns(info['S'] + info['U'] if 'U' in info else info['S'], cls.SIZE_REPLACEMENTS)
+ if 'S' in info['pattern']:
+                size = replace_patterns(info['pattern']['S'] + info['pattern']['U'] if 'U' in info['pattern'] else info['pattern']['S'],
+ cls.SIZE_REPLACEMENTS)
info['size'] = parseFileSize(size)
elif isinstance(info['size'], basestring):
unit = info['units'] if 'units' in info else None
info['size'] = parseFileSize(info['size'], unit)
+ if 'H' in info['pattern']:
+ hashtype = info['pattern']['T'] if 'T' in info['pattern'] else "hash"
+ info[hashtype] = info['pattern']['H']
+
return info
@@ -243,8 +288,8 @@ class SimpleHoster(Hoster):
set_cookies(self.req.cj, self.COOKIES)
if (self.MULTI_HOSTER
- and self.__pattern__ != self.core.pluginManager.hosterPlugins[self.__name__]['pattern']
- and re.match(self.__pattern__, self.pyfile.url) is None):
+ and (self.__pattern__ != self.core.pluginManager.hosterPlugins[self.__name__]['pattern']
+ or re.match(self.__pattern__, self.pyfile.url) is None)):
self.logInfo("Multi hoster detected")
@@ -288,8 +333,7 @@ class SimpleHoster(Hoster):
premium_only = 'error' in self.info and self.info['error'] == "premium-only"
- info = self.getInfo(pyfile.url, self.html)
- self._updateInfo(info)
+ self._updateInfo(self.getInfo(pyfile.url, self.html))
self.checkNameSize()
@@ -308,18 +352,28 @@ class SimpleHoster(Hoster):
self.logDebug("Handled as free download")
self.handleFree()
- if self.link:
- self.download(self.link, disposition=self.CONTENT_DISPOSITION)
+ self.downloadLink(self.link)
+ self.checkFile()
+
+
+ def downloadLink(self, link):
+ if not link:
+ return
+
+ self.download(link, disposition=self.CONTENT_DISPOSITION)
+
+
+ def checkFile(self):
+ if self.checkDownload({'empty': re.compile(r"^$")}) == "empty": #@TODO: Move to hoster in 0.4.10
+ self.fail(_("Empty file"))
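
Splitting the download step into downloadLink() and checkFile() gives subclasses two small hooks instead of one monolithic process(). A rough sketch of how a plugin might extend the new checkFile(); the class name, pattern, and error page are invented:

    import re

    class ExampleComHoster(SimpleHoster):                 # hypothetical subclass
        __pattern__ = r'https?://example\.com/\w+'

        def checkFile(self):
            super(ExampleComHoster, self).checkFile()     # keep the base "empty file" check
            # add a site-specific rule on top of it
            if self.checkDownload({'error_page': re.compile(r'<title>Error</title>')}) == "error_page":
                self.retry(reason=_("Got an error page instead of the file"))
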
def checkErrors(self):
- if hasattr(self, 'WAIT_PATTERN'):
- m = re.search(self.WAIT_PATTERN, self.html)
+ if hasattr(self, 'ERROR_PATTERN'):
+ m = re.search(self.ERROR_PATTERN, self.html)
if m:
- wait_time = sum([int(v) * {"hr": 3600, "hour": 3600, "min": 60, "sec": 1}[u.lower()] for v, u in
- re.findall(r'(\d+)\s*(hr|hour|min|sec)', m, re.I)])
- self.wait(wait_time, False)
- return
+ errmsg = self.info['error'] = m.group(1)
+ self.error(errmsg)
if hasattr(self, 'PREMIUM_ONLY_PATTERN'):
m = re.search(self.PREMIUM_ONLY_PATTERN, self.html)
@@ -327,11 +381,13 @@ class SimpleHoster(Hoster):
self.info['error'] = "premium-only"
return
- if hasattr(self, 'ERROR_PATTERN'):
- m = re.search(self.ERROR_PATTERN, self.html)
+ if hasattr(self, 'WAIT_PATTERN'):
+ m = re.search(self.WAIT_PATTERN, self.html)
if m:
- e = self.info['error'] = m.group(1)
- self.error(e)
+ wait_time = sum([int(v) * {"hr": 3600, "hour": 3600, "min": 60, "sec": 1}[u.lower()] for v, u in
+ re.findall(r'(\d+)\s*(hr|hour|min|sec)', m.group(0), re.I)])
+ self.wait(wait_time, False)
+ return
self.info.pop('error', None)
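
The wait handling moved below the error and premium checks, but the arithmetic is unchanged: every "<number> <unit>" pair found in the matched text is converted to seconds and summed. The same computation in isolation:

    import re

    UNITS = {"hr": 3600, "hour": 3600, "min": 60, "sec": 1}

    def parse_wait(text):
        return sum(int(v) * UNITS[u.lower()]
                   for v, u in re.findall(r'(\d+)\s*(hr|hour|min|sec)', text, re.I))

    print(parse_wait("You have to wait 1 hour 30 min 15 sec"))   # 5415
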
@@ -381,7 +437,9 @@ class SimpleHoster(Hoster):
#: Deprecated
def getFileInfo(self):
- return self.checkInfo()
+ self.info = {}
+ self.checkInfo()
+ return self.info
def _updateInfo(self, info):
@@ -391,7 +449,7 @@ class SimpleHoster(Hoster):
def handleDirect(self):
- link = _getDirectLink(self, self.pyfile.url)
+ link = _isDirectLink(self, self.pyfile.url, self.resumeDownload)
if link:
self.logInfo(_("Direct download link detected"))
@@ -420,7 +478,7 @@ class SimpleHoster(Hoster):
self.link = m.group(1)
except Exception, e:
- self.fail(str(e))
+ self.fail(e)
def handlePremium(self):
@@ -435,7 +493,7 @@ class SimpleHoster(Hoster):
self.link = m.group(1)
except Exception, e:
- self.fail(str(e))
+ self.fail(e)
def longWait(self, wait_time=None, max_tries=3):
diff --git a/pyload/plugins/internal/UnRar.py b/pyload/plugins/internal/UnRar.py
index ebfe53829..90216222b 100644
--- a/pyload/plugins/internal/UnRar.py
+++ b/pyload/plugins/internal/UnRar.py
@@ -22,7 +22,7 @@ def renice(pid, value):
class UnRar(AbtractExtractor):
__name__ = "UnRar"
- __version__ = "0.18"
+ __version__ = "0.19"
__description__ = """Rar extractor plugin"""
__license__ = "GPLv3"
@@ -32,12 +32,12 @@ class UnRar(AbtractExtractor):
CMD = "unrar"
# there are some more uncovered rar formats
- re_version = re.compile(r"(UNRAR 5[\d.]+(.*?)freeware)")
- re_splitfile = re.compile(r"(.*)\.part(\d+)\.rar$", re.I)
- re_partfiles = re.compile(r".*\.(rar|r\d+)", re.I)
- re_filelist = re.compile(r"(.+)\s+(\d+)\s+(\d+)\s+")
- re_filelist5 = re.compile(r"(.+)\s+(\d+)\s+\d\d-\d\d-\d\d\s+\d\d:\d\d\s+(.+)")
- re_wrongpwd = re.compile("(Corrupt file or wrong password|password incorrect)", re.I)
+ re_version = re.compile(r'UNRAR ([\w .]+?) freeware')
+ re_splitfile = re.compile(r'(.*)\.part(\d+)\.rar$', re.I)
+ re_partfiles = re.compile(r'.*\.(rar|r\d+)', re.I)
+ re_filelist = re.compile(r'(.+)\s+(\d+)\s+(\d+)\s+')
+ re_filelist5 = re.compile(r'(.+)\s+(\d+)\s+\d\d-\d\d-\d\d\s+\d\d:\d\d\s+(.+)')
+ re_wrongpwd = re.compile(r'(Corrupt file or wrong password|password incorrect)', re.I)
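
The new re_version is no longer pinned to the 5.x series: it grabs whatever version string sits between "UNRAR" and "freeware" in the banner. A quick standalone check (the banner line is illustrative, not taken from a specific unrar build):

    import re

    re_version = re.compile(r'UNRAR ([\w .]+?) freeware')

    banner = "UNRAR 5.21 freeware      Copyright (c) 1993-2015 Alexander Roshal"
    m = re_version.search(banner)
    print(m.group(1) if m else "no match")   # 5.21
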
@staticmethod
diff --git a/pyload/plugins/internal/XFSAccount.py b/pyload/plugins/internal/XFSAccount.py
index 168c4f903..a330d2ff4 100644
--- a/pyload/plugins/internal/XFSAccount.py
+++ b/pyload/plugins/internal/XFSAccount.py
@@ -12,7 +12,7 @@ from pyload.plugins.internal.SimpleHoster import parseHtmlForm, set_cookies
class XFSAccount(Account):
__name__ = "XFSAccount"
__type__ = "account"
- __version__ = "0.30"
+ __version__ = "0.32"
__description__ = """XFileSharing account plugin"""
__license__ = "GPLv3"
@@ -27,15 +27,15 @@ class XFSAccount(Account):
PREMIUM_PATTERN = r'\(Premium only\)'
- VALID_UNTIL_PATTERN = r'>Premium.[Aa]ccount expire:.*?(\d{1,2} [\w^_]+ \d{4})'
+ VALID_UNTIL_PATTERN = r'Premium.[Aa]ccount expire:.*?(\d{1,2} [\w^_]+ \d{4})'
- TRAFFIC_LEFT_PATTERN = r'>Traffic available today:.*?<b>\s*(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
+ TRAFFIC_LEFT_PATTERN = r'Traffic available today:.*?<b>\s*(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
TRAFFIC_LEFT_UNIT = "MB" #: used only if no group <U> was found
LEECH_TRAFFIC_PATTERN = r'Leech Traffic left:<b>.*?(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
LEECH_TRAFFIC_UNIT = "MB" #: used only if no group <U> was found
- LOGIN_FAIL_PATTERN = r'>(Incorrect Login or Password|Error<)'
+ LOGIN_FAIL_PATTERN = r'>\s*(Incorrect Login or Password|Error<)'
def init(self):
@@ -104,12 +104,12 @@ class XFSAccount(Account):
else:
self.logDebug("TRAFFIC_LEFT_PATTERN not found")
- m = re.finditer(self.LEECH_TRAFFIC_PATTERN, html)
- if m:
+ leech = [m.groupdict() for m in re.finditer(self.LEECH_TRAFFIC_PATTERN, html)]
+ if leech:
leechtraffic = 0
try:
- for leech in m:
- size = leech['S']
+ for traffic in leech:
+ size = traffic['S']
if "nlimited" in size:
leechtraffic = -1
@@ -117,8 +117,8 @@ class XFSAccount(Account):
validuntil = -1
break
else:
- if 'U' in leech:
- unit = leech['U']
+ if 'U' in traffic:
+ unit = traffic['U']
elif isinstance(self.LEECH_TRAFFIC_UNIT, basestring):
unit = self.LEECH_TRAFFIC_UNIT
else:
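
The point of this refactor is that re.finditer() returns an iterator, which is truthy even when there are no matches, so the old "if m:" guard never skipped the block; materialising the matches into a list first makes the emptiness check meaningful and lets the loop work on plain group dicts. A standalone sketch (the account HTML is made up):

    import re

    LEECH_TRAFFIC_PATTERN = r'Leech Traffic left:<b>.*?(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'

    html = "Leech Traffic left:<b>1,024 MB</b>"

    leech = [m.groupdict() for m in re.finditer(LEECH_TRAFFIC_PATTERN, html)]
    if leech:                                    # an empty list is falsy, unlike an iterator
        for traffic in leech:
            print("%s %s" % (traffic['S'], traffic['U'] or "(no unit)"))   # 1,024 MB
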
diff --git a/pyload/plugins/internal/XFSHoster.py b/pyload/plugins/internal/XFSHoster.py
index 061012059..a4e7339c5 100644
--- a/pyload/plugins/internal/XFSHoster.py
+++ b/pyload/plugins/internal/XFSHoster.py
@@ -16,7 +16,7 @@ from pyload.utils import html_unescape
class XFSHoster(SimpleHoster):
__name__ = "XFSHoster"
__type__ = "hoster"
- __version__ = "0.26"
+ __version__ = "0.27"
__pattern__ = r'^unmatchable$'
@@ -35,7 +35,6 @@ class XFSHoster(SimpleHoster):
CHECK_DIRECT_LINK = None
MULTI_HOSTER = True #@NOTE: Should default to False to be safe, but I'm lazy...
- INFO_PATTERN = r'<tr><td align=right><b>Filename:</b></td><td nowrap>(?P<N>[^<]+)</td></tr>\s*.*?<small>\((?P<S>[^<]+)\)</small>'
NAME_PATTERN = r'(>Filename:</b></td><td nowrap>|name="fname" value="|<span class="name">)(?P<N>.+?)(\s*<|")'
SIZE_PATTERN = r'(>Size:</b></td><td>|>File:.*>|<span class="size">)(?P<S>[\d.,]+)\s*(?P<U>[\w^_]+)'
@@ -49,10 +48,10 @@ class XFSHoster(SimpleHoster):
LEECH_LINK_PATTERN = r'<h2>Download Link</h2>\s*<textarea[^>]*>([^<]+)'
LINK_PATTERN = None #: final download url pattern
- CAPTCHA_PATTERN = r'(https?://[^"\']+?/captchas?/[^"\']+)'
- CAPTCHA_DIV_PATTERN = r'>Enter code.*?<div.*?>(.+?)</div>'
- RECAPTCHA_PATTERN = None
- SOLVEMEDIA_PATTERN = None
+ CAPTCHA_PATTERN = r'(https?://[^"\']+?/captchas?/[^"\']+)'
+ CAPTCHA_BLOCK_PATTERN = r'>Enter code.*?<div.*?>(.+?)</div>'
+ RECAPTCHA_PATTERN = None
+ SOLVEMEDIA_PATTERN = None
FORM_PATTERN = None
FORM_INPUTS_MAP = None #: dict passed as input_names to parseHtmlForm
@@ -234,10 +233,10 @@ class XFSHoster(SimpleHoster):
retries = 3
else:
delay = 1 * 60 * 60
- retries = 25
+ retries = 24
- self.wait(delay, True)
- self.retry(retries, reason=_("Download limit exceeded"))
+ self.wantReconnect = True
+ self.retry(retries, delay, _("Download limit exceeded"))
elif 'countdown' in self.errmsg or 'Expired' in self.errmsg:
self.retry(reason=_("Link expired"))
@@ -249,6 +248,7 @@ class XFSHoster(SimpleHoster):
self.fail(_("File too large for free download"))
else:
+ self.wantReconnect = True
self.retry(wait_time=60, reason=self.errmsg)
if self.errmsg:
@@ -256,8 +256,6 @@ class XFSHoster(SimpleHoster):
else:
self.info.pop('error', None)
- return self.errmsg
-
def getPostParameters(self):
if self.FORM_PATTERN or self.FORM_INPUTS_MAP:
@@ -311,7 +309,7 @@ class XFSHoster(SimpleHoster):
inputs['code'] = self.decryptCaptcha(captcha_url)
return 1
- m = re.search(self.CAPTCHA_DIV_PATTERN, self.html, re.S)
+ m = re.search(self.CAPTCHA_BLOCK_PATTERN, self.html, re.S)
if m:
captcha_div = m.group(1)
numerals = re.findall(r'<span.*?padding-left\s*:\s*(\d+).*?>(\d)</span>', html_unescape(captcha_div))
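
Each tuple in numerals pairs a digit with its padding-left offset, so the human-visible order can be rebuilt by sorting on that offset before joining. A standalone illustration with invented captcha markup; the final join mirrors what the surrounding code is expected to do next, not a verbatim copy of it:

    import re

    captcha_div = ('<span style="padding-left: 40px">7</span>'
                   '<span style="padding-left: 10px">3</span>'
                   '<span style="padding-left: 25px">9</span>')

    numerals = re.findall(r'<span.*?padding-left\s*:\s*(\d+).*?>(\d)</span>', captcha_div)
    code = "".join(digit for offset, digit in sorted(numerals, key=lambda n: int(n[0])))
    print(code)   # 397
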