summary refs log tree commit diff stats
path: root/module/plugins/hoster/FileserveCom.py
diff options
context:
space:
mode:
author: lazlev <lazlev@yopmail.com> 2015-08-09 00:50:54 +0200
committer: lazlev <lazlev@yopmail.com> 2015-08-09 00:50:54 +0200
commit: b0ef3f1673e1930916604bb1264ca3a38414bc8d (patch)
tree: c97936e4d2a4cd6eb1072c65c8a08a7d18816b18 /module/plugins/hoster/FileserveCom.py
parent[XFileSharingPro][XFileSharingProFolder] Added default __pattern__ (diff)
parentFix https://github.com/pyload/pyload/issues/1707 (diff)
download: pyload-b0ef3f1673e1930916604bb1264ca3a38414bc8d.tar.xz
Merge pull request #1 from pyload/stable
sync with stable
Diffstat (limited to 'module/plugins/hoster/FileserveCom.py')
-rw-r--r--  module/plugins/hoster/FileserveCom.py | 144
1 file changed, 71 insertions, 73 deletions
diff --git a/module/plugins/hoster/FileserveCom.py b/module/plugins/hoster/FileserveCom.py
index f8cf652b9..a74589cff 100644
--- a/module/plugins/hoster/FileserveCom.py
+++ b/module/plugins/hoster/FileserveCom.py
@@ -3,16 +3,16 @@
import re
from module.common.json_layer import json_loads
-from module.network.RequestFactory import getURL
-from module.plugins.Hoster import Hoster
-from module.plugins.Plugin import chunks
-from module.plugins.internal.ReCaptcha import ReCaptcha
-from module.plugins.internal.SimpleHoster import secondsToMidnight
-from module.utils import parseFileSize
+from module.network.RequestFactory import getURL as get_url
+from module.plugins.internal.Hoster import Hoster
+from module.plugins.internal.Plugin import chunks
+from module.plugins.captcha.ReCaptcha import ReCaptcha
+from module.plugins.internal.SimpleHoster import seconds_to_midnight
+from module.utils import parseFileSize as parse_size
-def checkFile(plugin, urls):
- html = getURL(plugin.URLS[1], post={"urls": "\n".join(urls)}, decode=True)
+def check_file(plugin, urls):
+ html = get_url(plugin.URLS[1], post={'urls': "\n".join(urls)})
file_info = []
for li in re.finditer(plugin.LINKCHECK_TR, html, re.S):
@@ -21,7 +21,7 @@ def checkFile(plugin, urls):
if cols:
file_info.append((
cols[1] if cols[1] != '--' else cols[0],
- parseFileSize(cols[2]) if cols[2] != '--' else 0,
+ parse_size(cols[2]) if cols[2] != '--' else 0,
2 if cols[3].startswith('Available') else 1,
cols[0]))
except Exception, e:
@@ -33,65 +33,67 @@ def checkFile(plugin, urls):
class FileserveCom(Hoster):
__name__ = "FileserveCom"
__type__ = "hoster"
- __version__ = "0.55"
+ __version__ = "0.58"
+ __status__ = "testing"
__pattern__ = r'http://(?:www\.)?fileserve\.com/file/(?P<ID>[^/]+)'
__description__ = """Fileserve.com hoster plugin"""
__license__ = "GPLv3"
- __authors__ = [("jeix", "jeix@hasnomail.de"),
- ("mkaay", "mkaay@mkaay.de"),
- ("Paul King", None),
- ("zoidberg", "zoidberg@mujmail.cz")]
+ __authors__ = [("jeix" , "jeix@hasnomail.de" ),
+ ("mkaay" , "mkaay@mkaay.de" ),
+ ("Paul King", None ),
+ ("zoidberg" , "zoidberg@mujmail.cz")]
- URLS = ["http://www.fileserve.com/file/", "http://www.fileserve.com/link-checker.php",
+ URLS = ["http://www.fileserve.com/file/",
+ "http://www.fileserve.com/link-checker.php",
"http://www.fileserve.com/checkReCaptcha.php"]
+
LINKCHECK_TR = r'<tr>\s*(<td>http://www\.fileserve\.com/file/.*?)</tr>'
LINKCHECK_TD = r'<td>(?:<.*?>|&nbsp;)*([^<]*)'
- CAPTCHA_KEY_PATTERN = r'var reCAPTCHA_publickey=\'(.+?)\''
- LONG_WAIT_PATTERN = r'<li class="title">You need to wait (\d+) (\w+) to start another download\.</li>'
- LINK_EXPIRED_PATTERN = r'Your download link has expired'
- DAILY_LIMIT_PATTERN = r'Your daily download limit has been reached'
+ CAPTCHA_KEY_PATTERN = r'var reCAPTCHA_publickey=\'(.+?)\''
+ LONG_WAIT_PATTERN = r'<li class="title">You need to wait (\d+) (\w+) to start another download\.</li>'
+ LINK_EXPIRED_PATTERN = r'Your download link has expired'
+ DL_LIMIT_PATTERN = r'Your daily download limit has been reached'
NOT_LOGGED_IN_PATTERN = r'<form (name="loginDialogBoxForm"|id="login_form")|<li><a href="/login\.php">Login</a></li>'
def setup(self):
- self.resumeDownload = self.multiDL = self.premium
+ self.resume_download = self.multiDL = self.premium
self.file_id = re.match(self.__pattern__, self.pyfile.url).group('ID')
self.url = "%s%s" % (self.URLS[0], self.file_id)
- self.logDebug("File ID: %s URL: %s" % (self.file_id, self.url))
+ self.log_debug("File ID: %s URL: %s" % (self.file_id, self.url))
def process(self, pyfile):
- pyfile.name, pyfile.size, status, self.url = checkFile(self, [self.url])[0]
+ pyfile.name, pyfile.size, status, self.url = check_file(self, [self.url])[0]
if status != 2:
self.offline()
- self.logDebug("File Name: %s Size: %d" % (pyfile.name, pyfile.size))
+ self.log_debug("File Name: %s Size: %d" % (pyfile.name, pyfile.size))
if self.premium:
- self.handlePremium()
+ self.handle_premium()
else:
- self.handleFree()
+ self.handle_free()
- def handleFree(self):
+ def handle_free(self):
self.html = self.load(self.url)
- action = self.load(self.url, post={"checkDownload": "check"}, decode=True)
+ action = self.load(self.url, post={'checkDownload': "check"})
action = json_loads(action)
- self.logDebug(action)
+ self.log_debug(action)
if "fail" in action:
if action['fail'] == "timeLimit":
- self.html = self.load(self.url, post={"checkDownload": "showError", "errorType": "timeLimit"},
- decode=True)
+ self.html = self.load(self.url, post={'checkDownload': "showError", 'errorType': "timeLimit"})
- self.doLongWait(re.search(self.LONG_WAIT_PATTERN, self.html))
+ self.do_long_wait(re.search(self.LONG_WAIT_PATTERN, self.html))
elif action['fail'] == "parallelDownload":
- self.logWarning(_("Parallel download error, now waiting 60s"))
+ self.log_warning(_("Parallel download error, now waiting 60s"))
self.retry(wait_time=60, reason=_("parallelDownload"))
else:
@@ -99,47 +101,46 @@ class FileserveCom(Hoster):
elif "success" in action:
if action['success'] == "showCaptcha":
- self.doCaptcha()
- self.doTimmer()
+ self.do_captcha()
+ self.do_timmer()
elif action['success'] == "showTimmer":
- self.doTimmer()
+ self.do_timmer()
else:
self.error(_("Unknown server response"))
- # show download link
- res = self.load(self.url, post={"downloadLink": "show"}, decode=True)
- self.logDebug("Show downloadLink response: %s" % res)
+ #: Show download link
+ res = self.load(self.url, post={'downloadLink': "show"})
+ self.log_debug("Show downloadLink response: %s" % res)
if "fail" in res:
self.error(_("Couldn't retrieve download url"))
- # this may either download our file or forward us to an error page
- self.download(self.url, post={"download": "normal"})
- self.logDebug(self.req.http.lastEffectiveURL)
+ #: This may either download our file or forward us to an error page
+ self.download(self.url, post={'download': "normal"})
+ self.log_debug(self.req.http.lastEffectiveURL)
- check = self.checkDownload({"expired": self.LINK_EXPIRED_PATTERN,
- "wait" : re.compile(self.LONG_WAIT_PATTERN),
- "limit" : self.DAILY_LIMIT_PATTERN})
+ check = self.check_download({'expired': self.LINK_EXPIRED_PATTERN,
+ 'wait' : re.compile(self.LONG_WAIT_PATTERN),
+ 'limit' : self.DL_LIMIT_PATTERN})
if check == "expired":
- self.logDebug("Download link was expired")
+ self.log_debug("Download link was expired")
self.retry()
elif check == "wait":
- self.doLongWait(self.lastCheck)
+ self.do_long_wait(self.last_check)
elif check == "limit":
- self.logWarning(_("Download limited reached for today"))
- self.setWait(secondsToMidnight(gmt=2), True)
- self.wait()
+ self.log_warning(_("Download limited reached for today"))
+ self.wait(seconds_to_midnight(gmt=2), True)
self.retry()
- self.thread.m.reconnecting.wait(3) # Ease issue with later downloads appearing to be in parallel
+ self.thread.m.reconnecting.wait(3) #: Ease issue with later downloads appearing to be in parallel
- def doTimmer(self):
- res = self.load(self.url, post={"downloadLink": "wait"}, decode=True)
- self.logDebug("Wait response: %s" % res[:80])
+ def do_timmer(self):
+ res = self.load(self.url, post={'downloadLink': "wait"})
+ self.log_debug("Wait response: %s" % res[:80])
if "fail" in res:
self.fail(_("Failed getting wait time"))
@@ -152,11 +153,10 @@ class FileserveCom(Hoster):
else:
wait_time = int(res) + 3
- self.setWait(wait_time)
- self.wait()
+ self.wait(wait_time)
- def doCaptcha(self):
+ def do_captcha(self):
captcha_key = re.search(self.CAPTCHA_KEY_PATTERN, self.html).group(1)
recaptcha = ReCaptcha(self)
@@ -167,50 +167,48 @@ class FileserveCom(Hoster):
'recaptcha_response_field' : response,
'recaptcha_shortencode_field': self.file_id}))
if not res['success']:
- self.invalidCaptcha()
+ self.captcha.invalid()
else:
- self.correctCaptcha()
+ self.captcha.correct()
break
else:
self.fail(_("Invalid captcha"))
- def doLongWait(self, m):
+ def do_long_wait(self, m):
wait_time = (int(m.group(1)) * {'seconds': 1, 'minutes': 60, 'hours': 3600}[m.group(2)]) if m else 12 * 60
- self.setWait(wait_time, True)
- self.wait()
+ self.wait(wait_time, True)
self.retry()
- def handlePremium(self):
+ def handle_premium(self):
premium_url = None
if self.__name__ == "FileserveCom":
- #try api download
+ #: Try api download
res = self.load("http://app.fileserve.com/api/download/premium/",
- post={"username": self.user,
- "password": self.account.getAccountData(self.user)['password'],
- "shorten": self.file_id},
- decode=True)
+ post={'username': self.user,
+ 'password': self.account.get_info(self.user)['login']['password'],
+ 'shorten': self.file_id})
if res:
res = json_loads(res)
if res['error_code'] == "302":
premium_url = res['next']
elif res['error_code'] in ["305", "500"]:
- self.tempOffline()
+ self.temp_offline()
elif res['error_code'] in ["403", "605"]:
- self.resetAccount()
+ self.restart(nopremium=True)
elif res['error_code'] in ["606", "607", "608"]:
self.offline()
else:
- self.logError(res['error_code'], res['error_message'])
+ self.log_error(res['error_code'], res['error_message'])
self.download(premium_url or self.pyfile.url)
- if not premium_url and self.checkDownload({"login": re.compile(self.NOT_LOGGED_IN_PATTERN)}):
+ if not premium_url and self.check_download({'login': re.compile(self.NOT_LOGGED_IN_PATTERN)}):
self.account.relogin(self.user)
self.retry(reason=_("Not logged in"))
-def getInfo(urls):
+def get_info(urls):
for chunk in chunks(urls, 100):
- yield checkFile(FileserveCom, chunk)
+ yield check_file(FileserveCom, chunk)