summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--module/plugins/hoster/ShareonlineBiz.py184
1 files changed, 87 insertions, 97 deletions
diff --git a/module/plugins/hoster/ShareonlineBiz.py b/module/plugins/hoster/ShareonlineBiz.py
index 78a27558b..91fc989c9 100644
--- a/module/plugins/hoster/ShareonlineBiz.py
+++ b/module/plugins/hoster/ShareonlineBiz.py
@@ -3,43 +3,18 @@
import re
from time import time
+from urllib import unquote
+from urlparse import urlparse
from module.network.RequestFactory import getURL
-from module.plugins.Hoster import Hoster
-from module.plugins.Plugin import chunks
from module.plugins.internal.CaptchaService import ReCaptcha
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-def getInfo(urls):
- api_url_base = "http://api.share-online.biz/linkcheck.php"
-
- urls = [url.replace("https://", "http://") for url in urls]
-
- for chunk in chunks(urls, 90):
- api_param_file = {"links": "\n".join(x.replace("http://www.share-online.biz/dl/", "").rstrip("/") for x in
- chunk)} # api only supports old style links
- html = getURL(api_url_base, post=api_param_file, decode=True)
- result = []
- for i, res in enumerate(html.split("\n")):
- if not res:
- continue
- fields = res.split(";")
-
- if fields[1] == "OK":
- status = 2
- elif fields[1] in ("DELETED", "NOT FOUND"):
- status = 1
- else:
- status = 3
-
- result.append((fields[2], int(fields[3]), status, chunk[i]))
- yield result
-
-
-class ShareonlineBiz(Hoster):
+class ShareonlineBiz(SimpleHoster):
__name__ = "ShareonlineBiz"
__type__ = "hoster"
- __version__ = "0.41"
+ __version__ = "0.42"
__pattern__ = r'https?://(?:www\.)?(share-online\.biz|egoshare\.com)/(download\.php\?id=|dl/)(?P<ID>\w+)'
@@ -51,110 +26,118 @@ class ShareonlineBiz(Hoster):
("Walter Purcaro", "vuolter@gmail.com")]
- ERROR_INFO_PATTERN = r'<p class="b">Information:</p>\s*<div>\s*<strong>(.*?)</strong>'
+ URL_REPLACEMENTS = [(__pattern__ + ".*", "http://www.share-online.biz/dl/\g<ID>")]
+ ERROR_INFO_PATTERN = r'<p class="b">Information:</p>\s*<div>\s*<strong>(.*?)</strong>'
- def setup(self):
- self.file_id = re.match(self.__pattern__, self.pyfile.url).group("ID")
- self.pyfile.url = "http://www.share-online.biz/dl/" + self.file_id
- self.resumeDownload = self.premium
- self.multiDL = False
+ @classmethod
+ def getInfo(cls, url="", html=""):
+ info = {'name': urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"), 'size': 0, 'status': 3 if url else 1, 'url': url}
- self.check_data = None
+ if url:
+ info.update(re.match(cls.__pattern__, url).groupdict())
+ api_url = "http://api.share-online.biz/linkcheck.php?md5=1"
+        html = getURL(api_url, cookies=False, post={"links": info['ID']}, decode=True)
+ field = html.split(";")
- def process(self, pyfile):
- if self.premium:
- self.handlePremium()
- else:
- self.handleFree()
+        if field[1] == "OK":
+ info['fileid'] = field[0]
+ info['status'] = 2
+ info['filename'] = field[2]
+ info['size'] = field[3] #: in bytes
+ info['md5'] = field[4].strip().lower().replace("\n\n", "") #: md5
- if self.api_data:
- self.check_data = {"size": int(self.api_data['size']), "md5": self.api_data['md5']}
+ elif field[1] in ("DELETED", "NOT FOUND"):
+ info['status'] = 1
+ return info
- def loadAPIData(self):
- api_url_base = "http://api.share-online.biz/linkcheck.php?md5=1"
- api_param_file = {"links": self.file_id} #: api only supports old style links
- html = self.load(api_url_base, cookies=False, post=api_param_file, decode=True)
-
- fields = html.split(";")
- self.api_data = {"fileid": fields[0],
- "status": fields[1]}
- if not self.api_data['status'] == "OK":
- self.offline()
- else:
- self.api_data['filename'] = fields[2]
- self.api_data['size'] = fields[3] #: in bytes
- self.api_data['md5'] = fields[4].strip().lower().replace("\n\n", "") #: md5
+ def setup(self):
+ self.resumeDownload = self.premium
+ self.multiDL = False
- def handleFree(self):
- self.loadAPIData()
- self.pyfile.name = self.api_data['filename']
- self.pyfile.size = int(self.api_data['size'])
-
- self.html = self.load(self.pyfile.url, cookies=True) #: refer, stuff
- self.setWait(3)
- self.wait()
-
- self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free": "1", "choice": "free"}, decode=True)
- self.checkErrors()
-
- m = re.search(r'var wait=(\d+);', self.html)
+ def handleCaptcha(self):
recaptcha = ReCaptcha(self)
+
for _i in xrange(5):
- challenge, response = recaptcha.challenge("6LdatrsSAAAAAHZrB70txiV5p-8Iv8BtVxlTtjKX")
+ challenge, response = recaptcha.challenge()
+
+ m = re.search(r'var wait=(\d+);', self.html)
self.setWait(int(m.group(1)) if m else 30)
+
res = self.load("%s/free/captcha/%d" % (self.pyfile.url, int(time() * 1000)),
- post={'dl_free': '1',
+ post={'dl_free' : "1",
'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field': response})
-
+ 'recaptcha_response_field' : response})
if not res == '0':
self.correctCaptcha()
- break
+ return res
else:
self.invalidCaptcha()
else:
self.invalidCaptcha()
self.fail(_("No valid captcha solution received"))
+
+ def handleFree(self):
+ self.html = self.load(self.pyfile.url, cookies=True) #: refer, stuff
+
+ self.wait(3)
+
+ self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free": "1", "choice": "free"}, decode=True)
+
+ self.checkErrors()
+
+ res = self.handleCaptcha()
+
download_url = res.decode("base64")
+
if not download_url.startswith("http://"):
self.error(_("Wrong download url"))
self.wait()
+
self.download(download_url)
+
+ def checkFile(self):
# check download
check = self.checkDownload({
- "cookie": re.compile(r'<div id="dl_failure"'),
- "fail": re.compile(r"<title>Share-Online")
+ 'empty' : re.compile(r"^$"),
+ 'cookie': re.compile(r'<div id="dl_failure"'),
+ 'fail' : re.compile(r"<title>Share-Online")
})
- if check == "cookie":
+
+ if check == "empty":
+ self.fail(_("Empty file"))
+
+ elif check == "cookie":
self.invalidCaptcha()
- self.retry(5, 60, "Cookie failure")
+ self.retry(5, 60, _("Cookie failure"))
+
elif check == "fail":
self.invalidCaptcha()
- self.retry(5, 5 * 60, "Download failed")
- else:
- self.correctCaptcha()
+ self.retry(5, 5 * 60, _("Download failed"))
def handlePremium(self): #: should be working better loading (account) api internally
self.account.getAccountInfo(self.user, True)
+
html = self.load("http://api.share-online.biz/account.php",
{"username": self.user, "password": self.account.accounts[self.user]['password'],
- "act": "download", "lid": self.file_id})
+ "act": "download", "lid": self.info['fileid']})
self.api_data = dlinfo = {}
+
for line in html.splitlines():
key, value = line.split(": ")
dlinfo[key.lower()] = value
self.logDebug(dlinfo)
+
if not dlinfo['status'] == "online":
self.offline()
else:
@@ -162,6 +145,7 @@ class ShareonlineBiz(Hoster):
self.pyfile.size = int(dlinfo['size'])
dlLink = dlinfo['url']
+
if dlLink == "server_under_maintenance":
self.tempOffline()
else:
@@ -174,23 +158,29 @@ class ShareonlineBiz(Hoster):
if m is None:
return
- err = m.group(1)
+ errmsg = m.group(1).lower()
+
try:
- self.logError(err, re.search(self.ERROR_INFO_PATTERN, self.html).group(1))
+ self.logError(errmsg, re.search(self.ERROR_INFO_PATTERN, self.html).group(1))
except:
- self.logError(err, "Unknown error occurred")
+ self.logError("Unknown error occurred", errmsg)
- if err == "invalid":
+        if errmsg == "invalid":
self.fail(_("File not available"))
- elif err in ("freelimit", "size", "proxy"):
+
+ elif errmsg in ("freelimit", "size", "proxy"):
self.fail(_("Premium account needed"))
+
+ elif errmsg in ("expired", "server"):
+ self.retry(wait_time=600, reason=errmsg)
+
+ elif 'slot' in errmsg:
+ self.wantReconnect = True
+ self.retry(24, 3600, errmsg)
+
else:
- if err in 'server':
- self.setWait(600, False)
- elif err in 'expired':
- self.setWait(30, False)
- else:
- self.setWait(300, True)
+ self.wantReconnect = True
+ self.retry(wait_time=60, reason=errmsg)
+
- self.wait()
- self.retry(max_tries=25, reason=err)
+getInfo = create_getInfo(ShareonlineBiz)