author | RaNaN <Mast3rRaNaN@hotmail.de> | 2010-08-12 22:44:41 +0200
committer | RaNaN <Mast3rRaNaN@hotmail.de> | 2010-08-12 22:44:41 +0200
commit | 72abfb455275546e110e4daf811480dd47ffceea (patch)
tree | 4f6955a97af4600c31dffc5941a57640d81da0a3 /module/plugins/hoster
parent | encoding fix try (diff)
download | pyload-72abfb455275546e110e4daf811480dd47ffceea.tar.xz
improvements
Diffstat (limited to 'module/plugins/hoster')
-rw-r--r-- | module/plugins/hoster/NetloadIn.py | 20
-rw-r--r-- | module/plugins/hoster/ShareCx.py | 27
-rw-r--r-- | module/plugins/hoster/ShareonlineBiz.py | 31
3 files changed, 57 insertions, 21 deletions
diff --git a/module/plugins/hoster/NetloadIn.py b/module/plugins/hoster/NetloadIn.py
index 9e117fa14..6f0cb9461 100644
--- a/module/plugins/hoster/NetloadIn.py
+++ b/module/plugins/hoster/NetloadIn.py
@@ -4,8 +4,12 @@ import re
 from time import sleep
+
 from module.plugins.Hoster import Hoster
 from module.network.Request import getURL
+from module.plugins.Plugin import chunks
+
+
 
 def getInfo(urls):
     ## returns list of tupels (name, size (in bytes), status (see FileDatabase), url)
@@ -14,14 +18,10 @@ def getInfo(urls):
     apiurl = "http://api.netload.in/info.php?auth=Zf9SnQh9WiReEsb18akjvQGqT0I830e8&bz=1&md5=1&file_id="
     id_regex = re.compile("http://.*netload\.in/(?:datei(.*?)(?:\.htm|/)|index.php?id=10&file_id=)")
     urls_per_query = 80
-
-    iterations = len(urls)/urls_per_query
-    if len(urls)%urls_per_query > 0:
-        iterations = iterations +1
-
-    for i in range(iterations):
+
+    for chunk in chunks(urls, urls_per_query):
         ids = ""
-        for url in urls[i*urls_per_query:(i+1)*urls_per_query]:
+        for url in chunk:
             match = id_regex.search(url)
             if match:
                 ids = ids + match.group(1) +";"
@@ -37,19 +37,17 @@ def getInfo(urls):
 
         result = []
-        counter = 0
-        for r in api.split():
+        for i, r in enumerate(api.split()):
             try:
                 tmp = r.split(";")
                 try:
                     size = int(tmp[2])
                 except:
                     size = 0
-                result.append( (tmp[1], size, 2 if tmp[3] == "online" else 1, urls[(i*80)+counter]) )
+                result.append( (tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i] ) )
             except:
                 print "Netload prefetch: Error while processing response: "
                 print r
-            counter = counter +1
 
         yield result
diff --git a/module/plugins/hoster/ShareCx.py b/module/plugins/hoster/ShareCx.py
index feee30cd3..e64459754 100644
--- a/module/plugins/hoster/ShareCx.py
+++ b/module/plugins/hoster/ShareCx.py
@@ -3,8 +3,35 @@ import re
 from module.plugins.Hoster import Hoster
+from module.plugins.Plugin import chunks
+from module.network.Request import getURL
 #from module.BeautifulSoup import BeautifulSoup
+def getInfo(urls):
+    api_url = "http://www.share.cx/uapi?do=check&links="
+
+    for chunk in chunks(urls, 90):
+        get = ""
+        for url in chunk:
+            get += ";"+url
+
+        api = getURL(api_url+get[1:])
+        result = []
+
+        for i, link in enumerate(api.split()):
+            url,name,size = link.split(";")
+            if name and size:
+                status = 2
+            else:
+                status = 1
+
+            if not name: name = chunk[i]
+            if not size: size = 0
+
+            result.append( (name, size, status, chunk[i]) )
+
+        yield result
+
 class ShareCx(Hoster):
     __name__ = "ShareCx"
     __type__ = "hoster"
diff --git a/module/plugins/hoster/ShareonlineBiz.py b/module/plugins/hoster/ShareonlineBiz.py
index 8646fcc88..42a2bc560 100644
--- a/module/plugins/hoster/ShareonlineBiz.py
+++ b/module/plugins/hoster/ShareonlineBiz.py
@@ -13,19 +13,30 @@ from time import sleep
 
 from module.plugins.Hoster import Hoster
 from module.network.Request import getURL
+from module.plugins.Plugin import chunks
+
 
 def getInfo(urls):
     api_url_base = "http://www.share-online.biz/linkcheck/linkcheck.php"
-    api_param_file = {"links": "\n".join(x.replace("http://www.share-online.biz/dl/","") for x in urls)} #api only supports old style links
-    src = getURL(api_url_base, post=api_param_file)
-    result = []
-    for i, res in enumerate(src.split("\n")):
-        if not res:
-            continue
-        fields = res.split(";")
-        status = 2 if fields[1] == "OK" else 3
-        result.append((fields[2], int(fields[3]), status, urls[i]))
-    yield result
+
+    for chunk in chunks(urls, 90):
+        api_param_file = {"links": "\n".join(x.replace("http://www.share-online.biz/dl/","") for x in chunk)} #api only supports old style links
+        src = getURL(api_url_base, post=api_param_file)
+        result = []
+        for i, res in enumerate(src.split("\n")):
+            if not res:
+                continue
+            fields = res.split(";")
+
+            if fields[1] == "OK":
+                status = 2
+            elif fields[1] in ("DELETED", "NOT FOUND"):
+                status = 1
+            else:
+                status = 3
+
+            result.append((fields[2], int(fields[3]), status, chunk[i]))
+        yield result
 
 class ShareonlineBiz(Hoster):
     __name__ = "ShareonlineBiz"
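
Note on the shared helper: all three getInfo() functions above now batch their link-check requests through chunks() imported from module.plugins.Plugin, whose implementation is not part of this diff. The sketch below is only an assumption of how such a helper typically behaves (splitting a sequence into fixed-size slices), plus a hypothetical usage example with made-up URLs; it is not the code shipped in module/plugins/Plugin.py.

# Sketch only -- assumed behaviour of the chunks() helper imported above,
# not the actual implementation from module/plugins/Plugin.py.
def chunks(seq, size):
    """Yield successive slices of seq with at most `size` items each."""
    for i in range(0, len(seq), size):
        yield seq[i:i + size]

# Hypothetical usage mirroring the refactored getInfo() generators:
# each chunk becomes one API request, and each request yields one result list.
urls = ["http://netload.in/datei%d.htm" % n for n in range(200)]  # made-up example links
for chunk in chunks(urls, 80):
    print len(chunk)   # prints 80, 80, 40 -- one API call per batch

Batching keeps every request within the hosters' per-call limits seen in the diff (80 links for Netload, 90 for Share.cx and Share-online.biz), and because each getInfo() yields one result list per chunk, the caller can process file information as soon as a batch returns instead of waiting for all URLs to be checked.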