summary | refs | log | tree | commit | diff | stats
path: root/module/plugins/hoster
diff options
context:
space:
mode:
author    Jeix <devnull@localhost> 2010-08-13 11:03:44 +0200
committer Jeix <devnull@localhost> 2010-08-13 11:03:44 +0200
commit    b1cc10a0f5ea6c9cb8231c67dd016fe4a2414170 (patch)
tree      af31f7d3c6efb49a2afe905d4971001417299c03 /module/plugins/hoster
parent    irchook-xmlrpc fix (diff)
parent    improvements (diff)
download  pyload-b1cc10a0f5ea6c9cb8231c67dd016fe4a2414170.tar.xz
Merge
Diffstat (limited to 'module/plugins/hoster')
-rw-r--r--  module/plugins/hoster/NetloadIn.py       | 20
-rw-r--r--  module/plugins/hoster/ShareCx.py         | 27
-rw-r--r--  module/plugins/hoster/ShareonlineBiz.py  | 31
-rw-r--r--  module/plugins/hoster/YoutubeCom.py      |  7
4 files changed, 60 insertions, 25 deletions
diff --git a/module/plugins/hoster/NetloadIn.py b/module/plugins/hoster/NetloadIn.py
index 9e117fa14..6f0cb9461 100644
--- a/module/plugins/hoster/NetloadIn.py
+++ b/module/plugins/hoster/NetloadIn.py
@@ -4,8 +4,12 @@
import re
from time import sleep
+
from module.plugins.Hoster import Hoster
from module.network.Request import getURL
+from module.plugins.Plugin import chunks
+
+
def getInfo(urls):
## returns list of tupels (name, size (in bytes), status (see FileDatabase), url)
@@ -14,14 +18,10 @@ def getInfo(urls):
apiurl = "http://api.netload.in/info.php?auth=Zf9SnQh9WiReEsb18akjvQGqT0I830e8&bz=1&md5=1&file_id="
id_regex = re.compile("http://.*netload\.in/(?:datei(.*?)(?:\.htm|/)|index.php?id=10&file_id=)")
urls_per_query = 80
-
- iterations = len(urls)/urls_per_query
- if len(urls)%urls_per_query > 0:
- iterations = iterations +1
-
- for i in range(iterations):
+
+ for chunk in chunks(urls, urls_per_query):
ids = ""
- for url in urls[i*urls_per_query:(i+1)*urls_per_query]:
+ for url in chunk:
match = id_regex.search(url)
if match:
ids = ids + match.group(1) +";"
@@ -37,19 +37,17 @@ def getInfo(urls):
result = []
- counter = 0
- for r in api.split():
+ for i, r in enumerate(api.split()):
try:
tmp = r.split(";")
try:
size = int(tmp[2])
except:
size = 0
- result.append( (tmp[1], size, 2 if tmp[3] == "online" else 1, urls[(i*80)+counter]) )
+ result.append( (tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i] ) )
except:
print "Netload prefetch: Error while processing response: "
print r
- counter = counter +1
yield result
diff --git a/module/plugins/hoster/ShareCx.py b/module/plugins/hoster/ShareCx.py
index feee30cd3..e64459754 100644
--- a/module/plugins/hoster/ShareCx.py
+++ b/module/plugins/hoster/ShareCx.py
@@ -3,8 +3,35 @@
import re
from module.plugins.Hoster import Hoster
+from module.plugins.Plugin import chunks
+from module.network.Request import getURL
#from module.BeautifulSoup import BeautifulSoup
+def getInfo(urls):
+ api_url = "http://www.share.cx/uapi?do=check&links="
+
+ for chunk in chunks(urls, 90):
+ get = ""
+ for url in chunk:
+ get += ";"+url
+
+ api = getURL(api_url+get[1:])
+ result = []
+
+ for i, link in enumerate(api.split()):
+ url,name,size = link.split(";")
+ if name and size:
+ status = 2
+ else:
+ status = 1
+
+ if not name: name = chunk[i]
+ if not size: size = 0
+
+ result.append( (name, size, status, chunk[i]) )
+
+ yield result
+
class ShareCx(Hoster):
__name__ = "ShareCx"
__type__ = "hoster"
diff --git a/module/plugins/hoster/ShareonlineBiz.py b/module/plugins/hoster/ShareonlineBiz.py
index 8646fcc88..42a2bc560 100644
--- a/module/plugins/hoster/ShareonlineBiz.py
+++ b/module/plugins/hoster/ShareonlineBiz.py
@@ -13,19 +13,30 @@ from time import sleep
from module.plugins.Hoster import Hoster
from module.network.Request import getURL
+from module.plugins.Plugin import chunks
+
def getInfo(urls):
api_url_base = "http://www.share-online.biz/linkcheck/linkcheck.php"
- api_param_file = {"links": "\n".join(x.replace("http://www.share-online.biz/dl/","") for x in urls)} #api only supports old style links
- src = getURL(api_url_base, post=api_param_file)
- result = []
- for i, res in enumerate(src.split("\n")):
- if not res:
- continue
- fields = res.split(";")
- status = 2 if fields[1] == "OK" else 3
- result.append((fields[2], int(fields[3]), status, urls[i]))
- yield result
+
+ for chunk in chunks(urls, 90):
+ api_param_file = {"links": "\n".join(x.replace("http://www.share-online.biz/dl/","") for x in chunk)} #api only supports old style links
+ src = getURL(api_url_base, post=api_param_file)
+ result = []
+ for i, res in enumerate(src.split("\n")):
+ if not res:
+ continue
+ fields = res.split(";")
+
+ if fields[1] == "OK":
+ status = 2
+ elif fields[1] in ("DELETED", "NOT FOUND"):
+ status = 1
+ else:
+ status = 3
+
+ result.append((fields[2], int(fields[3]), status, chunk[i]))
+ yield result
class ShareonlineBiz(Hoster):
__name__ = "ShareonlineBiz"
diff --git a/module/plugins/hoster/YoutubeCom.py b/module/plugins/hoster/YoutubeCom.py
index e40b0c9ad..d92d8d128 100644
--- a/module/plugins/hoster/YoutubeCom.py
+++ b/module/plugins/hoster/YoutubeCom.py
@@ -32,9 +32,8 @@ class YoutubeCom(Hoster):
if self.getConf("quality") == "hd" or self.getConf("quality") == "hq":
file_suffix = ".mp4"
- name = re.search(file_name_pattern, html).group(1).replace("/", "") + file_suffix
-
- pyfile.name = name.replace("&amp;", "&").replace("ö", "oe").replace("ä", "ae").replace("ü", "ue")
+ name = (re.search(file_name_pattern, html).group(1).replace("/", "") + file_suffix).decode("utf8")
+ pyfile.name = name #.replace("&amp;", "&").replace("ö", "oe").replace("ä", "ae").replace("ü", "ue")
if self.getConf("quality") == "sd":
quality = "&fmt=6"
@@ -45,4 +44,4 @@ class YoutubeCom(Hoster):
file_url = 'http://youtube.com/get_video?video_id=' + videoId + '&t=' + videoHash + quality + "&asv=2"
- self.download(file_url) \ No newline at end of file
+ self.download(file_url)