-rw-r--r--   module/network/Browser.py               5
-rw-r--r--   module/network/FTPBase.py               5
-rw-r--r--   module/network/HTTPDownload.py         22
-rwxr-xr-x   module/network/NewRequest.py          185
-rw-r--r--   module/network/XDCCBase.py              5
-rw-r--r--   module/network/helper.py               13
-rw-r--r--   module/plugins/hoster/PornhubCom.py    16
-rw-r--r--   module/plugins/hoster/VeehdCom.py      80
8 files changed, 318 insertions(+), 13 deletions(-)
diff --git a/module/network/Browser.py b/module/network/Browser.py
index 90502b298..6277f3a45 100644
--- a/module/network/Browser.py
+++ b/module/network/Browser.py
@@ -16,9 +16,12 @@ class Browser():
self.bucket = bucket
self.http = HTTPBase(interface=interface, proxies=proxies)
- self.http.cookieJar = cookieJar
+ self.setCookieJar(cookieJar)
self.proxies = proxies
+ def setCookieJar(self, cookieJar):
+ self.http.cookieJar = cookieJar
+
def clearReferer(self):
self.lastURL = None
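
The Browser hunk above only factors the cookie-jar assignment into a setter, so a jar can now be
installed or swapped after construction. A hedged usage sketch, not part of the commit: it assumes
Browser accepts the interface and cookieJar keywords implied by the constructor body, and CookieJar
stands for whatever jar class HTTPBase expects.

    jar = CookieJar()                        # assumed: same jar class HTTPBase/HTTPDownload use
    a = Browser(interface=None, cookieJar=jar)
    b = Browser(interface=None, cookieJar=None)
    b.setCookieJar(jar)                      # b now reuses the cookies collected through a
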
diff --git a/module/network/FTPBase.py b/module/network/FTPBase.py
index da67573a3..036da383a 100644
--- a/module/network/FTPBase.py
+++ b/module/network/FTPBase.py
@@ -63,6 +63,9 @@ class FTPBase(FTP):
self.welcome = self.getresp()
return self.welcome
+class WrappedFTPDeferred(WrappedDeferred):
+ pass
+
class FTPDownload():
def __init__(self, url, filename, interface=None, bucket=None, proxies={}):
self.url = url
@@ -162,7 +165,7 @@ class FTPDownload():
self.size = self.ftp.size(self.url.split("/")[-1])
self._download(offset)
- return self.deferred
+ return WrappedFTPDeferred(self, self.deferred)
if __name__ == "__main__":
import sys
diff --git a/module/network/HTTPDownload.py b/module/network/HTTPDownload.py
index f17e16492..2b8c44a87 100644
--- a/module/network/HTTPDownload.py
+++ b/module/network/HTTPDownload.py
@@ -102,6 +102,9 @@ class ChunkInfo():
def getChunkEncoding(self, index):
return self.chunks[index][2]
+class WrappedHTTPDeferred(WrappedDeferred):
+ pass
+
class HTTPDownload():
def __init__(self, url, filename, get={}, post={}, referer=None, cookies=True, customHeaders={}, bucket=None, interface=None, proxies={}):
self.url = url
@@ -122,6 +125,7 @@ class HTTPDownload():
self.deferred = Deferred()
self.finished = False
+ self._abort = False
self.size = None
self.cookieJar = CookieJar()
@@ -143,9 +147,15 @@ class HTTPDownload():
arrived = self.size
return arrived
- def abort(self):
+ def setAbort(self, val):
+ self._abort = val
for chunk in self.chunks:
- chunk.abort = True
+ chunk.abort = val
+
+ def getAbort(self):
+ return self._abort
+
+ abort = property(getAbort, setAbort)
def getSpeed(self):
speed = 0
@@ -264,7 +274,7 @@ class HTTPDownload():
dg.addCallback(self._copyChunks)
if not len(self.chunks):
dg.callback()
- return self.deferred
+ return WrappedHTTPDeferred(self, self.deferred)
else:
raise Exception("no chunks")
@@ -272,8 +282,8 @@ if __name__ == "__main__":
import sys
from Bucket import Bucket
bucket = Bucket()
- bucket.setRate(3000*1024)
- bucket = None
+ bucket.setRate(200*1024)
+ #bucket = None
url = "http://speedtest.netcologne.de/test_100mb.bin"
@@ -305,5 +315,5 @@ if __name__ == "__main__":
break
sleep(1)
except KeyboardInterrupt:
- dwnld.abort()
+ dwnld.abort = True
sys.exit()
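
Two caller-visible effects of the HTTPDownload hunks, sketched rather than quoted from the commit:
abort changes from a method into a property whose setter fans the flag out to every chunk, and the
method patched in the final hunk returns a WrappedHTTPDeferred instead of the bare Deferred. Here
dwnld is an HTTPDownload instance as in the __main__ block; the download() entry point and its
argument-free call are assumptions.

    d = dwnld.download()              # assumed entry point; now returns WrappedHTTPDeferred(dwnld, deferred)
    d.addCallback(lambda *a: None)    # callback registration is forwarded to the wrapped Deferred
    print d.getSpeed()                # any other attribute lookup falls through to dwnld
    dwnld.abort = True                # setAbort(): stores the flag and sets chunk.abort on every chunk
    aborted = dwnld.abort             # getAbort(): reads the stored flag back
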
diff --git a/module/network/NewRequest.py b/module/network/NewRequest.py
new file mode 100755
index 000000000..9ac7d54aa
--- /dev/null
+++ b/module/network/NewRequest.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License,
+ or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+ @author: spoob
+ @author: RaNaN
+ @author: mkaay
+"""
+
+import time
+from os.path import exists, join
+from shutil import move
+import urllib
+
+from module.plugins.Plugin import Abort
+
+from module.network.Browser import Browser
+from module.network.helper import waitFor
+
+class Request:
+ def __init__(self, interface=None):
+ self.browser = Browser(interface=interface)
+ self.d = None
+
+ self.dl_time = 0
+ self.dl_finished = 0
+ self.dl_size = 0
+ self.dl_arrived = 0
+ self.dl = False
+
+ self.abort = False
+
+ self.lastEffectiveURL = self.lastURL = property(lambda: self.browser.lastUrl)
+ self.auth = False
+
+ self.canContinue = False
+
+ self.dl_speed = 0.0
+
+ self.cookieJar = None
+ self.interface = interface
+ self.progressNotify = None
+
+ # change this for connection information
+ self.debug = False
+
+ def set_timeout(self, timeout):
+ self.timeout = int(timeout)
+
+ def setCookieJar(self, j):
+ self.cookieJar = j
+
+ def addCookies(self):
+ #@TODO
+ pass
+
+ def getCookies(self):
+ #@TODO
+ pass
+
+ def getCookie(self, name):
+ #@TODO
+ pass
+
+ def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, no_post_encode=False, raw_cookies={}):
+ url = self.__myquote(str(url))
+
+ #@TODO: cookies
+ #@TODO: auth
+
+ if not ref:
+ self.browser.clearReferer()
+
+ return self.browser.getPage(url, get=get, post=post, cookies=cookies)
+
+ def add_auth(self, user, pw):
+ #@TODO
+ pass
+
+ def clearCookies(self):
+ #@TODO
+ pass
+
+ def add_proxy(self, protocol, adress):
+ #@TODO
+ pass
+
+ def download(self, url, file_name, folder, get={}, post={}, ref=True, cookies=True, no_post_encode=False):
+ url = self.__myquote(str(url))
+
+ file_temp = self.get_free_name(folder,file_name)
+
+ #@TODO: cookies
+ #@TODO: auth
+
+ if not ref:
+ self.browser.clearReferer()
+
+ self.d = self.browser.httpDownload(url, file_temp, get=get, post=post, cookies=cookies, chunks=1, resume=self.canContinue)
+ self.dl_time = property(lambda: self.d.startTime)
+ self.dl_finished = property(lambda: self.d.endTime)
+ self.dl_speed = property(lambda: self.d.speed)
+ self.dl_size = property(lambda: self.d.size)
+ self.dl = property(lambda: True if self.d.startTime and not self.d.endTime else False)
+ self.abort = property(self.d.getAbort, self.d.setAbort)
+
+ waitFor(self.d)
+
+ if self.abort: raise Abort
+
+ free_name = self.get_free_name(folder, file_name)
+ move(file_temp, free_name)
+
+ self.dl_time = 0
+ self.dl_finished = 0
+ self.dl_size = 0
+ self.dl_arrived = 0
+ self.dl = False
+ self.dl_speed = 0.0
+
+ return free_name
+
+ def get_speed(self):
+ try:
+ return self.dl_speed
+ except:
+ return 0
+
+ def get_ETA(self):
+ try:
+ return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
+ except:
+ return 0
+
+ def bytes_left(self):
+ return (self.dl_size - self.dl_arrived)
+
+ def progress(self):
+ if self.progressNotify:
+ try:
+ progress = int(float(self.dl_arrived)/self.dl_size*100)
+ self.progressNotify(progress)
+ except:
+ pass
+
+ def get_free_name(self, folder, file_name):
+ file_count = 0
+ file_name = join(folder, file_name)
+ while exists(file_name):
+ file_count += 1
+ if "." in file_name:
+ file_split = file_name.split(".")
+ temp_name = "%s-%i.%s" % (".".join(file_split[:-1]), file_count, file_split[-1])
+ else:
+ temp_name = "%s-%i" % (file_name, file_count)
+ if not exists(temp_name):
+ file_name = temp_name
+ return file_name
+
+ def __myquote(self, url):
+ return urllib.quote(url, safe="%/:=&?~#+!$,;'@()*[]")
+
+
+def getURL(url, get={}, post={}):
+ """
+ currently used for update check
+ """
+ b = Browser()
+ return b.getPage(url, get=get, post=post)
+
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
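
get_free_name() above increments a counter until the candidate path is unused, keeping the file
extension when there is one. A hedged illustration of the names it produces, with made-up folders
and files:

    r = Request()
    # downloads/video.flv already exists on disk:
    print r.get_free_name("downloads", "video.flv")   # -> downloads/video-1.flv
    # downloads/video.flv and downloads/video-1.flv both exist:
    print r.get_free_name("downloads", "video.flv")   # -> downloads/video-2.flv
    # a name without a dot just gets the plain counter suffix:
    print r.get_free_name("downloads", "README")      # -> downloads/README-1
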
diff --git a/module/network/XDCCBase.py b/module/network/XDCCBase.py
index b242c8f27..6d9978b80 100644
--- a/module/network/XDCCBase.py
+++ b/module/network/XDCCBase.py
@@ -34,6 +34,9 @@ from select import select
class XDCCError(Exception):
pass
+class WrappedXDCCDeferred(WrappedDeferred):
+ pass
+
class XDCCDownload():
def __init__(self, server, port, channel, bot, pack, nick, ident, real, filename, timeout=30, bucket=None, interface=None, proxies={}):
self.server = server
@@ -254,7 +257,7 @@ class XDCCDownload():
debug("XDCC: Downloading %s from %s:%d" % (packname, ip, port))
self._download(ip, port)
- return self.deferred
+ return WrappedXDCCDeferred(self, self.deferred)
if __name__ == "__main__":
import sys
diff --git a/module/network/helper.py b/module/network/helper.py
index 6900467f5..8cc61d3ff 100644
--- a/module/network/helper.py
+++ b/module/network/helper.py
@@ -110,3 +110,16 @@ class DeferredGroup(Deferred):
if len(self.group) == self.done:
self.callback()
+class WrappedDeferred():
+ def __init__(self, download, d):
+ self.download = download
+ self.d = d
+
+ def addCallback(self, *args, **kwargs):
+ self.d.addCallback(*args, **kwargs)
+
+ def addErrback(self, *args, **kwargs):
+ self.d.addErrback(*args, **kwargs)
+
+ def __getattr__(self, attr):
+ return getattr(self.download, attr)
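
WrappedDeferred is a thin proxy: addCallback and addErrback are defined on the wrapper and hand off
to the real Deferred, while any other attribute read misses on the wrapper and __getattr__ forwards
it to the download object. Attribute writes are not forwarded, which is why the HTTPDownload
__main__ block sets abort on the download itself and not on the returned wrapper. A hedged,
self-contained sketch of the same delegation with stand-in classes (not pyLoad code):

    class FakeDeferred:
        def addCallback(self, f, *a, **kw): self.cb = f
        def addErrback(self, f, *a, **kw): self.eb = f

    class FakeDownload:
        speed = 42

    w = WrappedDeferred(FakeDownload(), FakeDeferred())
    w.addCallback(lambda *a: None)   # defined on the wrapper, handed to the inner Deferred
    print w.speed                    # missing on the wrapper, __getattr__ asks the download: 42
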
diff --git a/module/plugins/hoster/PornhubCom.py b/module/plugins/hoster/PornhubCom.py
index 2df7ba452..6afa9d295 100644
--- a/module/plugins/hoster/PornhubCom.py
+++ b/module/plugins/hoster/PornhubCom.py
@@ -8,7 +8,7 @@ class PornhubCom(Hoster):
__name__ = "PornhubCom"
__type__ = "hoster"
__pattern__ = r'http://[\w\.]*?pornhub\.com/view_video\.php\?viewkey=[\w\d]+'
- __version__ = "0.2"
+ __version__ = "0.3"
__description__ = """Pornhub.com Download Hoster"""
__author_name__ = ("jeix")
__author_mail__ = ("jeix@hasnomail.de")
@@ -48,10 +48,18 @@ class PornhubCom(Hoster):
def get_file_name(self):
if self.html is None:
self.download_html()
-
- name = re.findall('<h1>(.*?)</h1>', self.html)[1] + ".flv"
- return name
+ match = re.search(r'<title[^>]+>([^<]+) - ', self.html)
+ if match:
+            name = match.group(1)
+ else:
+ matches = re.findall('<h1>(.*?)</h1>', self.html)
+ if len(matches) > 1:
+ name = matches[1]
+ else:
+ name = matches[0]
+
+ return name + '.flv'
def file_exists(self):
""" returns True or False
diff --git a/module/plugins/hoster/VeehdCom.py b/module/plugins/hoster/VeehdCom.py
new file mode 100644
index 000000000..06e59c7fa
--- /dev/null
+++ b/module/plugins/hoster/VeehdCom.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import re
+from module.plugins.Hoster import Hoster
+
+class VeehdCom(Hoster):
+ __name__ = 'VeehdCom'
+ __type__ = 'hoster'
+ __pattern__ = r'http://veehd\.com/video/\d+_\S+'
+ __config__ = [
+ ('filename_spaces', 'bool', "Allow spaces in filename", 'False'),
+ ('replacement_char', 'str', "Filename replacement character", '_'),
+ ]
+ __version__ = '0.1'
+ __description__ = """Veehd.com Download Hoster"""
+ __author_name__ = ('cat')
+ __author_mail__ = ('cat@pyload')
+
+ def _debug(self, msg):
+ self.log.debug('[%s] %s' % (self.__name__, msg))
+
+ def setup(self):
+ self.html = None
+ self.multiDL = True
+ self.req.canContinue = True
+
+ def process(self, pyfile):
+ self.download_html()
+ if not self.file_exists():
+ self.offline()
+
+ pyfile.name = self.get_file_name()
+ self.download(self.get_file_url())
+
+ def download_html(self):
+ url = self.pyfile.url
+ self._debug("Requesting page: %s" % (repr(url),))
+ self.html = self.load(url)
+
+ def file_exists(self):
+ if self.html is None:
+ self.download_html()
+
+ if '<title>Veehd</title>' in self.html:
+ return False
+ return True
+
+ def get_file_name(self):
+ if self.html is None:
+ self.download_html()
+
+ match = re.search(r'<title[^>]*>([^<]+) on Veehd</title>', self.html)
+ if not match:
+ self.fail("video title not found")
+ name = match.group(1)
+
+ # replace unwanted characters in filename
+        # build the character whitelist according to the filename_spaces option
+        if self.getConf('filename_spaces'):
+            pattern = '[^0-9A-Za-z. ]+'
+        else:
+            pattern = '[^0-9A-Za-z.]+'
+
+        name = re.sub(pattern, self.getConf('replacement_char'), name)
+ return name + '.avi'
+
+ def get_file_url(self):
+ """ returns the absolute downloadable filepath
+ """
+ if self.html is None:
+ self.download_html()
+
+ match = re.search(r'<embed type="video/divx" '
+ r'src="(http://([^/]*\.)?veehd\.com/dl/[^"]+)"',
+ self.html)
+ if not match:
+ self.fail("embedded video url not found")
+ file_url = match.group(1)
+
+ return file_url
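
The filename cleanup in get_file_name() reduces to one re.sub whose whitelist depends on the
filename_spaces option, with '.avi' appended afterwards. A hedged illustration with an invented
title:

    import re
    title = 'Some Clip: Part 2 (HD)'
    print re.sub('[^0-9A-Za-z. ]+', '_', title)    # filename_spaces on:  Some Clip_ Part 2 _HD_
    print re.sub('[^0-9A-Za-z.]+', '_', title)     # filename_spaces off: Some_Clip_Part_2_HD_
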