summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorGravatar RaNaN <Mast3rRaNaN@hotmail.de> 2011-03-11 23:41:40 +0100
committerGravatar RaNaN <Mast3rRaNaN@hotmail.de> 2011-03-11 23:41:40 +0100
commitb692dc0ba8e8940844eb647a1f15e435a55ce4eb (patch)
tree1653344e5b37504894fe2c2e226f9d8d094bd3bb
parentfixed DepositFile free (diff)
downloadpyload-b692dc0ba8e8940844eb647a1f15e435a55ce4eb.tar.xz
closed #259, #250
-rw-r--r--module/gui/PackageDock.py18
-rw-r--r--module/network/Browser.py8
-rw-r--r--module/network/HTTPChunk.py7
-rw-r--r--module/network/HTTPDownload.py12
-rw-r--r--module/network/HTTPRequest.py13
-rw-r--r--module/plugins/PluginManager.py19
-rw-r--r--module/plugins/hoster/DepositfilesCom.py5
-rw-r--r--module/plugins/hoster/FileserveCom.py44
-rw-r--r--module/plugins/hoster/ShareonlineBiz.py14
-rw-r--r--module/web/media/default/js/funktions.js65
10 files changed, 131 insertions, 74 deletions
diff --git a/module/gui/PackageDock.py b/module/gui/PackageDock.py
index 077abb44c..73db8f177 100644
--- a/module/gui/PackageDock.py
+++ b/module/gui/PackageDock.py
@@ -15,6 +15,7 @@
@author: mkaay
"""
+import re
from PyQt4.QtCore import *
from PyQt4.QtGui import *
@@ -45,6 +46,16 @@ class NewPackageDock(QDockWidget):
self.widget.box.clear()
self.hide()
+ def parseUri(self):
+
+ text=str(self.widget.box.toPlainText())
+ self.widget.box.setText("")
+ result = re.findall(r"(?:ht|f)tps?:\/\/[a-zA-Z0-9\-\.\/\?=_&%#]+[<| |\"|\'|\r|\n|\t]{1}", text)
+ for url in result:
+ if "\n" or "\t" or "\r" or "\"" or "<" or "'" in url:
+ url = url[:-1]
+ self.widget.box.append("%s " % url)
+
class NewPackageWindow(QWidget):
def __init__(self, dock):
QWidget.__init__(self)
@@ -64,6 +75,7 @@ class NewPackageWindow(QWidget):
self.passwordInput = passwordInput
save = QPushButton(_("Create"))
+ parseUri = QPushButton(_("Filter URLs"))
layout.addWidget(nameLabel, 0, 0)
layout.addWidget(nameInput, 0, 1)
@@ -71,6 +83,8 @@ class NewPackageWindow(QWidget):
layout.addWidget(passwordInput, 1, 1)
layout.addWidget(linksLabel, 2, 0, 1, 2)
layout.addWidget(self.box, 3, 0, 1, 2)
- layout.addWidget(save, 4, 0, 1, 2)
-
+ layout.addWidget(parseUri, 4, 0, 1, 2)
+ layout.addWidget(save, 5, 0, 1, 2)
+
self.connect(save, SIGNAL("clicked()"), self.dock.slotDone)
+ self.connect(parseUri, SIGNAL("clicked()"), self.dock.parseUri) \ No newline at end of file
diff --git a/module/network/Browser.py b/module/network/Browser.py
index 6cc907491..adb2cb5d9 100644
--- a/module/network/Browser.py
+++ b/module/network/Browser.py
@@ -95,6 +95,14 @@ class Browser(object):
""" retrieves page """
return self.http.load(url, get, post, ref, cookies, just_header)
+
+ def putHeader(self, name, value):
+ """ add a header to the request """
+ self.http.putHeader(name, value)
+
+ def clearHeaders(self):
+ self.http.clearHeaders()
+
def close(self):
""" cleanup """
if hasattr(self, "http"):
diff --git a/module/network/HTTPChunk.py b/module/network/HTTPChunk.py
index 9ca1be909..2fc48a588 100644
--- a/module/network/HTTPChunk.py
+++ b/module/network/HTTPChunk.py
@@ -36,6 +36,13 @@ class ChunkInfo():
self.resume = False
self.chunks = []
+ def __repr__(self):
+ ret = "ChunkInfo: %s, %s\n" % (self.name, self.size)
+ for i, c in enumerate(self.chunks):
+ ret += "%s# %s\n" % (i, c[1])
+
+ return ret
+
def setSize(self, size):
self.size = int(size)
diff --git a/module/network/HTTPDownload.py b/module/network/HTTPDownload.py
index 4c9d0705d..50b33cd97 100644
--- a/module/network/HTTPDownload.py
+++ b/module/network/HTTPDownload.py
@@ -48,8 +48,6 @@ class HTTPDownload():
self.chunks = []
- self.infoSaved = False # needed for 1 chunk resume
-
try:
self.info = ChunkInfo.load(filename)
self.info.resume = True #resume is only possible with valid info file
@@ -123,6 +121,7 @@ class HTTPDownload():
def _download(self, chunks, resume):
if not resume:
+ self.info.clear()
self.info.addChunk("%s.chunk0" % self.filename, (0, 0)) #create an initial entry
init = HTTPChunk(0, self, None, resume) #initial chunk that will load complete file (if needed)
@@ -134,15 +133,8 @@ class HTTPDownload():
chunksCreated = False
while 1:
- if (chunks == 1) and self.chunkSupport and self.size and not self.infoSaved:
- # if chunk size is one, save info file here to achieve resume support
- self.info.setSize(self.size)
- self.info.createChunks(1)
- self.info.save()
- self.infoSaved = True
-
#need to create chunks
- if not chunksCreated and self.chunkSupport and self.size: #will be set later by first chunk
+        if not chunksCreated and self.chunkSupport and self.size: #will be set later by first chunk
if not resume:
self.info.setSize(self.size)
diff --git a/module/network/HTTPRequest.py b/module/network/HTTPRequest.py
index 42b7aaf51..cd3635bcf 100644
--- a/module/network/HTTPRequest.py
+++ b/module/network/HTTPRequest.py
@@ -47,6 +47,8 @@ class HTTPRequest():
self.header = ""
+ self.headers = [] #temporary request header
+
self.initHandle()
self.setInterface(interface, proxies)
@@ -150,7 +152,8 @@ class HTTPRequest():
self.header = ""
- #@TODO raw_cookies and some things in old backend, which are apperently not needed
+ if self.headers:
+ self.c.setopt(pycurl.HTTPHEADER, self.headers)
if just_header:
self.c.setopt(pycurl.NOBODY, 1)
@@ -165,6 +168,8 @@ class HTTPRequest():
self.lastEffectiveURL = self.c.getinfo(pycurl.EFFECTIVE_URL)
self.addCookies()
+ self.headers = []
+
return rep
def verifyHeader(self):
@@ -198,6 +203,12 @@ class HTTPRequest():
""" writes header """
self.header += buf
+ def putHeader(self, name, value):
+ self.headers.append("%s: %s" % (name, value))
+
+ def clearHeaders(self):
+ self.headers = []
+
def close(self):
""" cleanup, unusable after this """
self.rep.close()
diff --git a/module/plugins/PluginManager.py b/module/plugins/PluginManager.py
index a911cdd1e..0848d520e 100644
--- a/module/plugins/PluginManager.py
+++ b/module/plugins/PluginManager.py
@@ -20,16 +20,11 @@
import re
import sys
-from os import listdir
-from os import makedirs
-
-from os.path import isfile
-from os.path import join
-from os.path import exists
-from os.path import abspath
-
+from os import listdir, makedirs
+from os.path import isfile, join, exists, abspath
from sys import version_info
from itertools import chain
+from traceback import print_exc
try:
from ast import literal_eval
@@ -292,13 +287,15 @@ class PluginManager():
try:
module = __import__(value["path"], globals(), locals(), [value["name"]] , -1)
+ pluginClass = getattr(module, name)
except Exception, e:
self.log.error(_("Error importing %(name)s: %(msg)s") % {"name": name, "msg": str(e) })
self.log.error(_("You should fix dependencies or deactivate load on startup."))
+ if self.core.debug:
+ print_exc()
+
continue
-
- pluginClass = getattr(module, name)
-
+
value["class"] = pluginClass
classes.append(pluginClass)
diff --git a/module/plugins/hoster/DepositfilesCom.py b/module/plugins/hoster/DepositfilesCom.py
index 3c1124709..b2cab30de 100644
--- a/module/plugins/hoster/DepositfilesCom.py
+++ b/module/plugins/hoster/DepositfilesCom.py
@@ -48,8 +48,9 @@ class DepositfilesCom(Hoster):
wait_time = int(wait.group(1))
self.log.info( "%s: Traffic used up. Waiting %d seconds." % (self.__name__, wait_time) )
self.setWait(wait_time)
- if wait_time > 300:
- self.wantReconnect = True
+ self.wantReconnect = True
+ self.wait()
+ self.retry()
wait = re.search(r'>Try in (\d+) minutes or use GOLD account', self.html)
if wait:
diff --git a/module/plugins/hoster/FileserveCom.py b/module/plugins/hoster/FileserveCom.py
index 2e1bb1a59..9e14bfe7d 100644
--- a/module/plugins/hoster/FileserveCom.py
+++ b/module/plugins/hoster/FileserveCom.py
@@ -38,7 +38,7 @@ def getInfo(urls):
class FileserveCom(Hoster):
__name__ = "FileserveCom"
__type__ = "hoster"
- __pattern__ = r"http://(www\.)?fileserve\.com/file/.*?(/.*)?"
+ __pattern__ = r"http://(www\.)?fileserve\.com/file/[a-zA-Z0-9]+"
__version__ = "0.3"
__description__ = """Fileserve.Com File Download Hoster"""
__author_name__ = ("jeix", "mkaay")
@@ -54,6 +54,8 @@ class FileserveCom(Hoster):
else:
self.multiDL = False
+ self.file_id = re.search(r"fileserve\.com/file/([a-zA-Z0-9]+)(http:.*)?", self.pyfile.url).group(1)
+
def process(self, pyfile):
self.html = self.load(self.pyfile.url, ref=False, cookies=False if self.account else True, utf8=True)
@@ -76,7 +78,26 @@ class FileserveCom(Hoster):
self.download(self.pyfile.url, post={"download":"premium"}, cookies=True)
def handleFree(self):
-
+
+ self.html = self.load(self.pyfile.url)
+ jsPage = re.search(r"\"(/landing/.*?/download_captcha\.js)\"", self.html)
+ self.req.putHeader("X-Requested-With", "XMLHttpRequest")
+
+ jsPage = self.load("http://fileserve.com" + jsPage.group(1))
+ action = self.load(self.pyfile.url, post={"checkDownload" : "check"})
+
+ if "timeLimit" in action:
+ html = self.load(self.pyfile.url, post={"checkDownload" : "showError", "errorType" : "timeLimit"})
+ wait = re.search(r"You need to wait (\d+) seconds to start another download", html)
+ if wait:
+ wait = int(wait.group(1))
+ else:
+ wait = 720
+
+ self.setWait(wait, True)
+ self.wait()
+ self.retry()
+
if r'<div id="captchaArea" style="display:none;">' in self.html or \
r'/showCaptcha\(\);' in self.html:
# we got a captcha
@@ -84,18 +105,25 @@ class FileserveCom(Hoster):
recaptcha = ReCaptcha(self)
challenge, code = recaptcha.challenge(id)
- shortencode = re.search(r'name="recaptcha_shortencode_field" value="(.*?)"', self.html).group(1)
-
self.html = self.load(r'http://www.fileserve.com/checkReCaptcha.php', post={'recaptcha_challenge_field':challenge,
- 'recaptcha_response_field':code, 'recaptcha_shortencode_field': shortencode})
+ 'recaptcha_response_field':code, 'recaptcha_shortencode_field': self.file_id})
if r'incorrect-captcha-sol' in self.html:
+ self.invalidCaptcha()
self.retry()
wait = self.load(self.pyfile.url, post={"downloadLink":"wait"})
- wait = wait.decode("UTF-8").encode("ascii", "ignore") # Remove unicode stuff
- self.setWait(int(wait)+3)
- self.wait()
+ wait = re.search(r".*?(\d+).*?", wait)
+ if wait:
+ wait = wait.group(1)
+ if wait == "404":
+ self.log.debug("No wait time returned")
+ self.setWait(30)
+ else:
+ self.setWait(int(wait))
+
+ self.wait()
+
# show download link
self.load(self.pyfile.url, post={"downloadLink":"show"})
diff --git a/module/plugins/hoster/ShareonlineBiz.py b/module/plugins/hoster/ShareonlineBiz.py
index 419d677f6..6dd8933c0 100644
--- a/module/plugins/hoster/ShareonlineBiz.py
+++ b/module/plugins/hoster/ShareonlineBiz.py
@@ -37,7 +37,7 @@ def getInfo(urls):
class ShareonlineBiz(Hoster):
__name__ = "ShareonlineBiz"
__type__ = "hoster"
- __pattern__ = r"(?:http://)?(?:www.)?share-online.biz/(download.php\?id=|dl/)"
+ __pattern__ = r"http://[\w\.]*?(share\-online\.biz|egoshare\.com)/(download.php\?id\=|dl/)[\w]+"
__version__ = "0.2"
__description__ = """Shareonline.biz Download Hoster"""
__author_name__ = ("spoob", "mkaay")
@@ -47,16 +47,15 @@ class ShareonlineBiz(Hoster):
# range request not working?
# api supports resume, only one chunk
# website isn't supporting resuming in first place
+ self.file_id = re.search(r"(id\=|/dl/)([a-zA-Z0-9]+)", self.pyfile.url).group(2)
+ self.pyfile.url = "http://www.share-online.biz/dl/" + self.file_id
+
self.multiDL = False
self.chunkLimit = 1
if self.account and self.account.isPremium(self.user):
self.multiDL = True
def process(self, pyfile):
- self.pyfile.url = self.pyfile.url.replace("http://www.share-online.biz/download.php?id=", "http://www.share-online.biz/dl/")
- self.pyfile.url = self.pyfile.url.replace("http://share-online.biz/download.php?id=", "http://www.share-online.biz/dl/")
- self.pyfile.url = self.pyfile.url.replace("http://share-online.biz/dl/", "http://www.share-online.biz/dl/")
-
self.downloadAPIData()
pyfile.name = self.api_data["filename"]
pyfile.sync()
@@ -86,7 +85,7 @@ class ShareonlineBiz(Hoster):
self.resumeDownload = False
self.html = self.load(self.pyfile.url) #refer, stuff
- self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free":"1"})
+ self.html = self.load("%s/free/" % self.pyfile.url, post={"dl_free":"1", "choice": "free"})
if re.search(r"/failure/full/1", self.req.lastEffectiveURL):
self.setWait(120)
self.log.info("%s: no free slots, waiting 120 seconds" % (self.__name__))
@@ -122,9 +121,8 @@ class ShareonlineBiz(Hoster):
self.fail("DL API error")
self.req.cj.setCookie("share-online.biz", "dl", info["dl"])
- lid = self.pyfile.url.replace("http://www.share-online.biz/dl/", "") #cut of everything but the id
- src = self.load("http://api.share-online.biz/account.php?username=%s&password=%s&act=download&lid=%s" % (self.user, self.account.accounts[self.user]["password"], lid), post={})
+ src = self.load("http://api.share-online.biz/account.php?username=%s&password=%s&act=download&lid=%s" % (self.user, self.account.accounts[self.user]["password"], self.file_id), post={})
dlinfo = {}
for line in src.splitlines():
key, value = line.split(": ")
diff --git a/module/web/media/default/js/funktions.js b/module/web/media/default/js/funktions.js
index 6240d32ad..8d9d332b3 100644
--- a/module/web/media/default/js/funktions.js
+++ b/module/web/media/default/js/funktions.js
@@ -1,41 +1,42 @@
// JavaScript Document
-function HumanFileSize(size)
-{
- var filesizename = new Array("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB");
- var loga = Math.log(size)/Math.log(1024);
- var i = Math.floor(loga);
- var a = Math.pow(1024, i);
- return (size == 0) ? "0 B" : (Math.round( size / a , 2) + " " + filesizename[i]);
+function HumanFileSize(size) {
+ var filesizename = new Array("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB");
+ var loga = Math.log(size) / Math.log(1024);
+ var i = Math.floor(loga);
+ var a = Math.pow(1024, i);
+ return (size == 0) ? "0 B" : (Math.round(size / a, 2) + " " + filesizename[i]);
}
function parseUri() {
- var oldString = $("add_links").value;
- var regxp= new RegExp('(ht|f)tp(s?):\/\/[a-zA-Z0-9\-\.\/\?=]+[<| |\"|\'|\r|\n|\t]{1}', 'g');
- var resu=oldString.match(regxp);
- if (resu == null) return;
- var res="";
- for(var i=0; i < resu.length; i++){
- // remove the last char, if ...
- if (resu[i].indexOf(" ") != -1){
- res = res + resu[i].replace(" ","\n");
- } else if (resu[i].indexOf("\t") != -1){
- res = res + resu[i].replace("\t","\n");
- } else if (resu[i].indexOf("\r") != -1){
- res = res + resu[i].replace("\r","\n");
- } else if (resu[i].indexOf("\"") != -1){
- res = res + resu[i].replace("\"","\n");
- } else if (resu[i].indexOf("<") != -1){
- res = res + resu[i].replace("<","\n");
- } else {
- res = res + "\n" + resu[i].replace("'","\n");
- }
+ var oldString = $("add_links").value;
+ var regxp = new RegExp('(ht|f)tp(s?):\/\/[a-zA-Z0-9\-\.\/\?=_&%#]+[<| |\"|\'|\r|\n|\t]{1}', 'g');
+ var resu = oldString.match(regxp);
+ if (resu == null) return;
+ var res = "";
+ for (var i = 0; i < resu.length; i++) {
+ // remove the last char, if ...
+ if (resu[i].indexOf(" ") != -1) {
+ res = res + resu[i].replace(" ", " \n");
+ } else if (resu[i].indexOf("\t") != -1) {
+ res = res + resu[i].replace("\t", " \n");
+ } else if (resu[i].indexOf("\r") != -1) {
+ res = res + resu[i].replace("\r", " \n");
+ } else if (resu[i].indexOf("\"") != -1) {
+ res = res + resu[i].replace("\"", " \n");
+ } else if (resu[i].indexOf("<") != -1) {
+ res = res + resu[i].replace("<", " \n");
+ } else if (resu[i].indexOf("'") != -1) {
+ res = res + resu[i].replace("'", " \n");
+ } else {
+ res = res + resu[i].replace("\n", " \n");
}
- $("add_links").value = res;
+ }
+ $("add_links").value = res;
}
Array.prototype.remove = function(from, to) {
- var rest = this.slice((to || from) + 1 || this.length);
- this.length = from < 0 ? this.length + from : from;
- if (this.length == 0) return [];
- return this.push.apply(this, rest);
+ var rest = this.slice((to || from) + 1 || this.length);
+ this.length = from < 0 ? this.length + from : from;
+ if (this.length == 0) return [];
+ return this.push.apply(this, rest);
}; \ No newline at end of file