Diffstat (limited to 'module')
-rwxr-xr-x  module/network/Request.py            | 9 ++++++---
-rw-r--r--  module/plugins/hoster/UploadedTo.py  | 7 ++++---
2 files changed, 10 insertions(+), 6 deletions(-)
diff --git a/module/network/Request.py b/module/network/Request.py
index c76590afc..bc62a0cc9 100755
--- a/module/network/Request.py
+++ b/module/network/Request.py
@@ -334,14 +334,17 @@ class Request:
         self.header += string

     def write_rep(self, buf):
-        if self.rep.tell() > 180000 or self.abort:
+        if self.rep.tell() > 200000 or self.abort:
+            rep = self.get_rep()
             if self.abort: raise Abort
-            print self.rep.getvalue()
+            f = open("response.dump", "wb")
+            f.write(rep)
+            f.close()
             raise Exception("Loaded Url exceeded limit")

         self.rep.write(buf)

-    def get_rep(self):
+    def get_rep(self):
         value = self.rep.getvalue()
         self.rep.close()
         self.rep = StringIO()
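For context, a minimal sketch of what the revised write_rep/get_rep pair does after this change, assuming a StringIO-backed buffer and an Abort exception as in pyLoad's Request module; the surrounding class is simplified here and is not part of the commit:

    # Python 2 sketch of the new buffering behaviour
    from StringIO import StringIO

    class Abort(Exception):
        pass

    class ResponseBuffer:
        def __init__(self):
            self.rep = StringIO()
            self.abort = False

        def write_rep(self, buf):
            # Raised limit: once the buffer passes ~200 kB the response is
            # dumped to response.dump instead of printed, then loading stops.
            if self.rep.tell() > 200000 or self.abort:
                rep = self.get_rep()
                if self.abort:
                    raise Abort
                f = open("response.dump", "wb")
                f.write(rep)
                f.close()
                raise Exception("Loaded Url exceeded limit")
            self.rep.write(buf)

        def get_rep(self):
            # Hand back the buffered response and reset the buffer.
            value = self.rep.getvalue()
            self.rep.close()
            self.rep = StringIO()
            return value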
diff --git a/module/plugins/hoster/UploadedTo.py b/module/plugins/hoster/UploadedTo.py
index a798a145f..5d4798d4e 100644
--- a/module/plugins/hoster/UploadedTo.py
+++ b/module/plugins/hoster/UploadedTo.py
@@ -13,7 +13,7 @@ def getInfo(urls):
         for url in chunk:
             match = pattern.search(url)
             if match:
-                src = getURL("http://uploaded.to/api/file", get={"id": match.group(1).split("/")[0]})
+                src = getURL("http://uploaded.to/api/file", get={"id": match.group(1).split("/")[0]}).decode("utf8", "ignore")
                 if src.find("404 Not Found") >= 0:
                     result.append((url, 0, 1, url))
                     continue
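The only substantive change in getInfo is that the raw API reply is decoded as lenient UTF-8 before any string matching; a minimal sketch of that step on a made-up byte string (not real uploaded.to output):

    # Python 2: bytes with an invalid UTF-8 byte decode without raising,
    # the bad byte is simply dropped by the "ignore" handler
    raw = "file_\xff.rar\n1048576\nda39a3ee5e6b4b0d3255bfef95601890afd80709"
    src = raw.decode("utf8", "ignore")
    print src.find("404 Not Found") >= 0   # False for this reply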
@@ -108,17 +108,18 @@ class UploadedTo(Hoster):
             return
         match = re.compile(self.__pattern__).search(self.pyfile.url)
         if match:
-            src = self.load("http://uploaded.to/api/file", cookies=False, get={"id": match.group(1).split("/")[0]})
+            src = self.load("http://uploaded.to/api/file", cookies=False, get={"id": match.group(1).split("/")[0]}).decode("utf8", "ignore")
             if not src.find("404 Not Found"):
                 return
             self.api_data = {}
             lines = src.splitlines()
+            self.log.debug("Uploaded API: %s" % lines)
             self.api_data["filename"] = lines[0]
             self.api_data["size"] = int(lines[1]) # in bytes
             self.api_data["checksum"] = lines[2] #sha1

     def download_html(self):
-        self.html = self.load(self.pyfile.url, cookies=False)
+        self.html = self.load(self.pyfile.url, cookies=False).decode("utf8", "ignore")

     def get_waiting_time(self):
         try:
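download_api_data now decodes the reply and logs the split lines before indexing them. A minimal sketch of that parsing step, assuming, as the diff suggests, that the uploaded.to API answers with the filename, the size in bytes and a SHA1 checksum on its first three lines:

    # Python 2 sketch; src stands for the already-decoded API reply
    def parse_api_data(src):
        lines = src.splitlines()
        return {
            "filename": lines[0],
            "size": int(lines[1]),  # in bytes
            "checksum": lines[2],   # sha1
        }

    # e.g. parse_api_data(u"file.rar\n1048576\nda39a3ee...") gives
    # {"filename": u"file.rar", "size": 1048576, "checksum": u"da39a3ee..."}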