Diffstat (limited to 'pyload/network/HTTPRequest.py')
-rw-r--r--  pyload/network/HTTPRequest.py  |  18 ++++++++++++++++++
1 file changed, 18 insertions(+), 0 deletions(-)
diff --git a/pyload/network/HTTPRequest.py b/pyload/network/HTTPRequest.py
index fe7e26c48..3e5903df3 100644
--- a/pyload/network/HTTPRequest.py
+++ b/pyload/network/HTTPRequest.py
@@ -28,6 +28,7 @@ bad_headers = range(400, 404) + range(405, 418) + range(500, 506)
class BadHeader(Exception):
+
def __init__(self, code, content=""):
Exception.__init__(self, "Bad server response: %s %s" % (code, responses[int(code)]))
self.code = code
@@ -35,6 +36,7 @@ class BadHeader(Exception):
class HTTPRequest(object):
+
def __init__(self, cookies=None, options=None):
self.c = pycurl.Curl()
self.rep = StringIO()
@@ -58,6 +60,7 @@ class HTTPRequest(object):
self.log = getLogger("log")
+
def initHandle(self):
""" sets common options to curl handle """
self.c.setopt(pycurl.FOLLOWLOCATION, 1)
@@ -87,6 +90,7 @@ class HTTPRequest(object):
"Keep-Alive: 300",
"Expect:"])
+
def setInterface(self, options):
interface, proxy, ipv6 = options["interface"], options["proxies"], options["ipv6"]
@@ -119,11 +123,13 @@ class HTTPRequest(object):
if "timeout" in options:
self.c.setopt(pycurl.LOW_SPEED_TIME, options["timeout"])
+
def addCookies(self):
""" put cookies from curl handle to cj """
if self.cj:
self.cj.addCookies(self.c.getinfo(pycurl.INFO_COOKIELIST))
+
def getCookies(self):
""" add cookies from cj to curl handle """
if self.cj:
@@ -131,9 +137,11 @@ class HTTPRequest(object):
self.c.setopt(pycurl.COOKIELIST, c)
return
+
def clearCookies(self):
self.c.setopt(pycurl.COOKIELIST, "")
+
def setRequestContext(self, url, get, post, referer, cookies, multipart=False):
""" sets everything needed for the request """
@@ -171,6 +179,7 @@ class HTTPRequest(object):
self.c.setopt(pycurl.COOKIEJAR, "")
self.getCookies()
+
def load(self, url, get={}, post={}, referer=True, cookies=True, just_header=False, multipart=False, decode=False, follow_location=True, save_cookies=True):
""" load and returns a given page """
@@ -212,6 +221,7 @@ class HTTPRequest(object):
return rep
+
def verifyHeader(self):
""" raise an exceptions on bad headers """
code = int(self.c.getinfo(pycurl.RESPONSE_CODE))
@@ -220,10 +230,12 @@ class HTTPRequest(object):
raise BadHeader(code, self.getResponse())
return code
+
def checkHeader(self):
""" check if header indicates failure"""
return int(self.c.getinfo(pycurl.RESPONSE_CODE)) not in bad_headers
+
def getResponse(self):
""" retrieve response from string io """
if self.rep is None:
@@ -234,6 +246,7 @@ class HTTPRequest(object):
self.rep = StringIO()
return value
+
def decodeResponse(self, rep):
""" decode with correct encoding, relies on header """
header = self.header.splitlines()
@@ -269,6 +282,7 @@ class HTTPRequest(object):
return rep
+
def write(self, buf):
""" writes response """
if self.rep.tell() > 1000000 or self.abort:
@@ -283,16 +297,20 @@ class HTTPRequest(object):
else:
self.rep.write(buf)
+
def writeHeader(self, buf):
""" writes header """
self.header += buf
+
def putHeader(self, name, value):
self.headers.append("%s: %s" % (name, value))
+
def clearHeaders(self):
self.headers = []
+
def close(self):
""" cleanup, unusable after this """
self.rep.close()
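
For context, a minimal usage sketch of the class this diff touches. This is an illustration, not part of the commit: it assumes only the signatures visible above (HTTPRequest(), load(), close(), and the BadHeader exception) and the Python 2 idioms already used in the file (StringIO, pycurl, %-formatting); the URL and query parameters are placeholders.

# Hypothetical usage sketch; assumes load() verifies the response code
# internally and raises BadHeader for the 4xx/5xx codes in bad_headers.
from pyload.network.HTTPRequest import HTTPRequest, BadHeader

req = HTTPRequest()  # cookies=None, options=None by default
try:
    # decode=True decodes the body using the charset announced in the
    # response headers (see decodeResponse above).
    html = req.load("http://example.com", get={"q": "pyload"}, decode=True)
except BadHeader, e:
    print "Bad server response: %d" % e.code
finally:
    req.close()  # "cleanup, unusable after this"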