author    RaNaN <Mast3rRaNaN@hotmail.de> 2010-12-19 14:49:19 +0100
committer RaNaN <Mast3rRaNaN@hotmail.de> 2010-12-19 14:49:19 +0100
commit    216b61197147e9d230dcc2e5faa8e63db740001e (patch)
tree      24e5602ceced37b5eb1c9e71697a6097de41ecfa /module
parent    new experimental backend, new gui status (unfinished) (diff)
download  pyload-216b61197147e9d230dcc2e5faa8e63db740001e.tar.xz
little changes
Diffstat (limited to 'module')
-rw-r--r--  module/network/Browser.py                2
-rw-r--r--  module/network/FTPBase.py                2
-rw-r--r--  module/network/HTTPBase.py               8
-rw-r--r--  module/network/HTTPChunk.py              2
-rw-r--r--  module/network/HTTPDownload.py           6
-rw-r--r--  module/network/MultipartPostHandler.py  20
-rw-r--r--  module/plugins/container/CCF.py          4
7 files changed, 22 insertions, 22 deletions
diff --git a/module/network/Browser.py b/module/network/Browser.py
index 65aefbae8..b749a8338 100644
--- a/module/network/Browser.py
+++ b/module/network/Browser.py
@@ -77,4 +77,4 @@ if __name__ == "__main__":
#browser.getPage("https://encrypted.google.com/")
#browser.getPage("http://google.com/search?q=bar")
- #browser.downloadFile("https://bitbucket.org/spoob/pyload/downloads/Logo_neu.png", "logo.png")
+ #browser.downloadFile("http://speedtest.netcologne.de/test_100mb.bin", "test_100mb.bin")
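The hunk above only swaps the commented-out sample URL in Browser.py's self-test block. For reference, a minimal sketch of that smoke test, reconstructed from the commented lines; the Browser constructor arguments are an assumption, since they are not shown in this diff:

    from module.network.Browser import Browser

    # Reconstructed from the commented-out test lines above; constructor args are assumed.
    browser = Browser()
    browser.getPage("http://google.com/search?q=bar")
    browser.downloadFile("http://speedtest.netcologne.de/test_100mb.bin", "test_100mb.bin")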
diff --git a/module/network/FTPBase.py b/module/network/FTPBase.py
index 9bcb9b45e..4d27585a1 100644
--- a/module/network/FTPBase.py
+++ b/module/network/FTPBase.py
@@ -142,7 +142,7 @@ class FTPDownload():
if self.abort:
self.deferred.error("abort")
elif self.size is None or self.size == self.arrived:
- self.deferred.callback(resp)
+ self.deferred.callback(resp) #@TODO resp = unresolved?
else:
self.deferred.error("wrong content lenght")
diff --git a/module/network/HTTPBase.py b/module/network/HTTPBase.py
index e8c7d07ad..fe654d4dd 100644
--- a/module/network/HTTPBase.py
+++ b/module/network/HTTPBase.py
@@ -18,7 +18,7 @@
"""
from urllib import urlencode
-from urlparse import urlparse
+#from urlparse import urlparse
from urllib2 import Request
from urllib2 import OpenerDirector
@@ -94,7 +94,6 @@ class PyLoadHTTPResponse(HTTPResponse):
return s
def readline(self, limit=-1):
- data = ""
i = self._rbuf.find('\n')
while i < 0 and not (0 < limit <= len(self._rbuf)):
new = self._raw_read(self._rbufsize)
@@ -103,7 +102,7 @@ class PyLoadHTTPResponse(HTTPResponse):
if i >= 0: i = i + len(self._rbuf)
self._rbuf = self._rbuf + new
if i < 0: i = len(self._rbuf)
- else: i = i+1
+ else: i += 1
if 0 <= limit < len(self._rbuf): i = limit
data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
return data
@@ -186,6 +185,7 @@ class PyLoadHTTPHandler(HTTPHandler):
del self._connections[host]
def _start_connection(self, h, req):
+ data = ""
try:
if req.has_data():
data = req.get_data()
@@ -198,7 +198,7 @@ class PyLoadHTTPHandler(HTTPHandler):
else:
h.putrequest('GET', req.get_selector(), skip_accept_encoding=1)
except socket.error, err:
- raise urllib2.URLError(err)
+ raise URLError(err)
for args in self.parent.addheaders:
h.putheader(*args)
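The last change in this hunk replaces raise urllib2.URLError(err) with a bare raise URLError(err). That only resolves if URLError is imported by name, in the same style as the Request and OpenerDirector imports shown earlier in the file; the import itself is not part of this diff, so the following line is an assumption:

    from urllib2 import URLError  # assumed to exist in HTTPBase.py alongside the other urllib2 imports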
diff --git a/module/network/HTTPChunk.py b/module/network/HTTPChunk.py
index 37c28f685..6ffc43078 100644
--- a/module/network/HTTPChunk.py
+++ b/module/network/HTTPChunk.py
@@ -157,7 +157,7 @@ if __name__ == "__main__":
#bucket.setRate(200*1000)
bucket = None
- url = "http://download.fedoraproject.org/pub/fedora/linux/releases/13/Live/x86_64/Fedora-13-x86_64-Live.iso"
+ url = "http://speedtest.netcologne.de/test_100mb.bin"
finished = 0
def err(*a, **b):
diff --git a/module/network/HTTPDownload.py b/module/network/HTTPDownload.py
index 78dc00d72..98941ff08 100644
--- a/module/network/HTTPDownload.py
+++ b/module/network/HTTPDownload.py
@@ -21,7 +21,7 @@ from HTTPChunk import HTTPChunk
from helper import *
from os.path import exists, getsize
from os import remove
-from shutil import move, copyfileobj
+#from shutil import move, copyfileobj
from cookielib import CookieJar
@@ -271,7 +271,7 @@ if __name__ == "__main__":
#bucket.setRate(200*1000)
bucket = None
- url = "http://mirror.sov.uk.goscomb.net/ubuntu-releases/maverick/ubuntu-10.10-desktop-i386.iso"
+ url = "http://speedtest.netcologne.de/test_100mb.bin"
finished = False
def err(*a, **b):
@@ -283,7 +283,7 @@ if __name__ == "__main__":
print "starting"
- dwnld = HTTPDownload(url, "ubuntu.iso")
+ dwnld = HTTPDownload(url, "test_100mb.bin")
d = dwnld.download(chunks=1, resume=True)
d.addCallback(callb)
d.addErrback(err)
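The hunk above edits HTTPDownload.py's self-test to fetch the speedtest file and write it under a matching name. A minimal sketch of the deferred-style pattern that block uses, with the callback bodies filled in as assumptions (only their names appear in the diff):

    from module.network.HTTPDownload import HTTPDownload

    url = "http://speedtest.netcologne.de/test_100mb.bin"

    def err(*a, **b):
        # errback body is an assumption; only the signature is shown in the diff
        print "error", a, b

    def callb(*a, **b):
        # callback body is an assumption
        print "finished", a, b

    dwnld = HTTPDownload(url, "test_100mb.bin")
    d = dwnld.download(chunks=1, resume=True)  # returns a deferred, as the addCallback/addErrback calls imply
    d.addCallback(callb)
    d.addErrback(err)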
diff --git a/module/network/MultipartPostHandler.py b/module/network/MultipartPostHandler.py
index 113fd7cf9..94aee0193 100644
--- a/module/network/MultipartPostHandler.py
+++ b/module/network/MultipartPostHandler.py
@@ -41,10 +41,10 @@ Further Example:
then uploads it to the W3C validator.
"""
-import urllib
-import urllib2
+from urllib import urlencode
+from urllib2 import BaseHandler, HTTPHandler, build_opener
import mimetools, mimetypes
-import os, stat
+from os import write, remove
from cStringIO import StringIO
class Callable:
@@ -55,8 +55,8 @@ class Callable:
# assigning a sequence.
doseq = 1
-class MultipartPostHandler(urllib2.BaseHandler):
- handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
+class MultipartPostHandler(BaseHandler):
+ handler_order = HTTPHandler.handler_order - 10 # needs to run first
def http_request(self, request):
data = request.get_data()
@@ -74,7 +74,7 @@ class MultipartPostHandler(urllib2.BaseHandler):
raise TypeError, "not a valid non-string sequence or mapping object", traceback
if len(v_files) == 0:
- data = urllib.urlencode(v_vars, doseq)
+ data = urlencode(v_vars, doseq)
else:
boundary, data = self.multipart_encode(v_vars, v_files)
@@ -98,7 +98,7 @@ class MultipartPostHandler(urllib2.BaseHandler):
buf.write('Content-Disposition: form-data; name="%s"' % key)
buf.write('\r\n\r\n' + value + '\r\n')
for(key, fd) in files:
- file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
+ #file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
filename = fd.name.split('/')[-1]
contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
buf.write('--%s\r\n' % boundary)
@@ -118,16 +118,16 @@ def main():
import tempfile, sys
validatorURL = "http://validator.w3.org/check"
- opener = urllib2.build_opener(MultipartPostHandler)
+ opener = build_opener(MultipartPostHandler)
def validateFile(url):
temp = tempfile.mkstemp(suffix=".html")
- os.write(temp[0], opener.open(url).read())
+ write(temp[0], opener.open(url).read())
params = { "ss" : "0", # show source
"doctype" : "Inline",
"uploaded_file" : open(temp[1], "rb") }
print opener.open(validatorURL, params).read()
- os.remove(temp[1])
+ remove(temp[1])
if len(sys.argv[1:]) > 0:
for arg in sys.argv[1:]:
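The import cleanup above keeps MultipartPostHandler usable as a drop-in handler for build_opener, as the module's own main() demonstrates against the W3C validator. A minimal standalone usage sketch assembled from the lines in this hunk; the local file name is an assumption:

    from urllib2 import build_opener
    from module.network.MultipartPostHandler import MultipartPostHandler

    opener = build_opener(MultipartPostHandler)
    params = {"ss": "0",             # show source
              "doctype": "Inline",
              "uploaded_file": open("page.html", "rb")}  # file name is an assumption
    print opener.open("http://validator.w3.org/check", params).read()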
diff --git a/module/plugins/container/CCF.py b/module/plugins/container/CCF.py
index 8b35589f3..90502c001 100644
--- a/module/plugins/container/CCF.py
+++ b/module/plugins/container/CCF.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
import re
-import urllib2
+from urllib2 import build_opener
from module.plugins.Container import Container
from module.network.MultipartPostHandler import MultipartPostHandler
@@ -22,7 +22,7 @@ class CCF(Container):
infile = pyfile.url.replace("\n", "")
- opener = urllib2.build_opener(MultipartPostHandler)
+ opener = build_opener(MultipartPostHandler)
params = {"src": "ccf",
"filename": "test.ccf",
"upload": open(infile, "rb")}