summaryrefslogtreecommitdiffstats
path: root/pyload/network
diff options
context:
space:
mode:
authorGravatar ardi69 <armin@diedering.de> 2015-04-21 06:51:24 +0200
committerGravatar ardi69 <armin@diedering.de> 2015-04-21 06:51:24 +0200
commit2f8433b6a10505d29a1b63ea8bbd9b0bf3f7d9f6 (patch)
treeb82a8b5fc0a309f69733b0a004284f4ef45833d8 /pyload/network
parentadded check of classname == filename (diff)
parentMerge branch 'pr/n10_ardi69' into 0.4.10 (diff)
downloadpyload-2f8433b6a10505d29a1b63ea8bbd9b0bf3f7d9f6.tar.xz
Merge pull request #4 from vuolter/0.4.10
vuolter HEAD
Diffstat (limited to 'pyload/network')
-rw-r--r--pyload/network/Browser.py19
-rw-r--r--pyload/network/Bucket.py7
-rw-r--r--pyload/network/HTTPChunk.py8
-rw-r--r--pyload/network/HTTPDownload.py76
-rw-r--r--pyload/network/HTTPRequest.py20
-rw-r--r--pyload/network/JsEngine.py2
-rw-r--r--pyload/network/RequestFactory.py9
-rw-r--r--pyload/network/XDCCRequest.py11
8 files changed, 70 insertions, 82 deletions
diff --git a/pyload/network/Browser.py b/pyload/network/Browser.py
index d8617fabc..482c2320a 100644
--- a/pyload/network/Browser.py
+++ b/pyload/network/Browser.py
@@ -24,7 +24,8 @@ class Browser(object):
def renewHTTPRequest(self):
- if hasattr(self, "http"): self.http.close()
+ if hasattr(self, "http"):
+ self.http.close()
self.http = HTTPRequest(self.cj, self.options)
@@ -61,16 +62,12 @@ class Browser(object):
@property
def arrived(self):
- if self.dl:
- return self.dl.arrived
- return 0
+ return self.dl.arrived if self.dl else 0
@property
def percent(self):
- if not self.size:
- return 0
- return (self.arrived * 100) / self.size
+ return (self.arrived * 100) / self.size if self.size else 0
def clearCookies(self):
@@ -95,7 +92,7 @@ class Browser(object):
""" this can also download ftp """
self._size = 0
self.dl = HTTPDownload(url, filename, get, post, self.lastEffectiveURL if ref else None,
- self.cj if cookies else None, self.bucket, self.options, progressNotify, disposition)
+ self.cj if cookies else None, self.bucket, self.options, progressNotify, disposition)
name = self.dl.download(chunks, resume)
self._size = self.dl.size
@@ -124,7 +121,8 @@ class Browser(object):
def removeAuth(self):
- if "auth" in self.options: del self.options['auth']
+ if "auth" in self.options:
+ del self.options['auth']
self.renewHTTPRequest()
@@ -134,7 +132,8 @@ class Browser(object):
def deleteOption(self, name):
- if name in self.options: del self.options[name]
+ if name in self.options:
+ del self.options[name]
def clearHeaders(self):
diff --git a/pyload/network/Bucket.py b/pyload/network/Bucket.py
index 5f8260384..2f957fcad 100644
--- a/pyload/network/Bucket.py
+++ b/pyload/network/Bucket.py
@@ -17,7 +17,7 @@ class Bucket(object):
def __nonzero__(self):
- return False if self.rate < MIN_RATE else True
+ return self.rate >= MIN_RATE
def setRate(self, rate):
@@ -35,10 +35,7 @@ class Bucket(object):
self.calc_tokens()
self.tokens -= amount
- if self.tokens < 0:
- time = -self.tokens/float(self.rate)
- else:
- time = 0
+ time = -self.tokens / float(self.rate) if self.tokens < 0 else 0
self.lock.release()
return time
diff --git a/pyload/network/HTTPChunk.py b/pyload/network/HTTPChunk.py
index 784b64349..85c20d519 100644
--- a/pyload/network/HTTPChunk.py
+++ b/pyload/network/HTTPChunk.py
@@ -30,7 +30,6 @@ class ChunkInfo(object):
ret = "ChunkInfo: %s, %s\n" % (self.name, self.size)
for i, c in enumerate(self.chunks):
ret += "%s# %s\n" % (i, c[1])
-
return ret
@@ -51,7 +50,7 @@ class ChunkInfo(object):
chunk_size = self.size / chunks
current = 0
- for i in range(chunks):
+ for i in xrange(chunks):
end = self.size - 1 if (i == chunks - 1) else current + chunk_size
self.addChunk("%s.chunk%s" % (self.name, i), (current, end))
current += chunk_size + 1
@@ -222,7 +221,7 @@ class HTTPChunk(HTTPRequest):
def writeBody(self, buf):
- #ignore BOM, it confuses unrar
+ # ignore BOM, it confuses unrar
if not self.BOMChecked:
if [ord(b) for b in buf[:3]] == [239, 187, 191]:
buf = buf[3:]
@@ -310,7 +309,8 @@ class HTTPChunk(HTTPRequest):
""" closes everything, unusable after this """
if self.fp: self.fp.close()
self.c.close()
- if hasattr(self, "p"): del self.p
+ if hasattr(self, "p"):
+ del self.p
def charEnc(enc):
diff --git a/pyload/network/HTTPDownload.py b/pyload/network/HTTPDownload.py
index 13666195a..1e74d4476 100644
--- a/pyload/network/HTTPDownload.py
+++ b/pyload/network/HTTPDownload.py
@@ -1,14 +1,16 @@
# -*- coding: utf-8 -*-
# @author: RaNaN
+from __future__ import with_statement
+
+import pycurl
+
from os import remove, fsync
from os.path import dirname
from time import sleep, time
from shutil import move
from logging import getLogger
-import pycurl
-
from pyload.network.HTTPChunk import ChunkInfo, HTTPChunk
from pyload.network.HTTPRequest import BadHeader
@@ -22,11 +24,11 @@ class HTTPDownload(object):
def __init__(self, url, filename, get={}, post={}, referer=None, cj=None, bucket=None,
options={}, progress=None, disposition=False):
self.url = url
- self.filename = filename #complete file destination, not only name
+ self.filename = filename #: complete file destination, not only name
self.get = get
self.post = post
self.referer = referer
- self.cj = cj #cookiejar if cookies are needed
+ self.cj = cj #: cookiejar if cookies are needed
self.bucket = bucket
self.options = options
self.disposition = disposition
@@ -51,7 +53,7 @@ class HTTPDownload(object):
self.chunkSupport = True
self.m = pycurl.CurlMulti()
- #needed for speed calculation
+ # needed for speed calculation
self.lastArrived = []
self.speeds = []
self.lastSpeeds = [0, 0]
@@ -72,36 +74,30 @@ class HTTPDownload(object):
@property
def percent(self):
- if not self.size:
- return 0
- return (self.arrived * 100) / self.size
-
+ return (self.arrived * 100) / self.size if self.size else 0
def _copyChunks(self):
init = fs_encode(self.info.getChunkName(0)) #: initial chunk name
if self.info.getCount() > 1:
- fo = open(init, "rb+") #: first chunkfile
- for i in range(1, self.info.getCount()):
- #input file
- fo.seek(
- self.info.getChunkRange(i - 1)[1] + 1) #: seek to beginning of chunk, to get rid of overlapping chunks
- fname = fs_encode("%s.chunk%d" % (self.filename, i))
- fi = open(fname, "rb")
- buf = 32 * 1024
- while True: #: copy in chunks, consumes less memory
- data = fi.read(buf)
- if not data:
- break
- fo.write(data)
- fi.close()
- if fo.tell() < self.info.getChunkRange(i)[1]:
- fo.close()
- remove(init)
- self.info.remove() #: there are probably invalid chunks
- raise Exception("Downloaded content was smaller than expected. Try to reduce download connections.")
- remove(fname) #: remove chunk
- fo.close()
+ with open(init, "rb+") as fo: #: first chunkfile
+ for i in xrange(1, self.info.getCount()):
+ # input file
+ fo.seek(
+ self.info.getChunkRange(i - 1)[1] + 1) #: seek to beginning of chunk, to get rid of overlapping chunks
+ fname = fs_encode("%s.chunk%d" % (self.filename, i))
+ with open(fname, "rb") as fi:
+ buf = 32 * 1024
+ while True: #: copy in chunks, consumes less memory
+ data = fi.read(buf)
+ if not data:
+ break
+ fo.write(data)
+ if fo.tell() < self.info.getChunkRange(i)[1]:
+ remove(init)
+ self.info.remove() #: there are probably invalid chunks
+ raise Exception("Downloaded content was smaller than expected. Try to reduce download connections.")
+ remove(fname) #: remove chunk
if self.nameDisposition and self.disposition:
self.filename = fs_join(dirname(self.filename), self.nameDisposition)
@@ -119,13 +115,13 @@ class HTTPDownload(object):
try:
self._download(chunks, resume)
except pycurl.error, e:
- #code 33 - no resume
+ # code 33 - no resume
code = e.args[0]
if resume is True and code == 33:
# try again without resume
self.log.debug("Errno 33 -> Restart without resume")
- #remove old handles
+ # remove old handles
for chunk in self.chunks:
self.closeChunk(chunk)
@@ -155,14 +151,14 @@ class HTTPDownload(object):
lastFinishCheck = 0
lastTimeCheck = 0
- chunksDone = set() # list of curl handles that are finished
+ chunksDone = set() #: list of curl handles that are finished
chunksCreated = False
done = False
if self.info.getCount() is 0: #: This is a resume, if we were chunked originally assume still can
self.chunkSupport = False
while 1:
- #need to create chunks
+ # need to create chunks
if not chunksCreated and self.chunkSupport and self.size: #: will be setted later by first chunk
if not resume:
@@ -174,7 +170,7 @@ class HTTPDownload(object):
init.setRange(self.info.getChunkRange(0))
- for i in range(1, chunks):
+ for i in xrange(1, chunks):
c = HTTPChunk(i, self, self.info.getChunkRange(i), resume)
handle = c.getHandle()
@@ -182,7 +178,7 @@ class HTTPDownload(object):
self.chunks.append(c)
self.m.add_handle(handle)
else:
- #close immediatly
+ # close immediately
self.log.debug("Invalid curl handle -> closed")
c.close()
@@ -216,7 +212,7 @@ class HTTPDownload(object):
for c in err_list:
curl, errno, msg = c
chunk = self.findChunk(curl)
- #test if chunk was finished
+ # test if chunk was finished
if errno != 23 or "0 !=" not in msg:
failed.append(chunk)
ex = pycurl.error(errno, msg)
@@ -238,14 +234,14 @@ class HTTPDownload(object):
if failed and init not in failed and init.c not in chunksDone:
self.log.error(_("Download chunks failed, fallback to single connection | %s" % (str(ex))))
- #list of chunks to clean and remove
+ # list of chunks to clean and remove
to_clean = filter(lambda x: x is not init, self.chunks)
for chunk in to_clean:
self.closeChunk(chunk)
self.chunks.remove(chunk)
remove(fs_encode(self.info.getChunkName(chunk.id)))
- #let first chunk load the rest and update the info file
+ # let first chunk load the rest and update the info file
init.resetRange()
self.info.clear()
self.info.addChunk("%s.chunk0" % self.filename, (0, self.size))
@@ -258,7 +254,7 @@ class HTTPDownload(object):
if len(chunksDone) >= len(self.chunks):
if len(chunksDone) > len(self.chunks):
self.log.warning("Finished download chunks size incorrect, please report bug.")
- done = True #all chunks loaded
+ done = True #: all chunks loaded
break
diff --git a/pyload/network/HTTPRequest.py b/pyload/network/HTTPRequest.py
index 62c0ef72b..92ce6ec4b 100644
--- a/pyload/network/HTTPRequest.py
+++ b/pyload/network/HTTPRequest.py
@@ -24,7 +24,7 @@ def myurlencode(data):
data = dict(data)
return urlencode(dict((encode(x), encode(y)) for x, y in data.iteritems()))
-bad_headers = range(400, 404) + range(405, 418) + range(500, 506)
+bad_headers = list(xrange(400, 404)) + list(xrange(405, 418)) + list(xrange(500, 506))
class BadHeader(Exception):
@@ -41,16 +41,16 @@ class HTTPRequest(object):
self.c = pycurl.Curl()
self.rep = StringIO()
- self.cj = cookies # cookiejar
+ self.cj = cookies #: cookiejar
self.lastURL = None
self.lastEffectiveURL = None
self.abort = False
- self.code = 0 # last http code
+ self.code = 0 #: last http code
self.header = ""
- self.headers = [] # temporary request header
+ self.headers = [] #: temporary request header
self.initHandle()
self.setInterface(options)
@@ -76,7 +76,7 @@ class HTTPRequest(object):
if hasattr(pycurl, "USE_SSL"):
self.c.setopt(pycurl.USE_SSL, pycurl.CURLUSESSL_TRY)
- #self.c.setopt(pycurl.VERBOSE, 1)
+ # self.c.setopt(pycurl.VERBOSE, 1)
self.c.setopt(pycurl.USERAGENT,
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0")
@@ -158,7 +158,7 @@ class HTTPRequest(object):
self.c.setopt(pycurl.POST, 1)
if not multipart:
if type(post) == unicode:
- post = str(post) # unicode not allowed
+ post = str(post) #: unicode not allowed
elif type(post) == str:
pass
else:
@@ -250,12 +250,12 @@ class HTTPRequest(object):
def decodeResponse(self, rep):
""" decode with correct encoding, relies on header """
header = self.header.splitlines()
- encoding = "utf8" # default encoding
+ encoding = "utf8" #: default encoding
for line in header:
line = line.lower().replace(" ", "")
- if not line.startswith("content-type:") or\
- ("text" not in line and "application" not in line):
+ if not line.startswith("content-type:") or \
+ ("text" not in line and "application" not in line):
continue
none, delemiter, charset = line.rpartition("charset=")
@@ -265,7 +265,7 @@ class HTTPRequest(object):
encoding = charset[0]
try:
- #self.log.debug("Decoded %s" % encoding )
+ # self.log.debug("Decoded %s" % encoding )
if lookup(encoding).name == 'utf-8' and rep.startswith(BOM_UTF8):
encoding = 'utf-8-sig'
diff --git a/pyload/network/JsEngine.py b/pyload/network/JsEngine.py
index c64e8c490..b59d07dc4 100644
--- a/pyload/network/JsEngine.py
+++ b/pyload/network/JsEngine.py
@@ -112,7 +112,7 @@ class AbstractEngine(object):
def __init__(self, force=False):
self.setup()
- self.available = True if force else self.find()
+ self.available = force or self.find()
def setup(self):
diff --git a/pyload/network/RequestFactory.py b/pyload/network/RequestFactory.py
index 0591c5162..5e2c15f4b 100644
--- a/pyload/network/RequestFactory.py
+++ b/pyload/network/RequestFactory.py
@@ -7,7 +7,6 @@ from pyload.network.Browser import Browser
from pyload.network.Bucket import Bucket
from pyload.network.HTTPRequest import HTTPRequest
from pyload.network.CookieJar import CookieJar
-
from pyload.network.XDCCRequest import XDCCRequest
@@ -88,8 +87,10 @@ class RequestFactory(object):
else:
type = "http"
setting = self.core.config.get("proxy", "type").lower()
- if setting == "socks4": type = "socks4"
- elif setting == "socks5": type = "socks5"
+ if setting == "socks4":
+ type = "socks4"
+ elif setting == "socks5":
+ type = "socks5"
username = None
if self.core.config.get("proxy", "username") and self.core.config.get("proxy", "username").lower() != "none":
@@ -105,7 +106,7 @@ class RequestFactory(object):
"port": self.core.config.get("proxy", "port"),
"username": username,
"password": pw,
- }
+ }
def getOptions(self):
diff --git a/pyload/network/XDCCRequest.py b/pyload/network/XDCCRequest.py
index dff500749..24146ccaa 100644
--- a/pyload/network/XDCCRequest.py
+++ b/pyload/network/XDCCRequest.py
@@ -2,15 +2,12 @@
# @author: jeix
import socket
-import re
+import struct
from os import remove
from os.path import exists
-
-from time import time
-
-import struct
from select import select
+from time import time
from pyload.plugin.Plugin import Abort
@@ -145,9 +142,7 @@ class XDCCRequest(object):
@property
def percent(self):
- if not self.filesize:
- return 0
- return (self.recv * 100) / self.filesize
+ return (self.recv * 100) / self.filesize if self.filesize else 0
def close(self):