summaryrefslogtreecommitdiffstats
path: root/module/network
diff options
context:
space:
mode:
authorGravatar RaNaN <Mast3rRaNaN@hotmail.de> 2010-08-25 18:22:27 +0200
committerGravatar RaNaN <Mast3rRaNaN@hotmail.de> 2010-08-25 18:22:27 +0200
commit29f9dc8fb3396b03d732ebcbeb1cc8f00fe13897 (patch)
treef2a910cbea747a7b0c0a50d6c66691e54f5ef47f /module/network
parentmerged gui (diff)
downloadpyload-29f9dc8fb3396b03d732ebcbeb1cc8f00fe13897.tar.xz
new dirs
Diffstat (limited to 'module/network')
-rw-r--r--module/network/FtpRequest.py259
-rw-r--r--module/network/MultipartPostHandler.py139
-rwxr-xr-xmodule/network/Request.py400
-rw-r--r--module/network/XdccRequest.py338
-rw-r--r--module/network/__init__.py1
5 files changed, 1137 insertions, 0 deletions
diff --git a/module/network/FtpRequest.py b/module/network/FtpRequest.py
new file mode 100644
index 000000000..eecb40c9f
--- /dev/null
+++ b/module/network/FtpRequest.py
@@ -0,0 +1,259 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License,
+ or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+ @author: spoob
+ @author: RaNaN
+ @author: mkaay
+ @author: jeix
+ @version: v0.4.0
+"""
+
+import time
+from os import rename
+from os.path import exists
+from cStringIO import StringIO
+import pycurl
+
class AbortDownload(Exception):
    """Raised by FtpRequest.download() when self.abort was set externally."""
    pass
+
+class FtpRequest:
+ def __init__(self, interface=None):
+
+ self.dl_time = 0
+ self.dl_finished = 0
+ self.dl_size = 0
+ self.dl_arrived = 0
+ self.dl = False
+
+ self.abort = False
+
+ self.timeout = 5
+ self.auth = False
+
+ bufferBase = 1024
+ bufferMulti = 4
+ self.bufferSize = bufferBase*bufferMulti
+ self.canContinue = False
+ self.offset = 0
+
+ self.dl_speed = 0.0
+ self.averageSpeed = 0.0
+ self.averageSpeeds = []
+ self.averageSpeedTime = 0.0
+ self.averageSpeedCount = 0.0
+
+ self.speedLimitActive = False
+ self.maxSpeed = 0
+ self.isSlow = False
+ self.interface = interface
+
+ # change this for connection information
+ self.debug = False
+
+ self.init_curl()
+
+ def set_timeout(self, timeout):
+ self.timeout = int(timeout)
+
+ def init_curl(self):
+ self.rep = StringIO()
+ self.header = ""
+
+ self.pycurl = pycurl.Curl()
+ self.pycurl.setopt(pycurl.FOLLOWLOCATION, 1)
+ self.pycurl.setopt(pycurl.MAXREDIRS, 5)
+ self.pycurl.setopt(pycurl.TIMEOUT, (self.timeout*3600))
+ self.pycurl.setopt(pycurl.CONNECTTIMEOUT, 30)
+ self.pycurl.setopt(pycurl.NOSIGNAL, 1)
+ self.pycurl.setopt(pycurl.NOPROGRESS, 0)
+ self.pycurl.setopt(pycurl.PROGRESSFUNCTION, self.progress)
+ self.pycurl.setopt(pycurl.AUTOREFERER, 1)
+ self.pycurl.setopt(pycurl.BUFFERSIZE, self.bufferSize)
+ self.pycurl.setopt(pycurl.SSL_VERIFYPEER, 0)
+ if self.debug:
+ self.pycurl.setopt(pycurl.VERBOSE, 1)
+ if self.interface:
+ self.pycurl.setopt(pycurl.INTERFACE, self.interface)
+
+
+ def add_auth(self, user, pw):
+ self.auth = True
+ self.pycurl.setopt(pycurl.USERNAME, user)
+ self.pycurl.setopt(pycurl.PASSWORD, pw)
+
+ def add_proxy(self, protocol, adress):
+ # @TODO: pycurl proxy protocoll selection
+ self.pycurl.setopt(pycurl.PROXY, adress.split(":")[0])
+ self.pycurl.setopt(pycurl.PROXYPORT, adress.split(":")[1])
+
+ def download(self, url, file_name):
+ file_temp = self.get_free_name(file_name) + ".part"
+ self.fp = open(file_temp, 'wb')
+
+ self.init_curl()
+ self.pycurl.setopt(pycurl.URL, url)
+
+ self.dl_arrived = self.offset
+
+ if self.auth:
+ self.add_auth(self.user, self.pw)
+
+ self.dl_time = time.time()
+ self.dl = True
+
+ self.chunkSize = 0
+ self.chunkRead = 0
+ self.subStartTime = 0
+ self.maxChunkSize = 0
+
+ def restLimit():
+ subTime = time.time() - self.subStartTime
+ if subTime <= 1:
+ if self.speedLimitActive:
+ return self.maxChunkSize
+ else:
+ return -1
+ else:
+ self.updateCurrentSpeed(float(self.chunkRead/1024) / subTime)
+
+ self.subStartTime = time.time()
+ self.chunkRead = 0
+ if self.maxSpeed > 0:
+ self.maxChunkSize = self.maxSpeed
+ else:
+ self.maxChunkSize = 0
+ return 0
+
+ def writefunc(buf):
+ if self.abort:
+ return False
+ chunkSize = len(buf)
+ while chunkSize > restLimit() > -1:
+ time.sleep(0.05)
+ self.maxChunkSize -= chunkSize
+ self.fp.write(buf)
+ self.chunkRead += chunkSize
+ self.dl_arrived += chunkSize
+
+ self.pycurl.setopt(pycurl.WRITEFUNCTION, writefunc)
+
+ try:
+ self.pycurl.perform()
+ except Exception, e:
+ code, msg = e
+ if not code == 23:
+ raise Exception, e
+
+ self.fp.close()
+
+ if self.abort:
+ raise AbortDownload
+
+ free_name = self.get_free_name(file_name)
+ rename(file_temp, free_name)
+
+ self.dl = False
+ self.dl_finished = time.time()
+
+ return free_name
+
+ def updateCurrentSpeed(self, speed):
+ self.dl_speed = speed
+ if self.averageSpeedTime + 10 < time.time():
+ self.averageSpeeds = []
+ self.averageSpeeds.append(self.averageSpeed)
+ self.averageSpeeds.append(speed)
+ self.averageSpeed = (speed + self.averageSpeed)/2
+ self.averageSpeedTime = time.time()
+ self.averageSpeedCount = 2
+ else:
+ self.averageSpeeds.append(speed)
+ self.averageSpeedCount += 1
+ allspeed = 0.0
+ for s in self.averageSpeeds:
+ allspeed += s
+ self.averageSpeed = allspeed / self.averageSpeedCount
+
+ def write_header(self, string):
+ self.header += string
+
+ def get_rep(self):
+ value = self.rep.getvalue()
+ self.rep.close()
+ self.rep = StringIO()
+ return value
+
+ def get_header(self):
+ h = self.header
+ self.header = ""
+ return h
+
+ def get_speed(self):
+ try:
+ return self.dl_speed
+ except:
+ return 0
+
+ def get_ETA(self):
+ try:
+ return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
+ except:
+ return 0
+
+ def kB_left(self):
+ return (self.dl_size - self.dl_arrived) / 1024
+
+ def progress(self, dl_t, dl_d, up_t, up_d):
+ if self.abort:
+ return False
+ self.dl_arrived = int(dl_d)
+ self.dl_size = int(dl_t)
+
+ def get_free_name(self, file_name):
+ file_count = 0
+ while exists(file_name):
+ file_count += 1
+ if "." in file_name:
+ file_split = file_name.split(".")
+ temp_name = "%s-%i.%s" % (".".join(file_split[:-1]), file_count, file_split[-1])
+ else:
+ temp_name = "%s-%i" % (file_name, file_count)
+ if not exists(temp_name):
+ file_name = temp_name
+ return file_name
+
+ def __del__(self):
+ self.clean()
+
+ def clean(self):
+ try:
+ self.pycurl.close()
+ except:
+ pass
+
+# def getURL(url):
+ # """
+ # currently used for update check
+ # """
+ # req = Request()
+ # c = req.load(url)
+ # req.pycurl.close()
+ # return c
+
if __name__ == "__main__":
    # run this module's doctests when executed directly
    import doctest
    doctest.testmod()
diff --git a/module/network/MultipartPostHandler.py b/module/network/MultipartPostHandler.py
new file mode 100644
index 000000000..6804bcc90
--- /dev/null
+++ b/module/network/MultipartPostHandler.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+####
+# 02/2006 Will Holcomb <wholcomb@gmail.com>
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# 7/26/07 Slightly modified by Brian Schneider
+# in order to support unicode files ( multipart_encode function )
+"""
+Usage:
+ Enables the use of multipart/form-data for posting forms
+
+Inspirations:
+ Upload files in python:
+ http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
+ urllib2_file:
+ Fabien Seisen: <fabien@seisen.org>
+
+Example:
+ import MultipartPostHandler, urllib2, cookielib
+
+ cookies = cookielib.CookieJar()
+ opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies),
+ MultipartPostHandler.MultipartPostHandler)
+ params = { "username" : "bob", "password" : "riviera",
+ "file" : open("filename", "rb") }
+ opener.open("http://wwww.bobsite.com/upload/", params)
+
+Further Example:
+ The main function of this file is a sample which downloads a page and
+ then uploads it to the W3C validator.
+"""
+
+import urllib
+import urllib2
+import mimetools, mimetypes
+import os, stat
+from cStringIO import StringIO
+
class Callable:
    """Wrap a plain function so it acts like a static method (pre-2.4 idiom)."""
    def __init__(self, anycallable):
        self.__call__ = anycallable
+
+# Controls how sequences are uncoded. If true, elements may be given multiple values by
+# assigning a sequence.
+doseq = 1
+
+class MultipartPostHandler(urllib2.BaseHandler):
+ handler_order = urllib2.HTTPHandler.handler_order - 10 # needs to run first
+
+ def http_request(self, request):
+ data = request.get_data()
+ if data is not None and type(data) != str:
+ v_files = []
+ v_vars = []
+ try:
+ for(key, value) in data.items():
+ if type(value) == file:
+ v_files.append((key, value))
+ else:
+ v_vars.append((key, value))
+ except TypeError:
+ systype, value, traceback = sys.exc_info()
+ raise TypeError, "not a valid non-string sequence or mapping object", traceback
+
+ if len(v_files) == 0:
+ data = urllib.urlencode(v_vars, doseq)
+ else:
+ boundary, data = self.multipart_encode(v_vars, v_files)
+
+ contenttype = 'multipart/form-data; boundary=%s' % boundary
+ if(request.has_header('Content-Type')
+ and request.get_header('Content-Type').find('multipart/form-data') != 0):
+ print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
+ request.add_unredirected_header('Content-Type', contenttype)
+
+ request.add_data(data)
+
+ return request
+
+ def multipart_encode(vars, files, boundary = None, buf = None):
+ if boundary is None:
+ boundary = mimetools.choose_boundary()
+ if buf is None:
+ buf = StringIO()
+ for(key, value) in vars:
+ buf.write('--%s\r\n' % boundary)
+ buf.write('Content-Disposition: form-data; name="%s"' % key)
+ buf.write('\r\n\r\n' + value + '\r\n')
+ for(key, fd) in files:
+ file_size = os.fstat(fd.fileno())[stat.ST_SIZE]
+ filename = fd.name.split('/')[-1]
+ contenttype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+ buf.write('--%s\r\n' % boundary)
+ buf.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, filename))
+ buf.write('Content-Type: %s\r\n' % contenttype)
+ # buffer += 'Content-Length: %s\r\n' % file_size
+ fd.seek(0)
+ buf.write('\r\n' + fd.read() + '\r\n')
+ buf.write('--' + boundary + '--\r\n\r\n')
+ buf = buf.getvalue()
+ return boundary, buf
+ multipart_encode = Callable(multipart_encode)
+
+ https_request = http_request
+
+def main():
+ import tempfile, sys
+
+ validatorURL = "http://validator.w3.org/check"
+ opener = urllib2.build_opener(MultipartPostHandler)
+
+ def validateFile(url):
+ temp = tempfile.mkstemp(suffix=".html")
+ os.write(temp[0], opener.open(url).read())
+ params = { "ss" : "0", # show source
+ "doctype" : "Inline",
+ "uploaded_file" : open(temp[1], "rb") }
+ print opener.open(validatorURL, params).read()
+ os.remove(temp[1])
+
+ if len(sys.argv[1:]) > 0:
+ for arg in sys.argv[1:]:
+ validateFile(arg)
+ else:
+ validateFile("http://www.google.com")
+
+if __name__=="__main__":
+ main() \ No newline at end of file
diff --git a/module/network/Request.py b/module/network/Request.py
new file mode 100755
index 000000000..75a490b9f
--- /dev/null
+++ b/module/network/Request.py
@@ -0,0 +1,400 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License,
+ or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+ @author: spoob
+ @author: RaNaN
+ @author: mkaay
+ @version: v0.3.2
+"""
+
+import base64
+import time
+from os import sep, rename, stat
+from os.path import exists, join
+from shutil import move
+import urllib
+from cStringIO import StringIO
+import pycurl
+
+from module.plugins.Plugin import Abort
+
+class Request:
+ def __init__(self, interface=None):
+
+ self.dl_time = 0
+ self.dl_finished = 0
+ self.dl_size = 0
+ self.dl_arrived = 0
+ self.dl = False
+
+ self.abort = False
+
+ self.lastEffectiveURL = None
+ self.lastURL = None
+ self.auth = False
+
+ self.timeout = 5
+
+ bufferBase = 1024
+ bufferMulti = 4
+ self.bufferSize = bufferBase*bufferMulti
+ self.canContinue = False
+ self.offset = 0
+
+ self.dl_speed = 0.0
+ self.averageSpeed = 0.0
+ self.averageSpeeds = []
+ self.averageSpeedTime = 0.0
+ self.averageSpeedCount = 0.0
+
+ self.speedLimitActive = False
+ self.maxSpeed = 0
+ self.isSlow = False
+ self.cookieJar = None
+ self.interface = interface
+
+ # change this for connection information
+ self.debug = False
+
+ self.init_curl()
+
+ def set_timeout(self, timeout):
+ self.timeout = int(timeout)
+
+ def init_curl(self):
+ self.rep = StringIO()
+ self.header = ""
+
+ self.pycurl = pycurl.Curl()
+ self.pycurl.setopt(pycurl.FOLLOWLOCATION, 1)
+ self.pycurl.setopt(pycurl.MAXREDIRS, 5)
+ self.pycurl.setopt(pycurl.TIMEOUT, (self.timeout*3600))
+ self.pycurl.setopt(pycurl.CONNECTTIMEOUT, 30)
+ self.pycurl.setopt(pycurl.NOSIGNAL, 1)
+ self.pycurl.setopt(pycurl.NOPROGRESS, 0)
+ self.pycurl.setopt(pycurl.PROGRESSFUNCTION, self.progress)
+ self.pycurl.setopt(pycurl.AUTOREFERER, 1)
+ self.pycurl.setopt(pycurl.HEADERFUNCTION, self.write_header)
+ self.pycurl.setopt(pycurl.BUFFERSIZE, self.bufferSize)
+ self.pycurl.setopt(pycurl.SSL_VERIFYPEER, 0)
+
+ if self.debug:
+ self.pycurl.setopt(pycurl.VERBOSE, 1)
+ if self.interface and self.interface.lower() != "none":
+ self.pycurl.setopt(pycurl.INTERFACE, self.interface)
+
+
+ self.pycurl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (Windows; U; Windows NT 5.1; en; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.10")
+ if pycurl.version_info()[7]:
+ self.pycurl.setopt(pycurl.ENCODING, "gzip, deflate")
+ self.pycurl.setopt(pycurl.HTTPHEADER, ["Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+ "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7",
+ "Connection: keep-alive",
+ "Keep-Alive: 300"])
+
+ def setCookieJar(self, j):
+ self.cookieJar = j
+
+ def addCookies(self):
+ if self.cookieJar:
+ self.cookieJar.addCookies(self.pycurl.getinfo(pycurl.INFO_COOKIELIST))
+ return
+
+ def getCookies(self):
+ if self.cookieJar:
+ for c in self.cookieJar.getCookies():
+ self.pycurl.setopt(pycurl.COOKIELIST, c)
+ return
+
+ def getCookie(self, name):
+ if self.cookieJar:
+ return self.cookieJar.getCookie(name)
+ return None
+
+ def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, no_post_encode=False):
+
+ self.pycurl.setopt(pycurl.NOPROGRESS, 1)
+
+ url = str(url)
+
+ if post:
+ if not no_post_encode:
+ post = urllib.urlencode(post)
+ else:
+ post = None
+
+ if get:
+ get = urllib.urlencode(get)
+ url = "%s?%s" % (url, get)
+ else:
+ get = ""
+
+ self.pycurl.setopt(pycurl.URL, url)
+ self.pycurl.setopt(pycurl.WRITEFUNCTION, self.rep.write)
+
+ if cookies:
+ self.curl_enable_cookies()
+ self.getCookies()
+
+ if post:
+ self.pycurl.setopt(pycurl.POSTFIELDS, post)
+
+ if ref and self.lastURL is not None:
+ self.pycurl.setopt(pycurl.REFERER, self.lastURL)
+
+ if just_header:
+ self.pycurl.setopt(pycurl.NOBODY, 1)
+ self.pycurl.perform()
+ self.lastEffectiveURL = self.pycurl.getinfo(pycurl.EFFECTIVE_URL)
+ self.pycurl.setopt(pycurl.NOPROGRESS, 0)
+ self.pycurl.setopt(pycurl.NOBODY, 0)
+ return self.header
+
+ self.pycurl.perform()
+
+ self.lastEffectiveURL = self.pycurl.getinfo(pycurl.EFFECTIVE_URL)
+ self.addCookies()
+
+ #reset progress
+
+ self.dl_time = 0
+ self.dl_finished = 0
+ self.dl_size = 0
+ self.dl_arrived = 0
+
+ self.lastURL = url
+ header = self.get_header()
+
+ return self.get_rep()
+
+ def curl_enable_cookies(self):
+ self.pycurl.setopt(pycurl.COOKIEFILE, "")
+ self.pycurl.setopt(pycurl.COOKIEJAR, "")
+
+ def add_auth(self, user, pw):
+
+ self.auth = True
+ self.user = user
+ self.pw = pw
+
+ upwstr = str("%s:%s" % (user,pw))
+ self.pycurl.setopt(pycurl.HTTPHEADER, ['Authorization: Basic ' + base64.encodestring(upwstr)[:-1]])
+ self.pycurl.setopt(pycurl.USERPWD, upwstr)
+ self.pycurl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_ANY)
+
+ def clearCookies(self):
+ self.pycurl.setopt(pycurl.COOKIELIST, "")
+
+ def add_proxy(self, protocol, adress):
+ # @TODO: pycurl proxy protocol selection
+ self.pycurl.setopt(pycurl.PROXY, adress.split(":")[0])
+ self.pycurl.setopt(pycurl.PROXYPORT, adress.split(":")[1])
+
+ def download(self, url, file_name, folder, get={}, post={}, ref=True, cookies=True, no_post_encode=False):
+
+ url = str(url)
+
+ self.pycurl.setopt(pycurl.NOPROGRESS, 0)
+
+ if post:
+ if not no_post_encode:
+ post = urllib.urlencode(post)
+ else:
+ post = None
+
+ if get:
+ get = urllib.urlencode(get)
+ url = "%s?%s" % (url, get)
+ else:
+ get = ""
+
+ file_temp = self.get_free_name(folder,file_name) + ".part"
+
+ self.fp = open(file_temp, 'wb' if not self.canContinue else 'ab')
+
+ partSize = self.fp.tell()
+
+ self.init_curl()
+
+ self.pycurl.setopt(pycurl.URL, url)
+
+ if self.canContinue:
+ self.offset = stat(file_temp).st_size
+ self.pycurl.setopt(pycurl.RESUME_FROM, self.offset)
+
+ self.dl_arrived = self.offset
+
+ if cookies:
+ self.curl_enable_cookies()
+ self.getCookies()
+
+ if post:
+ self.pycurl.setopt(pycurl.POSTFIELDS, post)
+
+ if self.auth:
+ self.add_auth(self.user, self.pw)
+
+ if ref and self.lastURL is not None:
+ self.pycurl.setopt(pycurl.REFERER, self.lastURL)
+
+ self.dl_time = time.time()
+ self.dl = True
+
+ self.chunkSize = 0
+ self.chunkRead = 0
+ self.subStartTime = 0
+ self.maxChunkSize = 0
+
+ def restLimit():
+ subTime = time.time() - self.subStartTime
+ if subTime <= 1:
+ if self.speedLimitActive:
+ return self.maxChunkSize
+ else:
+ return -1
+ else:
+ self.updateCurrentSpeed(float(self.chunkRead/1024) / subTime)
+
+ self.subStartTime = time.time()
+ self.chunkRead = 0
+ if self.maxSpeed > 0:
+ self.maxChunkSize = self.maxSpeed
+ else:
+ self.maxChunkSize = 0
+ return 0
+
+ def writefunc(buf):
+ if self.abort:
+ return False
+ chunkSize = len(buf)
+ while chunkSize > restLimit() > -1:
+ time.sleep(0.05)
+ self.maxChunkSize -= chunkSize
+ self.fp.write(buf)
+ self.chunkRead += chunkSize
+ self.dl_arrived += chunkSize
+
+ self.pycurl.setopt(pycurl.WRITEFUNCTION, writefunc)
+
+ try:
+ self.pycurl.perform()
+ except Exception, e:
+ code, msg = e
+ if not code == 23:
+ raise Exception, e
+ finally:
+ self.dl = False
+ self.dl_finished = time.time()
+
+ self.addCookies()
+ self.fp.close()
+
+ if self.abort: raise Abort
+
+ free_name = self.get_free_name(folder, file_name)
+ move(file_temp, free_name)
+
+ #@TODO content disposition
+
+ #return free_name
+
+ def updateCurrentSpeed(self, speed):
+ self.dl_speed = speed
+ if self.averageSpeedTime + 10 < time.time():
+ self.averageSpeeds = []
+ self.averageSpeeds.append(self.averageSpeed)
+ self.averageSpeeds.append(speed)
+ self.averageSpeed = (speed + self.averageSpeed)/2
+ self.averageSpeedTime = time.time()
+ self.averageSpeedCount = 2
+ else:
+ self.averageSpeeds.append(speed)
+ self.averageSpeedCount += 1
+ allspeed = 0.0
+ for s in self.averageSpeeds:
+ allspeed += s
+ self.averageSpeed = allspeed / self.averageSpeedCount
+
+ def write_header(self, string):
+ self.header += string
+
+ def get_rep(self):
+ value = self.rep.getvalue()
+ self.rep.close()
+ self.rep = StringIO()
+ return value
+
+ def get_header(self):
+ h = self.header
+ self.header = ""
+ return h
+
+ def get_speed(self):
+ try:
+ return self.dl_speed
+ except:
+ return 0
+
+ def get_ETA(self):
+ try:
+ return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
+ except:
+ return 0
+
+ def bytes_left(self):
+ return (self.dl_size - self.dl_arrived)
+
+ def progress(self, dl_t, dl_d, up_t, up_d):
+ if self.abort:
+ return False
+ self.dl_arrived = int(dl_d)
+ self.dl_size = int(dl_t)
+
+ def get_free_name(self, folder, file_name):
+ file_count = 0
+ file_name = join(folder, file_name)
+ while exists(file_name):
+ file_count += 1
+ if "." in file_name:
+ file_split = file_name.split(".")
+ temp_name = "%s-%i.%s" % (".".join(file_split[:-1]), file_count, file_split[-1])
+ else:
+ temp_name = "%s-%i" % (file_name, file_count)
+ if not exists(temp_name):
+ file_name = temp_name
+ return file_name
+
+ def __del__(self):
+ self.clean()
+
+ def clean(self):
+ try:
+ self.pycurl.close()
+ except:
+ pass
+
def getURL(url, get=None, post=None):
    """
    Fetch *url* with a throw-away Request and return the response body.

    Currently used for the update check.  *get* and *post* are optional
    parameter dicts; the previous mutable ``{}`` defaults were replaced by
    ``None`` so one dict object is not shared across all calls.
    """
    if get is None:
        get = {}
    if post is None:
        post = {}
    req = Request()
    c = req.load(url, get, post)
    req.pycurl.close()
    return c
+
if __name__ == "__main__":
    # run this module's doctests when executed directly
    import doctest
    doctest.testmod()
diff --git a/module/network/XdccRequest.py b/module/network/XdccRequest.py
new file mode 100644
index 000000000..ce764eb12
--- /dev/null
+++ b/module/network/XdccRequest.py
@@ -0,0 +1,338 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License,
+ or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+ @author: spoob
+ @author: RaNaN
+ @author: mkaay
+ @author: jeix
+ @version: v0.4.0
+"""
+
import re
import socket
import struct
import time
from cStringIO import StringIO
from os import sep, rename, stat
from os.path import exists
from select import select
+
class AbortDownload(Exception):
    """Raised by XdccRequest.download() when self.abort was set externally."""
    pass
+
class IRCError(Exception):
    """Raised when the IRC server sends an ERROR line."""
    def __init__(self, value):
        # value: the raw IRC line that triggered the error
        self.value = value
    def __str__(self):
        return repr(self.value)
+
class XDCCError(Exception):
    """Raised for XDCC-level failures (e.g. the bot never answers)."""
    def __init__(self, value):
        # value: human-readable description of the failure
        self.value = value
    def __str__(self):
        return repr(self.value)
+
+class XdccRequest:
+ def __init__(self):
+
+ self.dl_time = 0
+ self.dl_finished = 0
+ self.dl_size = 0
+ self.dl_arrived = 0
+ self.dl = False
+
+ self.abort = False
+
+ self.timeout = 20
+
+ bufferBase = 1024
+ bufferMulti = 4
+ self.bufferSize = bufferBase*bufferMulti
+ self.canContinue = False
+ self.offset = 0
+
+ self.dl_speed = 0.0
+ self.averageSpeed = 0.0
+ self.averageSpeeds = []
+ self.averageSpeedTime = 0.0
+ self.averageSpeedCount = 0.0
+
+ self.speedLimitActive = False
+ self.maxSpeed = 0
+ self.isSlow = False
+
+ # change this for connection information
+ self.debug = False
+
+ def set_timeout(self, timeout):
+ self.timeout = int(timeout)
+
+ def add_proxy(self, protocol, adress):
+ # @TODO: pycurl proxy protocoll selection
+ raise NotImplementedError
+
+ # xdcc://irc.Abjects.net/[XDCC]|Shit/#0004/
+ #nick, ident, realname, servers
+ def download(self, bot, pack, path, nick, ident, realname, channel, host, port=6667):
+ self.dl_time = time.time()
+ self.dl = True
+
+ self.chunkSize = 0
+ self.chunkRead = 0
+ self.subStartTime = 0
+ self.maxChunkSize = 0
+
+ def restLimit():
+ subTime = time.time() - self.subStartTime
+ if subTime <= 1:
+ if self.speedLimitActive:
+ return self.maxChunkSize
+ else:
+ return -1
+ else:
+ self.updateCurrentSpeed(float(self.chunkRead/1024) / subTime)
+
+ self.subStartTime = time.time()
+ self.chunkRead = 0
+ if self.maxSpeed > 0:
+ self.maxChunkSize = self.maxSpeed
+ else:
+ self.maxChunkSize = 0
+ return 0
+
+ def writefunc(in_chunkSize):
+ chunkSize = in_chunkSize
+ while chunkSize > restLimit() > -1:
+ time.sleep(0.05)
+ self.maxChunkSize -= chunkSize
+ self.chunkRead += chunkSize
+ self.dl_arrived += chunkSize
+
+
+ # connect to IRC
+ sock = socket.socket()
+ sock.connect((host, port))
+ if nick == "pyload":
+ nick = "pyload-%d" % (time.time() % 1000) # last 3 digits
+ sock.send("NICK %s\r\n" % nick)
+ sock.send("USER %s %s bla :%s\r\n" % (ident, host, realname))
+ sock.send("JOIN #%s\r\n" % channel)
+ sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
+
+ # IRC recv loop
+ readbuffer = ""
+ while True:
+ if self.abort:
+ raise AbortDownload
+
+ if self.dl_time + self.timeout < time.time():
+ raise XDCCError("timeout, bot did not answer")
+
+ #time.sleep(5) # cool down <- was a bullshit idea
+
+ fdset = select([sock], [], [], 0)
+ if sock not in fdset[0]:
+ continue
+
+ readbuffer += sock.recv(1024)
+ temp = readbuffer.split("\n")
+ readbuffer = temp.pop()
+
+ for line in temp:
+ if self.debug: print "*> " + line
+ line = line.rstrip()
+ first = line.split()
+
+ if(first[0] == "PING"):
+ sock.send("PONG %s\r\n" % first[1])
+
+ if first[0] == "ERROR":
+ raise IRCError(line)
+
+ msg = line.split(None, 3)
+ if len(msg) != 4:
+ continue
+
+ msg = { \
+ "origin":msg[0][1:], \
+ "action":msg[1], \
+ "target":msg[2], \
+ "text" :msg[3][1:] \
+ }
+
+
+ if nick == msg["target"][0:len(nick)]\
+ and "PRIVMSG" == msg["action"]:
+ if msg["text"] == "\x01VERSION\x01":
+ if self.debug: print "Sending CTCP VERSION."
+ sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
+ elif msg["text"] == "\x01TIME\x01":
+ if self.debug: print "Sending CTCP TIME."
+ sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
+ elif msg["text"] == "\x01LAG\x01":
+ pass # don't know how to answer
+
+ if not (bot == msg["origin"][0:len(bot)]
+ and nick == msg["target"][0:len(nick)]
+ and "PRIVMSG" == msg["action"]):
+ continue
+
+ m = re.match('\x01DCC SEND (.*?) (.*?) (.*?) (.*?)\x01', msg["text"])
+ if m != None:
+ break
+
+ # kill IRC socket
+ sock.send("QUIT :byebye\r\n")
+ sock.close()
+
+ # connect to XDCC Bot
+ dcc = socket.socket()
+ ip = socket.inet_ntoa(struct.pack('L', socket.ntohl(int(m.group(2)))))
+ port = int(m.group(3))
+ dcc.connect((ip, port))
+
+ dcc_packname = m.group(1)
+ if len(m.groups()) > 3:
+ self.dl_size = int(m.group(4))
+ dcc_packname = self.get_free_name(path + '\\' + dcc_packname)
+ dcc_fpointer = open(dcc_packname + ".part", "wb")
+ dcc_total = 0
+
+ # recv loop for dcc socket
+ while True:
+ if self.abort:
+ dcc.close()
+ dcc_fpointer.close()
+ raise AbortDownload
+
+ fdset = select([dcc], [], [], 0)
+ if dcc not in fdset[0]:
+ continue
+
+ # recv something
+ recvbytes = dcc.recv(2**14)
+
+ # connection closed and everything received -> reset variables
+ if len(recvbytes) == 0:
+ dcc.close()
+ dcc_fpointer.close()
+ break
+
+ # status updates, speedmanaging, etc.
+ writefunc(len(recvbytes))
+
+ # add response to file
+ dcc_fpointer.write(recvbytes)
+ dcc_total += len(recvbytes)
+
+ # acknowledge data by sending number of recceived bytes
+ dcc.send(struct.pack('!I', dcc_total))
+ ########################
+
+ free_name = self.get_free_name(dcc_packname)
+ rename(dcc_packname + ".part", free_name)
+
+ self.dl = False
+ self.dl_finished = time.time()
+
+ return free_name
+
+ def updateCurrentSpeed(self, speed):
+ self.dl_speed = speed
+ if self.averageSpeedTime + 10 < time.time():
+ self.averageSpeeds = []
+ self.averageSpeeds.append(self.averageSpeed)
+ self.averageSpeeds.append(speed)
+ self.averageSpeed = (speed + self.averageSpeed)/2
+ self.averageSpeedTime = time.time()
+ self.averageSpeedCount = 2
+ else:
+ self.averageSpeeds.append(speed)
+ self.averageSpeedCount += 1
+ allspeed = 0.0
+ for s in self.averageSpeeds:
+ allspeed += s
+ self.averageSpeed = allspeed / self.averageSpeedCount
+
+ def write_header(self, string):
+ self.header += string
+
+ def get_rep(self):
+ value = self.rep.getvalue()
+ self.rep.close()
+ self.rep = StringIO()
+ return value
+
+ def get_header(self):
+ h = self.header
+ self.header = ""
+ return h
+
+ def get_speed(self):
+ try:
+ return self.dl_speed
+ except:
+ return 0
+
+ def get_ETA(self):
+ try:
+ return (self.dl_size - self.dl_arrived) / (self.dl_arrived / (time.time() - self.dl_time))
+ except:
+ return 0
+
+ def kB_left(self):
+ return (self.dl_size - self.dl_arrived) / 1024
+
+ def progress(self, dl_t, dl_d, up_t, up_d):
+ if self.abort:
+ return False
+ self.dl_arrived = int(dl_d)
+ self.dl_size = int(dl_t)
+
+ def get_free_name(self, file_name):
+ file_count = 0
+ while exists(file_name):
+ file_count += 1
+ if "." in file_name:
+ file_split = file_name.split(".")
+ temp_name = "%s-%i.%s" % (".".join(file_split[:-1]), file_count, file_split[-1])
+ else:
+ temp_name = "%s-%i" % (file_name, file_count)
+ if not exists(temp_name):
+ file_name = temp_name
+ return file_name
+
+ def __del__(self):
+ self.clean()
+
+ def clean(self):
+ try:
+ pass
+ # self.pycurl.close()
+ except:
+ pass
+
+# def getURL(url):
+ # """
+ # currently used for update check
+ # """
+ # req = Request()
+ # c = req.load(url)
+ # req.pycurl.close()
+ # return c
+
if __name__ == "__main__":
    # run this module's doctests when executed directly
    import doctest
    doctest.testmod()
diff --git a/module/network/__init__.py b/module/network/__init__.py
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/module/network/__init__.py
@@ -0,0 +1 @@
+