path: root/pyload/plugin/addon
Diffstat (limited to 'pyload/plugin/addon')
-rw-r--r--  pyload/plugin/addon/AndroidPhoneNotify.py    75
-rw-r--r--  pyload/plugin/addon/Checksum.py              195
-rw-r--r--  pyload/plugin/addon/ClickAndLoad.py          119
-rw-r--r--  pyload/plugin/addon/DeleteFinished.py        79
-rw-r--r--  pyload/plugin/addon/DownloadScheduler.py     77
-rw-r--r--  pyload/plugin/addon/ExternalScripts.py       150
-rw-r--r--  pyload/plugin/addon/ExtractArchive.py        504
-rw-r--r--  pyload/plugin/addon/HotFolder.py             71
-rw-r--r--  pyload/plugin/addon/IRCInterface.py          431
-rw-r--r--  pyload/plugin/addon/JustPremium.py           46
-rw-r--r--  pyload/plugin/addon/MergeFiles.py            85
-rw-r--r--  pyload/plugin/addon/MultiHome.py             81
-rw-r--r--  pyload/plugin/addon/RestartFailed.py         45
-rw-r--r--  pyload/plugin/addon/RestartSlow.py           57
-rw-r--r--  pyload/plugin/addon/SkipRev.py               93
-rw-r--r--  pyload/plugin/addon/UnSkipOnFail.py          90
-rw-r--r--  pyload/plugin/addon/UpdateManager.py         306
-rw-r--r--  pyload/plugin/addon/WindowsPhoneNotify.py    91
-rw-r--r--  pyload/plugin/addon/XMPPInterface.py         252
-rw-r--r--  pyload/plugin/addon/__init__.py              1
20 files changed, 2848 insertions, 0 deletions
diff --git a/pyload/plugin/addon/AndroidPhoneNotify.py b/pyload/plugin/addon/AndroidPhoneNotify.py
new file mode 100644
index 000000000..2b4f8fcca
--- /dev/null
+++ b/pyload/plugin/addon/AndroidPhoneNotify.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+
+from time import time
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Addon
+
+
+class AndroidPhoneNotify(Addon):
+ __name__ = "AndroidPhoneNotify"
+ __type__ = "addon"
+ __version__ = "0.05"
+
+ __config__ = [("apikey" , "str" , "API key" , "" ),
+ ("notifycaptcha" , "bool", "Notify captcha request" , True ),
+ ("notifypackage" , "bool", "Notify package finished" , True ),
+ ("notifyprocessed", "bool", "Notify processed packages status" , True ),
+ ("timeout" , "int" , "Timeout between captchas in seconds" , 5 ),
+ ("force" , "bool", "Send notifications if client is connected", False)]
+
+ __description__ = """Send push notifications to your Android Phone using notifymyandroid.com"""
+ __license__ = "GPLv3"
+ __authors__ = [("Steven Kosyra", "steven.kosyra@gmail.com"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ event_list = ["allDownloadsProcessed"]
+
+
+ def setup(self):
+ self.info = {} #@TODO: Remove in 0.4.10
+ self.last_notify = 0
+
+
+ def newCaptchaTask(self, task):
+ if not self.getConfig("notifycaptcha"):
+ return False
+
+ if time() - self.last_notify < self.getConf("timeout"):
+ return False
+
+ self.notify(_("Captcha"), _("New request waiting user input"))
+
+
+ def packageFinished(self, pypack):
+ if self.getConfig("notifypackage"):
+ self.notify(_("Package finished"), pypack.name)
+
+
+ def allDownloadsProcessed(self):
+ if not self.getConfig("notifyprocessed"):
+ return False
+
+ if any(True for pdata in self.core.api.getQueue() if pdata.linksdone < pdata.linkstotal):
+ self.notify(_("Package failed"), _("One or more packages was not completed successfully"))
+ else:
+ self.notify(_("All packages finished"))
+
+
+ def notify(self, event, msg=""):
+ apikey = self.getConfig("apikey")
+
+ if not apikey:
+ return False
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return False
+
+ getURL("http://www.notifymyandroid.com/publicapi/notify",
+ get={'apikey' : apikey,
+ 'application': "pyLoad",
+ 'event' : event,
+ 'description': msg})
+
+ self.last_notify = time()
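
For reference, the notify() call above boils down to a single HTTP GET against the NotifyMyAndroid public API (getURL is pyload's own request helper). A minimal standalone sketch with plain Python 2 urllib and a made-up API key:

    import urllib

    params = urllib.urlencode({'apikey'     : "0123456789abcdef",     # hypothetical key
                               'application': "pyLoad",
                               'event'      : "Package finished",
                               'description': "some.package.name"})
    urllib.urlopen("http://www.notifymyandroid.com/publicapi/notify?" + params).read()
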
diff --git a/pyload/plugin/addon/Checksum.py b/pyload/plugin/addon/Checksum.py
new file mode 100644
index 000000000..35be60773
--- /dev/null
+++ b/pyload/plugin/addon/Checksum.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import hashlib
+import re
+import zlib
+
+from os import remove
+from os.path import getsize, isfile, splitext
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import safe_join, fs_encode
+
+
+def computeChecksum(local_file, algorithm):
+ if algorithm in getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")):
+ h = getattr(hashlib, algorithm)()
+
+ with open(local_file, 'rb') as f:
+ for chunk in iter(lambda: f.read(128 * h.block_size), ''):
+ h.update(chunk)
+
+ return h.hexdigest()
+
+ elif algorithm in ("adler32", "crc32"):
+ hf = getattr(zlib, algorithm)
+ last = 0
+
+ with open(local_file, 'rb') as f:
+ for chunk in iter(lambda: f.read(8192), ''):
+ last = hf(chunk, last)
+
+ return "%x" % last
+
+ else:
+ return None
+
+
+class Checksum(Addon):
+ __name__ = "Checksum"
+ __type__ = "addon"
+ __version__ = "0.16"
+
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("check_checksum", "bool" , "Check checksum? (If False only size will be verified)", True ),
+ ("check_action" , "fail;retry;nothing", "What to do if check fails?" , "retry"),
+ ("max_tries" , "int" , "Number of retries" , 2 ),
+ ("retry_action" , "fail;nothing" , "What to do if all retries fail?" , "fail" ),
+ ("wait_time" , "int" , "Time to wait before each retry (seconds)" , 1 )]
+
+ __description__ = """Verify downloaded file size and checksum"""
+ __license__ = "GPLv3"
+ __authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ methods = {'sfv' : 'crc32',
+ 'crc' : 'crc32',
+ 'hash': 'md5'}
+ regexps = {'sfv' : r'^(?P<NAME>[^;].+)\s+(?P<HASH>[0-9A-Fa-f]{8})$',
+ 'md5' : r'^(?P<NAME>[0-9A-Fa-f]{32}) (?P<FILE>.+)$',
+ 'crc' : r'filename=(?P<NAME>.+)\nsize=(?P<SIZE>\d+)\ncrc32=(?P<HASH>[0-9A-Fa-f]{8})$',
+ 'default': r'^(?P<HASH>[0-9A-Fa-f]+)\s+\*?(?P<NAME>.+)$'}
+
+
+ def activate(self):
+ if not self.getConfig("check_checksum"):
+ self.logInfo(_("Checksum validation is disabled in plugin configuration"))
+
+
+ def setup(self):
+ self.algorithms = sorted(
+ getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")), reverse=True)
+ self.algorithms.extend(["crc32", "adler32"])
+ self.formats = self.algorithms + ["sfv", "crc", "hash"]
+
+
+ def downloadFinished(self, pyfile):
+ """
+ Compute checksum for the downloaded file and compare it with the hash provided by the hoster.
+ pyfile.plugin.check_data should be a dictionary which can contain:
+ a) if known, the exact filesize in bytes (e.g. "size": 123456789)
+ b) hexadecimal hash string with algorithm name as key (e.g. "md5": "d76505d0869f9f928a17d42d66326307")
+ """
+ if hasattr(pyfile.plugin, "check_data") and isinstance(pyfile.plugin.check_data, dict):
+ data = pyfile.plugin.check_data.copy()
+
+ elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict):
+ data = pyfile.plugin.api_data.copy()
+
+ elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
+ data = pyfile.plugin.info.copy()
+ data.pop('size', None) #@NOTE: Don't check file size until a similarity matcher is implemented
+
+ else:
+ return
+
+ self.logDebug(data)
+
+ if not pyfile.plugin.lastDownload:
+ self.checkFailed(pyfile, None, "No file downloaded")
+
+ local_file = fs_encode(pyfile.plugin.lastDownload)
+ #download_folder = self.config['general']['download_folder']
+ #local_file = fs_encode(safe_join(download_folder, pyfile.package().folder, pyfile.name))
+
+ if not isfile(local_file):
+ self.checkFailed(pyfile, None, "File does not exist")
+
+ # validate file size
+ if "size" in data:
+ api_size = int(data['size'])
+ file_size = getsize(local_file)
+
+ if api_size != file_size:
+ self.logWarning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size))
+ self.checkFailed(pyfile, local_file, "Incorrect file size")
+
+ data.pop('size', None)
+
+ # validate checksum
+ if data and self.getConfig("check_checksum"):
+
+ if not 'md5' in data:
+ for type in ("checksum", "hashsum", "hash"):
+ if type in data:
+ data['md5'] = data[type] #@NOTE: What happens if it's not an md5 hash?
+ break
+
+ for key in self.algorithms:
+ if key in data:
+ checksum = computeChecksum(local_file, key.replace("-", "").lower())
+ if checksum:
+ if checksum == data[key].lower():
+ self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
+ (pyfile.name, key.upper(), checksum))
+ break
+ else:
+ self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
+ (key.upper(), pyfile.name, checksum, data[key]))
+ self.checkFailed(pyfile, local_file, "Checksums do not match")
+ else:
+ self.logWarning(_("Unsupported hashing algorithm"), key.upper())
+ else:
+ self.logWarning(_("Unable to validate checksum for file: ") + pyfile.name)
+
+
+ def checkFailed(self, pyfile, local_file, msg):
+ check_action = self.getConfig("check_action")
+ if check_action == "retry":
+ max_tries = self.getConfig("max_tries")
+ retry_action = self.getConfig("retry_action")
+ if pyfile.plugin.retries < max_tries:
+ if local_file:
+ remove(local_file)
+ pyfile.plugin.retry(max_tries, self.getConfig("wait_time"), msg)
+ elif retry_action == "nothing":
+ return
+ elif check_action == "nothing":
+ return
+ pyfile.plugin.fail(reason=msg)
+
+
+ def packageFinished(self, pypack):
+ download_folder = safe_join(self.config['general']['download_folder'], pypack.folder, "")
+
+ for link in pypack.getChildren().itervalues():
+ file_type = splitext(link['name'])[1][1:].lower()
+
+ if file_type not in self.formats:
+ continue
+
+ hash_file = fs_encode(safe_join(download_folder, link['name']))
+ if not isfile(hash_file):
+ self.logWarning(_("File not found"), link['name'])
+ continue
+
+ with open(hash_file) as f:
+ text = f.read()
+
+ for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
+ data = m.groupdict()
+ self.logDebug(link['name'], data)
+
+ local_file = fs_encode(safe_join(download_folder, data['NAME']))
+ algorithm = self.methods.get(file_type, file_type)
+ checksum = computeChecksum(local_file, algorithm)
+ if checksum == data['HASH']:
+ self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
+ (data['NAME'], algorithm, checksum))
+ else:
+ self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
+ (algorithm, data['NAME'], checksum, data['HASH']))
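
A quick sketch of how the regexps table above picks names and hashes out of typical checksum files; re.M is added here only so the line anchors work on the multi-line sample strings used for the demo:

    import re

    SFV     = r'^(?P<NAME>[^;].+)\s+(?P<HASH>[0-9A-Fa-f]{8})$'    # same 'sfv' pattern as above
    DEFAULT = r'^(?P<HASH>[0-9A-Fa-f]+)\s+\*?(?P<NAME>.+)$'       # same 'default' pattern as above

    sfv_text = "; generated comment, skipped by [^;]\nsome.release.part1.rar 0C7F8A2B\n"
    md5_text = "d41d8cd98f00b204e9800998ecf8427e *empty.bin\n"

    for m in re.finditer(SFV, sfv_text, re.M):
        print("%s -> %s" % (m.group('NAME'), m.group('HASH')))   # some.release.part1.rar -> 0C7F8A2B

    for m in re.finditer(DEFAULT, md5_text, re.M):
        print("%s -> %s" % (m.group('NAME'), m.group('HASH')))   # empty.bin -> d41d8cd98f00b204e9800998ecf8427e
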
diff --git a/pyload/plugin/addon/ClickAndLoad.py b/pyload/plugin/addon/ClickAndLoad.py
new file mode 100644
index 000000000..cd71e9972
--- /dev/null
+++ b/pyload/plugin/addon/ClickAndLoad.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+
+import socket
+
+from threading import Lock
+
+from pyload.plugin.Addon import Addon, threaded
+
+
+def forward(source, destination):
+ try:
+ bufsize = 1024
+ bufdata = source.recv(bufsize)
+ while bufdata:
+ destination.sendall(bufdata)
+ bufdata = source.recv(bufsize)
+ finally:
+ destination.shutdown(socket.SHUT_WR)
+
+
+#: create_connection wrapper for python 2.5 socket module
+def create_connection(address, timeout=object(), source_address=None):
+ if hasattr(socket, 'create_connection'):
+ if type(timeout) == object:
+ timeout = socket._GLOBAL_DEFAULT_TIMEOUT
+
+ return socket.create_connection(address, timeout, source_address)
+
+ else:
+ host, port = address
+ err = None
+ for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ sock = None
+ try:
+ sock = socket.socket(af, socktype, proto)
+ if type(timeout) != object:
+ sock.settimeout(timeout)
+ if source_address:
+ sock.bind(source_address)
+ sock.connect(sa)
+ return sock
+
+ except socket.error, e:
+ err = e
+ if sock is not None:
+ sock.close()
+
+ if err is not None:
+ raise err
+ else:
+ raise socket.error("getaddrinfo returns an empty list")
+
+
+class ClickAndLoad(Addon):
+ __name__ = "ClickAndLoad"
+ __type__ = "addon"
+ __version__ = "0.35"
+
+ __config__ = [("activated", "bool", "Activated" , True),
+ ("port" , "int" , "Port" , 9666),
+ ("extern" , "bool", "Listen on the public network interface", True)]
+
+ __description__ = """Click'N'Load addon plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("RaNaN", "RaNaN@pyload.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def activate(self):
+ if not self.config['webinterface']['activated']:
+ return
+
+ ip = socket.gethostbyname(socket.gethostname()) if self.getConfig("extern") else "127.0.0.1"
+ webport = int(self.config['webinterface']['port'])
+ cnlport = self.getConfig('port')
+
+ self.proxy(ip, webport, cnlport)
+
+
+ @threaded
+ def proxy(self, ip, webport, cnlport):
+ self.manager.startThread(self._server, ip, webport, cnlport)
+ lock = Lock()
+ lock.acquire()
+ lock.acquire()
+
+
+ def _server(self, ip, webport, cnlport, thread):
+ try:
+ server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ server_socket.bind((ip, cnlport))
+ server_socket.listen(5)
+
+ while True:
+ client_socket = server_socket.accept()[0]
+ dock_socket = create_connection(("127.0.0.1", webport))
+
+ self.manager.startThread(forward, dock_socket, client_socket)
+ self.manager.startThread(forward, client_socket, dock_socket)
+
+ except socket.error, e:
+ self.logDebug(e)
+ self._server(ip, webport, cnlport, thread)
+
+ except Exception, e:
+ self.logError(e)
+
+ try:
+ client_socket.close()
+ dock_socket.close()
+ except Exception:
+ pass
+
+ try:
+ server_socket.close()
+ except Exception:
+ pass
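
The proxy is just two forward() pumps running in opposite directions between the Click'N'Load port and the web interface. A minimal self-contained sketch of one pump, where a Unix-only socketpair stands in for the two TCP connections:

    import socket
    from threading import Thread

    def pump(source, destination):            # same loop as the forward() helper above
        try:
            data = source.recv(1024)
            while data:
                destination.sendall(data)
                data = source.recv(1024)
        finally:
            destination.shutdown(socket.SHUT_WR)

    client_a, client_b = socket.socketpair()  # stands in for the CNL client connection
    web_a, web_b       = socket.socketpair()  # stands in for the web interface connection

    Thread(target=pump, args=(client_b, web_a)).start()

    client_a.sendall(b"GET /flash/add HTTP/1.1\r\n\r\n")
    client_a.shutdown(socket.SHUT_WR)
    print(web_b.recv(1024))                   # the request bytes arrive on the far side
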
diff --git a/pyload/plugin/addon/DeleteFinished.py b/pyload/plugin/addon/DeleteFinished.py
new file mode 100644
index 000000000..59f2e3321
--- /dev/null
+++ b/pyload/plugin/addon/DeleteFinished.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+from pyload.database import style
+from pyload.plugin.Addon import Addon
+
+
+class DeleteFinished(Addon):
+ __name__ = "DeleteFinished"
+ __type__ = "addon"
+ __version__ = "1.11"
+
+ __config__ = [('interval' , 'int' , 'Delete every (hours)' , '72' ),
+ ('deloffline', 'bool', 'Delete packages with offline links', 'False')]
+
+ __description__ = """Automatically delete all finished packages from queue"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+
+
+ ## overwritten methods ##
+ def periodical(self):
+ if not self.info['sleep']:
+ deloffline = self.getConfig('deloffline')
+ mode = '0,1,4' if deloffline else '0,4'
+ msg = _('delete all finished packages in queue list (%s packages with offline links)')
+ self.logInfo(msg % (_('including') if deloffline else _('excluding')))
+ self.deleteFinished(mode)
+ self.info['sleep'] = True
+ self.addEvent('packageFinished', self.wakeup)
+
+
+ def pluginConfigChanged(self, plugin, name, value):
+ if name == "interval" and value != self.interval:
+ self.interval = value * 3600
+ self.initPeriodical()
+
+
+ def deactivate(self):
+ self.removeEvent('packageFinished', self.wakeup)
+
+
+ def activate(self):
+ self.info = {'sleep': True}
+ interval = self.getConfig('interval')
+ self.pluginConfigChanged(self.__name__, 'interval', interval)
+ self.addEvent('packageFinished', self.wakeup)
+
+
+ ## own methods ##
+ @style.queue
+ def deleteFinished(self, mode):
+ self.c.execute('DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE package=packages.id AND status NOT IN (%s))' % mode)
+ self.c.execute('DELETE FROM links WHERE NOT EXISTS(SELECT 1 FROM packages WHERE id=links.package)')
+
+
+ def wakeup(self, pypack):
+ self.removeEvent('packageFinished', self.wakeup)
+ self.info['sleep'] = False
+
+
+ ## event managing ##
+ def addEvent(self, event, func):
+ """Adds an event listener for event name"""
+ if event in self.m.events:
+ if func in self.m.events[event]:
+ self.logDebug("Function already registered", func)
+ else:
+ self.m.events[event].append(func)
+ else:
+ self.m.events[event] = [func]
+
+
+ def setup(self):
+ self.interval = 0
+ self.m = self.manager
+ self.removeEvent = self.m.removeEvent
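
The two DELETE statements can be tried out standalone against a throwaway sqlite schema; the status codes are assumed here to be 0 = finished, 1 = offline, 4 = skipped, matching the deloffline switch above:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.executescript("""
        CREATE TABLE packages (id INTEGER PRIMARY KEY);
        CREATE TABLE links    (id INTEGER PRIMARY KEY, package INTEGER, status INTEGER);
        INSERT INTO packages VALUES (1), (2);
        INSERT INTO links VALUES (1, 1, 0), (2, 1, 4),   -- package 1: only finished/skipped links
                                 (3, 2, 0), (4, 2, 3);   -- package 2: still has a queued link
    """)

    mode = "0,4"   # the string deleteFinished() receives when deloffline is off
    con.execute("DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links "
                "WHERE package=packages.id AND status NOT IN (%s))" % mode)
    con.execute("DELETE FROM links WHERE NOT EXISTS(SELECT 1 FROM packages WHERE id=links.package)")

    print(con.execute("SELECT id FROM packages").fetchall())   # [(2,)]  -- only package 2 survives
    print(con.execute("SELECT id FROM links").fetchall())      # [(3,), (4,)]
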
diff --git a/pyload/plugin/addon/DownloadScheduler.py b/pyload/plugin/addon/DownloadScheduler.py
new file mode 100644
index 000000000..e5e25e389
--- /dev/null
+++ b/pyload/plugin/addon/DownloadScheduler.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import localtime
+
+from pyload.plugin.Addon import Addon
+
+
+class DownloadScheduler(Addon):
+ __name__ = "DownloadScheduler"
+ __type__ = "addon"
+ __version__ = "0.22"
+
+ __config__ = [("timetable", "str" , "List time periods as hh:mm full or number(kB/s)" , "0:00 full, 7:00 250, 10:00 0, 17:00 150"),
+ ("abort" , "bool", "Abort active downloads when start period with speed 0", False )]
+
+ __description__ = """Download Scheduler"""
+ __license__ = "GPLv3"
+ __authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ def setup(self):
+ self.cb = None #: callback to scheduler job; will be removed by AddonManager when the addon is unloaded
+
+
+ def activate(self):
+ self.updateSchedule()
+
+
+ def updateSchedule(self, schedule=None):
+ if schedule is None:
+ schedule = self.getConfig("timetable")
+
+ schedule = re.findall("(\d{1,2}):(\d{2})[\s]*(-?\d+)",
+ schedule.lower().replace("full", "-1").replace("none", "0"))
+ if not schedule:
+ self.logError(_("Invalid schedule"))
+ return
+
+ t0 = localtime()
+ now = (t0.tm_hour, t0.tm_min, t0.tm_sec, "X")
+ schedule = sorted([(int(x[0]), int(x[1]), 0, int(x[2])) for x in schedule] + [now])
+
+ self.logDebug("Schedule", schedule)
+
+ for i, v in enumerate(schedule):
+ if v[3] == "X":
+ last, next = schedule[i - 1], schedule[(i + 1) % len(schedule)]
+ self.logDebug("Now/Last/Next", now, last, next)
+
+ self.setDownloadSpeed(last[3])
+
+ next_time = (((24 + next[0] - now[0]) * 60 + next[1] - now[1]) * 60 + next[2] - now[2]) % 86400
+ self.core.scheduler.removeJob(self.cb)
+ self.cb = self.core.scheduler.addJob(next_time, self.updateSchedule, threaded=False)
+
+
+ def setDownloadSpeed(self, speed):
+ if speed == 0:
+ abort = self.getConfig("abort")
+ self.logInfo(_("Stopping download server. (Running downloads will %sbe aborted.)") % '' if abort else _('not '))
+ self.core.api.pauseServer()
+ if abort:
+ self.core.api.stopAllDownloads()
+ else:
+ self.core.api.unpauseServer()
+
+ if speed > 0:
+ self.logInfo(_("Setting download speed to %d kB/s") % speed)
+ self.core.api.setConfigValue("download", "limit_speed", 1)
+ self.core.api.setConfigValue("download", "max_speed", speed)
+ else:
+ self.logInfo(_("Setting download speed to FULL"))
+ self.core.api.setConfigValue("download", "limit_speed", 0)
+ self.core.api.setConfigValue("download", "max_speed", -1)
diff --git a/pyload/plugin/addon/ExternalScripts.py b/pyload/plugin/addon/ExternalScripts.py
new file mode 100644
index 000000000..5aebf2338
--- /dev/null
+++ b/pyload/plugin/addon/ExternalScripts.py
@@ -0,0 +1,150 @@
+# -*- coding: utf-8 -*-
+
+import os
+import subprocess
+
+from itertools import chain
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import safe_join
+
+
+class ExternalScripts(Addon):
+ __name__ = "ExternalScripts"
+ __type__ = "addon"
+ __version__ = "0.29"
+
+ __config__ = [("activated", "bool", "Activated" , True ),
+ ("wait" , "bool", "Wait script ending", False)]
+
+ __description__ = """Run external scripts"""
+ __license__ = "GPLv3"
+ __authors__ = [("mkaay", "mkaay@mkaay.de"),
+ ("RaNaN", "ranan@pyload.org"),
+ ("spoob", "spoob@pyload.org"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ event_map = {'archive-extracted' : "archive_extracted",
+ 'package-extracted' : "package_extracted",
+ 'all_archives-extracted' : "all_archives_extracted",
+ 'all_archives-processed' : "all_archives_processed",
+ 'all_downloads-finished' : "allDownloadsFinished",
+ 'all_downloads-processed': "allDownloadsProcessed"}
+
+
+ def setup(self):
+ self.scripts = {}
+
+ folders = ["download_preparing", "download_finished", "all_downloads_finished", "all_downloads_processed",
+ "before_reconnect", "after_reconnect",
+ "package_finished", "package_extracted",
+ "archive_extracted", "all_archives_extracted", "all_archives_processed",
+ # deprecated folders
+ "unrar_finished", "all_dls_finished", "all_dls_processed"]
+
+ for folder in folders:
+ self.scripts[folder] = []
+
+ self.initPluginType(folder, os.path.join(pypath, 'scripts', folder))
+ self.initPluginType(folder, os.path.join('scripts', folder))
+
+ for script_type, names in self.scripts.iteritems():
+ if names:
+ self.logInfo(_("Installed scripts for"), script_type, ", ".join(map(os.path.basename, names)))
+
+
+ def initPluginType(self, folder, path):
+ if not os.path.exists(path):
+ try:
+ os.makedirs(path)
+
+ except Exception:
+ self.logDebug("Script folder %s not created" % folder)
+ return
+
+ for f in os.listdir(path):
+ if f.startswith("#") or f.startswith(".") or f.startswith("_") or f.endswith("~") or f.endswith(".swp"):
+ continue
+
+ if not os.access(os.path.join(path, f), os.X_OK):
+ self.logWarning(_("Script not executable:") + " %s/%s" % (folder, f))
+
+ self.scripts[folder].append(os.path.join(path, f))
+
+
+ def callScript(self, script, *args):
+ try:
+ cmd = [script] + [str(x) if not isinstance(x, basestring) else x for x in args]
+
+ self.logDebug("Executing", os.path.abspath(script), " ".join(cmd))
+
+ p = subprocess.Popen(cmd, bufsize=-1) #@NOTE: output goes to pyload
+ if self.getConfig('wait'):
+ p.communicate()
+
+ except Exception, e:
+ self.logError(_("Error in %(script)s: %(error)s") % {"script": os.path.basename(script), "error": e})
+
+
+ def downloadPreparing(self, pyfile):
+ for script in self.scripts['download_preparing']:
+ self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.id)
+
+
+ def downloadFinished(self, pyfile):
+ download_folder = self.config['general']['download_folder']
+ for script in self.scripts['download_finished']:
+ filename = safe_join(download_folder, pyfile.package().folder, pyfile.name)
+ self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.name, filename, pyfile.id)
+
+
+ def packageFinished(self, pypack):
+ download_folder = self.config['general']['download_folder']
+ for script in self.scripts['package_finished']:
+ folder = safe_join(download_folder, pypack.folder)
+ self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
+
+
+ def beforeReconnecting(self, ip):
+ for script in self.scripts['before_reconnect']:
+ self.callScript(script, ip)
+
+
+ def afterReconnecting(self, ip):
+ for script in self.scripts['after_reconnect']:
+ self.callScript(script, ip)
+
+
+ def archive_extracted(self, pyfile, folder, filename, files):
+ for script in self.scripts['archive_extracted']:
+ self.callScript(script, folder, filename, files)
+ for script in self.scripts['unrar_finished']: #: deprecated
+ self.callScript(script, folder, filename)
+
+
+ def package_extracted(self, pypack):
+ download_folder = self.config['general']['download_folder']
+ for script in self.scripts['package_extracted']:
+ folder = safe_join(download_folder, pypack.folder)
+ self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
+
+
+ def all_archives_extracted(self):
+ for script in self.scripts['all_archives_extracted']:
+ self.callScript(script)
+
+
+ def all_archives_processed(self):
+ for script in self.scripts['all_archives_processed']:
+ self.callScript(script)
+
+
+ def allDownloadsFinished(self):
+ for script in chain(self.scripts['all_downloads_finished'], self.scripts['all_dls_finished']):
+ self.callScript(script)
+
+
+ def allDownloadsProcessed(self):
+ for script in chain(self.scripts['all_downloads_processed'], self.scripts['all_dls_processed']):
+ self.callScript(script)
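
callScript() passes every value as a plain argv string, so a hook script only needs to read sys.argv. A hypothetical scripts/download_finished/notify.py (it must be executable, see the os.X_OK check above) would receive plugin name, url, file name, full path and file id:

    #!/usr/bin/env python
    # hypothetical scripts/download_finished/notify.py
    import sys

    plugin, url, name, filepath, fid = sys.argv[1:6]

    with open("/tmp/pyload_finished.log", "a") as log:   # made-up log location
        log.write("%s finished %s -> %s\n" % (plugin, name, filepath))
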
diff --git a/pyload/plugin/addon/ExtractArchive.py b/pyload/plugin/addon/ExtractArchive.py
new file mode 100644
index 000000000..951434e3e
--- /dev/null
+++ b/pyload/plugin/addon/ExtractArchive.py
@@ -0,0 +1,504 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import os
+import sys
+
+from copy import copy
+from traceback import print_exc
+
+# monkey patch bug in python 2.6 and lower
+# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
+if sys.version_info < (2, 7) and os.name != "nt":
+ import errno
+
+ from subprocess import Popen
+
+ def _eintr_retry_call(func, *args):
+ while True:
+ try:
+ return func(*args)
+
+ except OSError, e:
+ if e.errno == errno.EINTR:
+ continue
+ raise
+
+
+ # unused timeout option for older Python versions
+ def wait(self, timeout=0):
+ """Wait for child process to terminate. Returns returncode
+ attribute."""
+ if self.returncode is None:
+ try:
+ pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
+ except OSError, e:
+ if e.errno != errno.ECHILD:
+ raise
+ # This happens if SIGCLD is set to be ignored or waiting
+ # for child processes has otherwise been disabled for our
+ # process. This child is dead, we can't get the status.
+ sts = 0
+ self._handle_exitstatus(sts)
+ return self.returncode
+
+ Popen.wait = wait
+
+if os.name != "nt":
+ from grp import getgrnam
+ from pwd import getpwnam
+
+from pyload.plugin.Addon import Addon, threaded, Expose
+from pyload.plugin.internal.Extractor import ArchiveError, CRCError, PasswordError
+from pyload.plugin.internal.SimpleHoster import replace_patterns
+from pyload.utils import fs_encode, safe_join, uniqify
+
+
+class ArchiveQueue(object):
+
+ def __init__(self, plugin, storage):
+ self.plugin = plugin
+ self.storage = storage
+
+
+ def get(self):
+ try:
+ return [int(pid) for pid in self.plugin.getStorage("ExtractArchive:%s" % self.storage, "").decode('base64').split()]
+ except Exception:
+ return []
+
+
+ def set(self, value):
+ if isinstance(value, list):
+ item = str(value)[1:-1].replace(' ', '').replace(',', ' ')
+ else:
+ item = str(value).strip()
+ return self.plugin.setStorage("ExtractArchive:%s" % self.storage, item.encode('base64')[:-1])
+
+
+ def delete(self):
+ return self.plugin.delStorage("ExtractArchive:%s" % self.storage)
+
+
+ def add(self, item):
+ queue = self.get()
+ if item not in queue:
+ return self.set(queue + [item])
+ else:
+ return True
+
+
+ def remove(self, item):
+ queue = self.get()
+ try:
+ queue.remove(item)
+ except ValueError:
+ pass
+ if queue == []:
+ return self.delete()
+ return self.set(queue)
+
+
+
+class ExtractArchive(Addon):
+ __name__ = "ExtractArchive"
+ __type__ = "addon"
+ __version__ = "1.29"
+
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("fullpath" , "bool" , "Extract with full paths" , True ),
+ ("overwrite" , "bool" , "Overwrite files" , False ),
+ ("keepbroken" , "bool" , "Try to extract broken archives" , False ),
+ ("repair" , "bool" , "Repair broken archives" , True ),
+ ("usepasswordfile" , "bool" , "Use password file" , True ),
+ ("passwordfile" , "file" , "Password file" , "archive_password.txt" ),
+ ("delete" , "bool" , "Delete archive when successfully extracted", False ),
+ ("subfolder" , "bool" , "Create subfolder for each package" , False ),
+ ("destination" , "folder", "Extract files to folder" , "" ),
+ ("extensions" , "str" , "Extract the following extensions" , "7z,bz2,bzip2,gz,gzip,lha,lzh,lzma,rar,tar,taz,tbz,tbz2,tgz,xar,xz,z,zip"),
+ ("excludefiles" , "str" , "Don't extract the following files" , "*.nfo,*.DS_Store,index.dat,thumb.db" ),
+ ("recursive" , "bool" , "Extract archives in archives" , True ),
+ ("waitall" , "bool" , "Wait for all downloads to be finished" , False ),
+ ("renice" , "int" , "CPU priority" , 0 )]
+
+ __description__ = """Extract different kind of archives"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com"),
+ ("Immenz" , "immenz@gmx.net" )]
+
+
+ event_list = ["allDownloadsProcessed"]
+
+ NAME_REPLACEMENTS = [(r'\.part\d+\.rar$', ".part.rar")]
+
+
+ def setup(self):
+ self.queue = ArchiveQueue(self, "Queue")
+ self.failed = ArchiveQueue(self, "Failed")
+
+ self.interval = 60
+ self.extracting = False
+ self.extractors = []
+ self.passwords = []
+
+
+ def activate(self):
+ # self.extracting = False
+
+ for p in ("UnRar", "SevenZip", "UnZip"):
+ try:
+ module = self.core.pluginManager.loadModule("internal", p)
+ klass = getattr(module, p)
+ if klass.isUsable():
+ self.extractors.append(klass)
+
+ except OSError, e:
+ if e.errno == 2:
+ self.logInfo(_("No %s installed") % p)
+ else:
+ self.logWarning(_("Could not activate: %s") % p, e)
+ if self.core.debug:
+ print_exc()
+
+ except Exception, e:
+ self.logWarning(_("Could not activate: %s") % p, e)
+ if self.core.debug:
+ print_exc()
+
+ if self.extractors:
+ self.logInfo(_("Activated") + " " + "|".join("%s %s" % (Extractor.__name__,Extractor.VERSION) for Extractor in self.extractors))
+
+ if self.getConfig("waitall"):
+ self.extractPackage(*self.queue.get()) #: Resume unfinished extractions
+ else:
+ super(ExtractArchive, self).initPeriodical()
+
+ else:
+ self.logInfo(_("No Extract plugins activated"))
+
+
+ def periodical(self):
+ if not self.extracting:
+ self.extractPackage(*self.queue.get())
+
+
+ @Expose
+ def extractPackage(self, *ids):
+ """ Extract packages with given id"""
+ self.manager.startThread(self.extract, ids)
+
+
+ def packageFinished(self, pypack):
+ self.queue.add(pypack.id)
+
+
+ @threaded
+ def allDownloadsProcessed(self, thread):
+ if self.extract(self.queue.get(), thread): #@NOTE: check only if all gone fine, no failed reporting for now
+ self.manager.dispatchEvent("all_archives_extracted")
+
+ self.manager.dispatchEvent("all_archives_processed")
+
+
+ def extract(self, ids, thread=None):
+ if not ids:
+ return False
+
+ self.extracting = True
+
+ processed = []
+ extracted = []
+ failed = []
+
+ toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')
+
+ destination = self.getConfig("destination")
+ subfolder = self.getConfig("subfolder")
+ fullpath = self.getConfig("fullpath")
+ overwrite = self.getConfig("overwrite")
+ renice = self.getConfig("renice")
+ recursive = self.getConfig("recursive")
+ delete = self.getConfig("delete")
+ keepbroken = self.getConfig("keepbroken")
+
+ extensions = [x.lstrip('.').lower() for x in toList(self.getConfig("extensions"))]
+ excludefiles = toList(self.getConfig("excludefiles"))
+
+ if extensions:
+ self.logDebug("Use for extensions: %s" % "|.".join(extensions))
+
+ # reload from txt file
+ self.reloadPasswords()
+
+ # dl folder
+ dl = self.config['general']['download_folder']
+
+ #iterate packages -> extractors -> targets
+ for pid in ids:
+ pypack = self.core.files.getPackage(pid)
+
+ if not pypack:
+ continue
+
+ self.logInfo(_("Check package: %s") % pypack.name)
+
+ # determine output folder
+ out = safe_join(dl, pypack.folder, destination, "") #: force trailing slash
+
+ if subfolder:
+ out = safe_join(out, pypack.folder)
+
+ if not os.path.exists(out):
+ os.makedirs(out)
+
+ matched = False
+ success = True
+ files_ids = [(safe_join(dl, pypack.folder, pylink['name']), pylink['id'], out) for pylink in pypack.getChildren().itervalues()]
+
+ # check as long there are unseen files
+ while files_ids:
+ new_files_ids = []
+
+ if extensions:
+ files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
+ if filter(lambda ext: fname.lower().endswith(ext), extensions)]
+
+ for Extractor in self.extractors:
+ targets = Extractor.getTargets(files_ids)
+ if targets:
+ self.logDebug("Targets for %s: %s" % (Extractor.__name__, targets))
+ matched = True
+
+ for fname, fid, fout in targets:
+ name = os.path.basename(fname)
+
+ if not os.path.exists(fname):
+ self.logDebug(name, "File not found")
+ continue
+
+ self.logInfo(name, _("Extract to: %s") % fout)
+ try:
+ archive = Extractor(self,
+ fname,
+ fout,
+ fullpath,
+ overwrite,
+ excludefiles,
+ renice,
+ delete,
+ keepbroken,
+ fid)
+ archive.init()
+
+ new_files = self._extract(archive, fid, pypack.password, thread)
+
+ except Exception, e:
+ self.logError(name, e)
+ success = False
+ continue
+
+ files_ids.remove((fname, fid, fout)) # don't let other extractors spam log
+ self.logDebug("Extracted files: %s" % new_files)
+ self.setPermissions(new_files)
+
+ for filename in new_files:
+ file = fs_encode(safe_join(os.path.dirname(archive.filename), filename))
+ if not os.path.exists(file):
+ self.logDebug("New file %s does not exists" % filename)
+ continue
+
+ if recursive and os.path.isfile(file):
+ new_files_ids.append((filename, fid, os.path.dirname(filename))) # append as new target
+
+ files_ids = new_files_ids # also check extracted files
+
+ if matched:
+ if success:
+ extracted.append(pid)
+ self.manager.dispatchEvent("package_extracted", pypack)
+ else:
+ failed.append(pid)
+ self.manager.dispatchEvent("package_extract_failed", pypack)
+
+ self.failed.add(pid)
+ else:
+ self.logInfo(_("No files found to extract"))
+
+ if not matched or not success and subfolder:
+ try:
+ os.rmdir(out)
+
+ except OSError:
+ pass
+
+ self.queue.remove(pid)
+
+ self.extracting = False
+ return True if not failed else False
+
+
+ def _extract(self, archive, fid, password, thread):
+ pyfile = self.core.files.getFile(fid)
+ name = os.path.basename(archive.filename)
+
+ thread.addActive(pyfile)
+ pyfile.setStatus("processing")
+
+ encrypted = False
+ try:
+ try:
+ archive.check()
+
+ except CRCError, e:
+ self.logDebug(name, e)
+ self.logInfo(name, _("Header protected"))
+
+ if self.getConfig("repair"):
+ self.logWarning(name, _("Repairing..."))
+
+ pyfile.setCustomStatus(_("repairing"))
+ pyfile.setProgress(0)
+
+ repaired = archive.repair()
+
+ pyfile.setProgress(100)
+
+ if not repaired and not self.getConfig("keepbroken"):
+ raise CRCError("Archive damaged")
+
+ except PasswordError:
+ self.logInfo(name, _("Password protected"))
+ encrypted = True
+
+ except ArchiveError, e:
+ raise ArchiveError(e)
+
+ self.logDebug("Password: %s" % (password or "No provided"))
+
+ pyfile.setCustomStatus(_("extracting"))
+ pyfile.setProgress(0)
+
+ if not encrypted or not self.getConfig("usepasswordfile"):
+ archive.extract(password)
+ else:
+ for pw in filter(None, uniqify([password] + self.getPasswords(False))):
+ try:
+ self.logDebug("Try password: %s" % pw)
+
+ ispw = archive.isPassword(pw)
+ if ispw or ispw is None:
+ archive.extract(pw)
+ self.addPassword(pw)
+ break
+
+ except PasswordError:
+ self.logDebug("Password was wrong")
+ else:
+ raise PasswordError
+
+ pyfile.setProgress(100)
+ pyfile.setCustomStatus(_("finalizing"))
+
+ if self.core.debug:
+ self.logDebug("Would delete: %s" % ", ".join(archive.getDeleteFiles()))
+
+ if self.getConfig("delete"):
+ files = archive.getDeleteFiles()
+ self.logInfo(_("Deleting %s files") % len(files))
+ for f in files:
+ file = fs_encode(f)
+ if os.path.exists(file):
+ os.remove(file)
+ else:
+ self.logDebug("%s does not exists" % f)
+
+ self.logInfo(name, _("Extracting finished"))
+
+ extracted_files = archive.files or archive.list()
+ self.manager.dispatchEvent("archive_extracted", pyfile, archive.out, archive.filename, extracted_files)
+
+ return extracted_files
+
+ except PasswordError:
+ self.logError(name, _("Wrong password" if password else "No password found"))
+
+ except CRCError, e:
+ self.logError(name, _("CRC mismatch"), e)
+
+ except ArchiveError, e:
+ self.logError(name, _("Archive error"), e)
+
+ except Exception, e:
+ self.logError(name, _("Unknown error"), e)
+ if self.core.debug:
+ print_exc()
+
+ finally:
+ pyfile.finishIfDone()
+
+ self.manager.dispatchEvent("archive_extract_failed", pyfile)
+
+ raise Exception(_("Extract failed"))
+
+
+ @Expose
+ def getPasswords(self, reload=True):
+ """ List of saved passwords """
+ if reload:
+ self.reloadPasswords()
+
+ return self.passwords
+
+
+ def reloadPasswords(self):
+ try:
+ passwords = []
+
+ file = fs_encode(self.getConfig("passwordfile"))
+ with open(file) as f:
+ for pw in f.read().splitlines():
+ passwords.append(pw)
+
+ except IOError, e:
+ self.logError(e)
+
+ else:
+ self.passwords = passwords
+
+
+ @Expose
+ def addPassword(self, password):
+ """ Adds a password to saved list"""
+ try:
+ self.passwords = uniqify([password] + self.passwords)
+
+ file = fs_encode(self.getConfig("passwordfile"))
+ with open(file, "wb") as f:
+ for pw in self.passwords:
+ f.write(pw + '\n')
+
+ except IOError, e:
+ self.logError(e)
+
+
+ def setPermissions(self, files):
+ for f in files:
+ if not os.path.exists(f):
+ continue
+
+ try:
+ if self.config['permission']['change_file']:
+ if os.path.isfile(f):
+ os.chmod(f, int(self.config['permission']['file'], 8))
+
+ elif os.path.isdir(f):
+ os.chmod(f, int(self.config['permission']['folder'], 8))
+
+ if self.config['permission']['change_dl'] and os.name != "nt":
+ uid = getpwnam(self.config['permission']['user'])[2]
+ gid = getgrnam(self.config['permission']['group'])[2]
+ os.chown(f, uid, gid)
+
+ except Exception, e:
+ self.logWarning(_("Setting User and Group failed"), e)
diff --git a/pyload/plugin/addon/HotFolder.py b/pyload/plugin/addon/HotFolder.py
new file mode 100644
index 000000000..eb607ac7e
--- /dev/null
+++ b/pyload/plugin/addon/HotFolder.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import time
+
+from os import listdir, makedirs
+from os.path import exists, isfile, join
+from shutil import move
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import fs_encode, safe_join
+
+
+class HotFolder(Addon):
+ __name__ = "HotFolder"
+ __type__ = "addon"
+ __version__ = "0.12"
+
+ __config__ = [("folder" , "str" , "Folder to observe" , "container"),
+ ("watch_file", "bool", "Observe link file" , False ),
+ ("keep" , "bool", "Keep added containers", True ),
+ ("file" , "str" , "Link file" , "links.txt")]
+
+ __description__ = """Observe folder and file for changes and add container and links"""
+ __license__ = "GPLv3"
+ __authors__ = [("RaNaN", "RaNaN@pyload.de")]
+
+
+ def setup(self):
+ self.interval = 10
+
+
+ def activate(self):
+ self.initPeriodical()
+
+
+ def periodical(self):
+ folder = fs_encode(self.getConfig("folder"))
+
+ try:
+ if not exists(join(folder, "finished")):
+ makedirs(join(folder, "finished"))
+
+ if self.getConfig("watch_file"):
+ file = fs_encode(self.getConfig("file"))
+ with open(file, "a+") as f:
+ content = f.read().strip()
+
+ if content:
+ name = "%s_%s.txt" % (self.getConfig("file"), time.strftime("%H-%M-%S_%d%b%Y"))
+
+ with open(safe_join(folder, "finished", name), "wb") as f:
+ f.write(content)
+
+ self.core.api.addPackage(f.name, [f.name], 1)
+
+ for f in listdir(folder):
+ path = join(folder, f)
+
+ if not isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
+ continue
+
+ newpath = join(folder, "finished", f if self.getConfig("keep") else "tmp_" + f)
+ move(path, newpath)
+
+ self.logInfo(_("Added %s from HotFolder") % f)
+ self.core.api.addPackage(f, [newpath], 1)
+
+ except IOError, e:
+ self.logError(e)
diff --git a/pyload/plugin/addon/IRCInterface.py b/pyload/plugin/addon/IRCInterface.py
new file mode 100644
index 000000000..86d9ea688
--- /dev/null
+++ b/pyload/plugin/addon/IRCInterface.py
@@ -0,0 +1,431 @@
+# -*- coding: utf-8 -*-
+
+import re
+import socket
+import ssl
+import time
+
+from pycurl import FORM_FILE
+from select import select
+from threading import Thread
+from time import sleep
+from traceback import print_exc
+
+from pyload.api import PackageDoesNotExists, FileDoesNotExists
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Addon
+from pyload.utils import formatSize
+
+
+class IRCInterface(Thread, Addon):
+ __name__ = "IRCInterface"
+ __type__ = "addon"
+ __version__ = "0.13"
+
+ __config__ = [("host" , "str" , "IRC-Server Address" , "Enter your server here!"),
+ ("port" , "int" , "IRC-Server Port" , 6667 ),
+ ("ident" , "str" , "Clients ident" , "pyload-irc" ),
+ ("realname" , "str" , "Realname" , "pyload-irc" ),
+ ("ssl" , "bool", "Use SSL" , False ),
+ ("nick" , "str" , "Nickname the Client will take" , "pyLoad-IRC" ),
+ ("owner" , "str" , "Nickname the Client will accept commands from", "Enter your nick here!" ),
+ ("info_file", "bool", "Inform about every file finished" , False ),
+ ("info_pack", "bool", "Inform about every package finished" , True ),
+ ("captcha" , "bool", "Send captcha requests" , True )]
+
+ __description__ = """Connect to irc and let owner perform different tasks"""
+ __license__ = "GPLv3"
+ __authors__ = [("Jeix", "Jeix@hasnomail.com")]
+
+
+ def __init__(self, core, manager):
+ Thread.__init__(self)
+ Addon.__init__(self, core, manager)
+ self.setDaemon(True)
+
+
+ def activate(self):
+ self.abort = False
+ self.more = []
+ self.new_package = {}
+
+ self.start()
+
+
+ def packageFinished(self, pypack):
+ try:
+ if self.getConfig("info_pack"):
+ self.response(_("Package finished: %s") % pypack.name)
+ except Exception:
+ pass
+
+
+ def downloadFinished(self, pyfile):
+ try:
+ if self.getConfig("info_file"):
+ self.response(
+ _("Download finished: %(name)s @ %(plugin)s ") % {"name": pyfile.name, "plugin": pyfile.pluginname})
+ except Exception:
+ pass
+
+
+ def captchaTask(self, task):
+ if self.getConfig("captcha") and task.isTextual():
+ task.handler.append(self)
+ task.setWaiting(60)
+
+ html = getURL("http://www.freeimagehosting.net/upload.php",
+ post={"attached": (FORM_FILE, task.captchaFile)}, multipart=True)
+
+ url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", html).group(1)
+ self.response(_("New Captcha Request: %s") % url)
+ self.response(_("Answer with 'c %s text on the captcha'") % task.id)
+
+
+ def run(self):
+ # connect to IRC etc.
+ self.sock = socket.socket()
+ host = self.getConfig("host")
+ self.sock.connect((host, self.getConfig("port")))
+
+ if self.getConfig("ssl"):
+ self.sock = ssl.wrap_socket(self.sock, cert_reqs=ssl.CERT_NONE) #@TODO: support custom certificate
+
+ nick = self.getConfig("nick")
+ self.sock.send("NICK %s\r\n" % nick)
+ self.sock.send("USER %s %s bla :%s\r\n" % (nick, host, nick))
+ for t in self.getConfig("owner").split():
+ if t.strip().startswith("#"):
+ self.sock.send("JOIN %s\r\n" % t.strip())
+ self.logInfo(_("Connected to"), host)
+ self.logInfo(_("Switching to listening mode!"))
+ try:
+ self.main_loop()
+
+ except IRCError, ex:
+ self.sock.send("QUIT :byebye\r\n")
+ print_exc()
+ self.sock.close()
+
+
+ def main_loop(self):
+ readbuffer = ""
+ while True:
+ sleep(1)
+ fdset = select([self.sock], [], [], 0)
+ if self.sock not in fdset[0]:
+ continue
+
+ if self.abort:
+ raise IRCError("quit")
+
+ readbuffer += self.sock.recv(1024)
+ temp = readbuffer.split("\n")
+ readbuffer = temp.pop()
+
+ for line in temp:
+ line = line.rstrip()
+ first = line.split()
+
+ if first[0] == "PING":
+ self.sock.send("PONG %s\r\n" % first[1])
+
+ if first[0] == "ERROR":
+ raise IRCError(line)
+
+ msg = line.split(None, 3)
+ if len(msg) < 4:
+ continue
+
+ msg = {
+ "origin": msg[0][1:],
+ "action": msg[1],
+ "target": msg[2],
+ "text": msg[3][1:]
+ }
+
+ self.handle_events(msg)
+
+
+ def handle_events(self, msg):
+ if not msg['origin'].split("!", 1)[0] in self.getConfig("owner").split():
+ return
+
+ if msg['target'].split("!", 1)[0] != self.getConfig("nick"):
+ return
+
+ if msg['action'] != "PRIVMSG":
+ return
+
+ # HANDLE CTCP ANTI FLOOD/BOT PROTECTION
+ if msg['text'] == "\x01VERSION\x01":
+ self.logDebug("Sending CTCP VERSION")
+ self.sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
+ return
+ elif msg['text'] == "\x01TIME\x01":
+ self.logDebug("Sending CTCP TIME")
+ self.sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
+ return
+ elif msg['text'] == "\x01LAG\x01":
+ self.logDebug("Received CTCP LAG") #: don't know how to answer
+ return
+
+ trigger = "pass"
+ args = None
+
+ try:
+ temp = msg['text'].split()
+ trigger = temp[0]
+ if len(temp) > 1:
+ args = temp[1:]
+ except Exception:
+ pass
+
+ handler = getattr(self, "event_%s" % trigger, self.event_pass)
+ try:
+ res = handler(args)
+ for line in res:
+ self.response(line, msg['origin'])
+ except Exception, e:
+ self.logError(e)
+
+
+ def response(self, msg, origin=""):
+ if origin == "":
+ for t in self.getConfig("owner").split():
+ self.sock.send("PRIVMSG %s :%s\r\n" % (t.strip(), msg))
+ else:
+ self.sock.send("PRIVMSG %s :%s\r\n" % (origin.split("!", 1)[0], msg))
+
+
+ #### Events
+
+ def event_pass(self, args):
+ return []
+
+
+ def event_status(self, args):
+ downloads = self.core.api.statusDownloads()
+ if not downloads:
+ return ["INFO: There are no active downloads currently."]
+
+ temp_progress = ""
+ lines = ["ID - Name - Status - Speed - ETA - Progress"]
+ for data in downloads:
+
+ if data.status == 5:
+ temp_progress = data.format_wait
+ else:
+ temp_progress = "%d%% (%s)" % (data.percent, data.format_size)
+
+ lines.append("#%d - %s - %s - %s - %s - %s" %
+ (
+ data.fid,
+ data.name,
+ data.statusmsg,
+ "%s/s" % formatSize(data.speed),
+ "%s" % data.format_eta,
+ temp_progress
+ ))
+ return lines
+
+
+ def event_queue(self, args):
+ ps = self.core.api.getQueueData()
+
+ if not ps:
+ return ["INFO: There are no packages in queue."]
+
+ lines = []
+ for pack in ps:
+ lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
+
+ return lines
+
+
+ def event_collector(self, args):
+ ps = self.core.api.getCollectorData()
+ if not ps:
+ return ["INFO: No packages in collector!"]
+
+ lines = []
+ for pack in ps:
+ lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
+
+ return lines
+
+
+ def event_info(self, args):
+ if not args:
+ return ["ERROR: Use info like this: info <id>"]
+
+ info = None
+ try:
+ info = self.core.api.getFileData(int(args[0]))
+
+ except FileDoesNotExists:
+ return ["ERROR: Link doesn't exists."]
+
+ return ['LINK #%s: %s (%s) [%s][%s]' % (info.fid, info.name, info.format_size, info.statusmsg, info.plugin)]
+
+
+ def event_packinfo(self, args):
+ if not args:
+ return ["ERROR: Use packinfo like this: packinfo <id>"]
+
+ lines = []
+ pack = None
+ try:
+ pack = self.core.api.getPackageData(int(args[0]))
+
+ except PackageDoesNotExists:
+ return ["ERROR: Package doesn't exists."]
+
+ id = args[0]
+
+ self.more = []
+
+ lines.append('PACKAGE #%s: "%s" with %d links' % (id, pack.name, len(pack.links)))
+ for pyfile in pack.links:
+ self.more.append('LINK #%s: %s (%s) [%s][%s]' % (pyfile.fid, pyfile.name, pyfile.format_size,
+ pyfile.statusmsg, pyfile.plugin))
+
+ if len(self.more) < 6:
+ lines.extend(self.more)
+ self.more = []
+ else:
+ lines.extend(self.more[:6])
+ self.more = self.more[6:]
+ lines.append("%d more links do display." % len(self.more))
+
+ return lines
+
+
+ def event_more(self, args):
+ if not self.more:
+ return ["No more information to display."]
+
+ lines = self.more[:6]
+ self.more = self.more[6:]
+ lines.append("%d more links do display." % len(self.more))
+
+ return lines
+
+
+ def event_start(self, args):
+ self.core.api.unpauseServer()
+ return ["INFO: Starting downloads."]
+
+
+ def event_stop(self, args):
+ self.core.api.pauseServer()
+ return ["INFO: No new downloads will be started."]
+
+
+ def event_add(self, args):
+ if len(args) < 2:
+ return ['ERROR: Add links like this: "add <packagename|id> links". ',
+ "This will add the link <link> to to the package <package> / the package with id <id>!"]
+
+ pack = args[0].strip()
+ links = [x.strip() for x in args[1:]]
+
+ count_added = 0
+ count_failed = 0
+ try:
+ id = int(pack)
+ pack = self.core.api.getPackageData(id)
+ if not pack:
+ return ["ERROR: Package doesn't exists."]
+
+ #TODO add links
+
+ return ["INFO: Added %d links to Package %s [#%d]" % (len(links), pack['name'], id)]
+
+ except Exception:
+ # create new package
+ id = self.core.api.addPackage(pack, links, 1)
+ return ["INFO: Created new Package %s [#%d] with %d links." % (pack, id, len(links))]
+
+
+ def event_del(self, args):
+ if len(args) < 2:
+ return ["ERROR: Use del command like this: del -p|-l <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
+
+ if args[0] == "-p":
+ ret = self.core.api.deletePackages(map(int, args[1:]))
+ return ["INFO: Deleted %d packages!" % len(args[1:])]
+
+ elif args[0] == "-l":
+ ret = self.core.api.delLinks(map(int, args[1:]))
+ return ["INFO: Deleted %d links!" % len(args[1:])]
+
+ else:
+ return ["ERROR: Use del command like this: del <-p|-l> <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
+
+
+ def event_push(self, args):
+ if not args:
+ return ["ERROR: Push package to queue like this: push <package id>"]
+
+ id = int(args[0])
+ try:
+ info = self.core.api.getPackageInfo(id)
+ except PackageDoesNotExists:
+ return ["ERROR: Package #%d does not exist." % id]
+
+ self.core.api.pushToQueue(id)
+ return ["INFO: Pushed package #%d to queue." % id]
+
+
+ def event_pull(self, args):
+ if not args:
+ return ["ERROR: Pull package from queue like this: pull <package id>."]
+
+ id = int(args[0])
+ if not self.core.api.getPackageData(id):
+ return ["ERROR: Package #%d does not exist." % id]
+
+ self.core.api.pullFromQueue(id)
+ return ["INFO: Pulled package #%d from queue to collector." % id]
+
+
+ def event_c(self, args):
+ """ captcha answer """
+ if not args:
+ return ["ERROR: Captcha ID missing."]
+
+ task = self.core.captchaManager.getTaskByID(args[0])
+ if not task:
+ return ["ERROR: Captcha Task with ID %s does not exists." % args[0]]
+
+ task.setResult(" ".join(args[1:]))
+ return ["INFO: Result %s saved." % " ".join(args[1:])]
+
+
+ def event_help(self, args):
+ lines = ["The following commands are available:",
+ "add <package|packid> <links> [...] Adds link to package. (creates new package if it does not exist)",
+ "queue Shows all packages in the queue",
+ "collector Shows all packages in collector",
+ "del -p|-l <id> [...] Deletes all packages|links with the ids specified",
+ "info <id> Shows info of the link with id <id>",
+ "packinfo <id> Shows info of the package with id <id>",
+ "more Shows more info when the result was truncated",
+ "start Starts all downloads",
+ "stop Stops the download (but not abort active downloads)",
+ "push <id> Push package to queue",
+ "pull <id> Pull package from queue",
+ "status Show general download status",
+ "help Shows this help message"]
+ return lines
+
+
+class IRCError(Exception):
+
+ def __init__(self, value):
+ self.value = value
+
+
+ def __str__(self):
+ return repr(self.value)
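
main_loop() turns each raw IRC line into the small dict that handle_events() dispatches on; parsing a sample PRIVMSG by hand shows the fields:

    line = ":owner!ident@host PRIVMSG pyLoad-IRC :status"   # made-up raw IRC line

    parts = line.split(None, 3)
    msg = {
        "origin": parts[0][1:],    # owner!ident@host
        "action": parts[1],        # PRIVMSG
        "target": parts[2],        # pyLoad-IRC
        "text"  : parts[3][1:],    # status  (leading ':' stripped)
    }

    # handle_events() then checks origin/target and routes "status" to event_status(),
    # "add ..." to event_add(), and so on via getattr(self, "event_%s" % trigger).
    print(msg["origin"].split("!", 1)[0])   # owner
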
diff --git a/pyload/plugin/addon/JustPremium.py b/pyload/plugin/addon/JustPremium.py
new file mode 100644
index 000000000..d3c4d8eff
--- /dev/null
+++ b/pyload/plugin/addon/JustPremium.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Addon import Addon
+
+
+class JustPremium(Addon):
+ __name__ = "JustPremium"
+ __type__ = "addon"
+ __version__ = "0.21"
+
+ __config__ = [("excluded", "str", "Exclude hosters (comma separated)", "")]
+
+ __description__ = """Remove all not premium links from urls added"""
+ __license__ = "GPLv3"
+ __authors__ = [("mazleu", "mazleica@gmail.com"),
+ ("Walter Purcaro", "vuolter@gmail.com"),
+ ("immenz", "immenz@gmx.net")]
+
+
+ event_list = ["linksAdded"]
+
+
+ def linksAdded(self, links, pid):
+ hosterdict = self.core.pluginManager.hosterPlugins
+ linkdict = self.core.api.checkURLs(links)
+
+ premiumplugins = set(account.type for account in self.core.api.getAccounts(False) \
+ if account.valid and account.premium)
+ multihosters = set(hoster for hoster in self.core.pluginManager.hosterPlugins \
+ if 'new_name' in hosterdict[hoster] \
+ and hosterdict[hoster]['new_name'] in premiumplugins)
+
+ #: Found at least one hoster with account or multihoster
+ if not any(True for pluginname in linkdict if pluginname in premiumplugins | multihosters):
+ return
+
+ excluded = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
+ self.getConfig('excluded').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
+
+ for pluginname in set(linkdict.keys()) - (premiumplugins | multihosters).union(excluded):
+ self.logInfo(_("Remove links of plugin: %s") % pluginname)
+ for link in linkdict[pluginname]:
+ self.logDebug("Remove link: %s" % link)
+ links.remove(link)
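
How the excluded config value is normalized into capitalized, plugin-style names before being subtracted from the premium/multihoster set (the sample domains are made up):

    import re

    excluded = "uploaded.net, rapidgator.net"

    names = ["".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.')
             for domain in excluded.replace(' ', '').replace(',', '|').replace(';', '|').split('|')]

    print(names)   # ['UploadedNet', 'RapidgatorNet']
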
diff --git a/pyload/plugin/addon/MergeFiles.py b/pyload/plugin/addon/MergeFiles.py
new file mode 100644
index 000000000..11e869aee
--- /dev/null
+++ b/pyload/plugin/addon/MergeFiles.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import os
+import re
+
+from traceback import print_exc
+
+from pyload.plugin.Addon import Addon, threaded
+from pyload.utils import safe_join
+
+
+class MergeFiles(Addon):
+ __name__ = "MergeFiles"
+ __type__ = "addon"
+ __version__ = "0.14"
+
+ __config__ = [("activated", "bool", "Activated", True)]
+
+ __description__ = """Merges parts splitted with hjsplit"""
+ __license__ = "GPLv3"
+ __authors__ = [("and9000", "me@has-no-mail.com")]
+
+
+ BUFFER_SIZE = 4096
+
+
+ def setup(self):
+ pass
+
+
+ @threaded
+ def packageFinished(self, pack):
+ files = {}
+ fid_dict = {}
+ for fid, data in pack.getChildren().iteritems():
+ if re.search("\.\d{3}$", data['name']):
+ if data['name'][:-4] not in files:
+ files[data['name'][:-4]] = []
+ files[data['name'][:-4]].append(data['name'])
+ files[data['name'][:-4]].sort()
+ fid_dict[data['name']] = fid
+
+ download_folder = self.config['general']['download_folder']
+
+ if self.config['general']['folder_per_package']:
+ download_folder = safe_join(download_folder, pack.folder)
+
+ for name, file_list in files.iteritems():
+ self.logInfo(_("Starting merging of"), name)
+
+ final_file = open(safe_join(download_folder, name), "wb")
+ for splitted_file in file_list:
+ self.logDebug("Merging part", splitted_file)
+
+ pyfile = self.core.files.getFile(fid_dict[splitted_file])
+
+ pyfile.setStatus("processing")
+
+ try:
+ with open(safe_join(download_folder, splitted_file), "rb") as s_file:
+ size_written = 0
+ s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file)))
+
+ while True:
+ f_buffer = s_file.read(self.BUFFER_SIZE)
+ if f_buffer:
+ final_file.write(f_buffer)
+ size_written += self.BUFFER_SIZE
+ pyfile.setProgress((size_written * 100) / s_file_size)
+ else:
+ break
+
+ self.logDebug("Finished merging part", splitted_file)
+
+ except Exception, e:
+ print_exc()
+
+ finally:
+ pyfile.setProgress(100)
+ pyfile.setStatus("finished")
+ pyfile.release()
+
+ self.logInfo(_("Finished merging of"), name)
diff --git a/pyload/plugin/addon/MultiHome.py b/pyload/plugin/addon/MultiHome.py
new file mode 100644
index 000000000..521749fc8
--- /dev/null
+++ b/pyload/plugin/addon/MultiHome.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+from time import time
+
+from pyload.plugin.Addon import Addon
+
+
+class MultiHome(Addon):
+ __name__ = "MultiHome"
+ __type__ = "addon"
+ __version__ = "0.12"
+
+ __config__ = [("interfaces", "str", "Interfaces", "None")]
+
+ __description__ = """Ip address changer"""
+ __license__ = "GPLv3"
+ __authors__ = [("mkaay", "mkaay@mkaay.de")]
+
+
+ def setup(self):
+ self.register = {}
+ self.interfaces = []
+ self.parseInterfaces(self.getConfig("interfaces").split(";"))
+ if not self.interfaces:
+ self.parseInterfaces([self.config['download']['interface']])
+ self.setConfig("interfaces", self.toConfig())
+
+
+ def toConfig(self):
+ return ";".join(i.adress for i in self.interfaces)
+
+
+ def parseInterfaces(self, interfaces):
+ for interface in interfaces:
+ if not interface or str(interface).lower() == "none":
+ continue
+ self.interfaces.append(Interface(interface))
+
+
+ def activate(self):
+ requestFactory = self.core.requestFactory
+ oldGetRequest = requestFactory.getRequest
+
+ def getRequest(pluginName, account=None):
+ iface = self.bestInterface(pluginName, account)
+ if iface:
+ iface.useFor(pluginName, account)
+ requestFactory.iface = lambda: iface.adress
+ self.logDebug("Using address", iface.adress)
+ return oldGetRequest(pluginName, account)
+
+ requestFactory.getRequest = getRequest
+
+
+ def bestInterface(self, pluginName, account):
+ best = None
+ for interface in self.interfaces:
+ if not best or interface.lastPluginAccess(pluginName, account) < best.lastPluginAccess(pluginName, account):
+ best = interface
+ return best
+
+
+class Interface(object):
+
+ def __init__(self, adress):
+ self.adress = adress
+ self.history = {}
+
+
+ def lastPluginAccess(self, pluginName, account):
+ if (pluginName, account) in self.history:
+ return self.history[(pluginName, account)]
+ return 0
+
+
+ def useFor(self, pluginName, account):
+ self.history[(pluginName, account)] = time()
+
+
+ def __repr__(self):
+ return "<Interface - %s>" % self.adress
diff --git a/pyload/plugin/addon/RestartFailed.py b/pyload/plugin/addon/RestartFailed.py
new file mode 100644
index 000000000..2fe5f13bf
--- /dev/null
+++ b/pyload/plugin/addon/RestartFailed.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Addon import Addon
+
+
+class RestartFailed(Addon):
+ __name__ = "RestartFailed"
+ __type__ = "addon"
+ __version__ = "1.57"
+
+ __config__ = [("activated", "bool", "Activated" , True),
+ ("interval" , "int" , "Check interval in minutes", 90 )]
+
+ __description__ = """Periodically restart all failed downloads in queue"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+
+ MIN_INTERVAL = 15 * 60 #: 15m minimum check interval (value is in seconds)
+
+
+ def pluginConfigChanged(self, plugin, name, value):
+ if name == "interval":
+ interval = value * 60
+ if self.MIN_INTERVAL <= interval != self.interval:
+ self.core.scheduler.removeJob(self.cb)
+ self.interval = interval
+ self.initPeriodical()
+ else:
+ self.logDebug("Invalid interval value, kept current")
+
+
+ def periodical(self):
+ self.logDebug(_("Restart failed downloads"))
+ self.core.api.restartFailed()
+
+
+ def setup(self):
+ self.interval = 0
+
+
+ def activate(self):
+ self.pluginConfigChanged(self.__name__, "interval", self.getConfig("interval"))
diff --git a/pyload/plugin/addon/RestartSlow.py b/pyload/plugin/addon/RestartSlow.py
new file mode 100644
index 000000000..332047da7
--- /dev/null
+++ b/pyload/plugin/addon/RestartSlow.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+import pycurl
+
+from pyload.plugin.Addon import Addon
+
+
+class RestartSlow(Addon):
+ __name__ = "RestartSlow"
+ __type__ = "addon"
+ __version__ = "0.04"
+
+ __config__ = [("free_limit" , "int" , "Transfer speed threshold in kilobytes" , 100 ),
+ ("free_time" , "int" , "Sample interval in minutes" , 5 ),
+ ("premium_limit", "int" , "Transfer speed threshold for premium download in kilobytes", 300 ),
+ ("premium_time" , "int" , "Sample interval for premium download in minutes" , 2 ),
+ ("safe_mode" , "bool", "Don't restart if download is not resumable" , True)]
+
+ __description__ = """Restart slow downloads"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ event_map = {'download-start': "downloadStarts"}
+
+
+ def setup(self):
+ self.info = {'chunk': {}}
+
+
+ def periodical(self):
+ if not self.pyfile.plugin.req.dl:
+ return
+
+ if self.getConfig("safe_mode") and not self.pyfile.plugin.resumeDownload:
+ time = 30
+ limit = 5
+ else:
+ type = "premium" if self.pyfile.plugin.premium else "free"
+ time = max(30, self.getConfig("%s_time" % type) * 60)
+ limit = max(5, self.getConfig("%s_limit" % type) * 1024)
+
+ chunks = [chunk for chunk in self.pyfile.plugin.req.dl.chunks \
+                  if chunk.id not in self.info['chunk'] or self.info['chunk'][chunk.id] != (time, limit)]
+
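+        # libcurl aborts a transfer whose average speed stays below
+        # LOW_SPEED_LIMIT (bytes/s) for LOW_SPEED_TIME seconds, so chronically
+        # slow chunks fail and the download can be retried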
+ for chunk in chunks:
+ chunk.c.setopt(pycurl.LOW_SPEED_TIME , time)
+ chunk.c.setopt(pycurl.LOW_SPEED_LIMIT, limit)
+
+ self.info['chunk'][chunk.id] = (time, limit)
+
+
+ def downloadStarts(self, pyfile, url, filename):
+ if self.cb or (self.getConfig("safe_mode") and not pyfile.plugin.resumeDownload):
+ return
+ self.pyfile = pyfile
+ self.initPeriodical()
diff --git a/pyload/plugin/addon/SkipRev.py b/pyload/plugin/addon/SkipRev.py
new file mode 100644
index 000000000..efc96cb7b
--- /dev/null
+++ b/pyload/plugin/addon/SkipRev.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+
+from types import MethodType
+from urllib import unquote
+from urlparse import urlparse
+
+from pyload.datatype.File import PyFile
+from pyload.plugin.Addon import Addon
+from pyload.plugin.Plugin import SkipDownload
+
+
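+# Replacement setup() bound onto the hoster plugin by downloadPreparing():
+# it runs the original setup (saved as plugin._setup) and then raises
+# SkipDownload if the file has been flagged as skipped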
+def _setup(self):
+ self.pyfile.plugin._setup()
+ if self.pyfile.hasStatus("skipped"):
+ raise SkipDownload(self.pyfile.statusname or self.pyfile.pluginname)
+
+
+class SkipRev(Addon):
+ __name__ = "SkipRev"
+ __type__ = "addon"
+ __version__ = "0.25"
+
+    __config__ = [("tokeep", "int", "Number of rev files to keep for the package (-1 for auto)", -1)]
+
+ __description__ = """Skip files ending with extension rev"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def _pyname(self, pyfile):
+ if hasattr(pyfile.pluginmodule, "getInfo"):
+ return getattr(pyfile.pluginmodule, "getInfo")([pyfile.url]).next()[0]
+ else:
+ self.logWarning("Unable to grab file name")
+ return urlparse(unquote(pyfile.url)).path.split('/')[-1]
+
+
+ def _pyfile(self, link):
+ return PyFile(self.core.files,
+ link.fid,
+ link.url,
+ link.name,
+ link.size,
+ link.status,
+ link.error,
+ link.plugin,
+ link.packageID,
+ link.order)
+
+
+ def downloadPreparing(self, pyfile):
+        if pyfile.statusname == "unskipped" or not self._pyname(pyfile).endswith(".rev"):
+ return
+
+ tokeep = self.getConfig("tokeep")
+
+ if tokeep:
+ status_list = (1, 4, 8, 9, 14) if tokeep < 0 else (1, 3, 4, 8, 9, 14)
+
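+            # Count how many other .rev links of this package still count as kept
+            # (status not in status_list) before deciding to skip this one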
+ queued = [True for link in self.core.api.getPackageData(pyfile.package().id).links \
+ if link.name.endswith(".rev") and link.status not in status_list].count(True)
+
+ if not queued or queued < tokeep: #: keep one rev at least in auto mode
+ return
+
+ pyfile.setCustomStatus("SkipRev", "skipped")
+ pyfile.plugin._setup = pyfile.plugin.setup
+ pyfile.plugin.setup = MethodType(_setup, pyfile.plugin) #: work-around: inject status checker inside the preprocessing routine of the plugin
+
+
+ def downloadFailed(self, pyfile):
+ #: Check if pyfile is still "failed",
+        # it might have been restarted in the meantime
+ if pyfile.status != 8:
+ return
+
+ tokeep = self.getConfig("tokeep")
+
+ if not tokeep:
+ return
+
+ for link in self.core.api.getPackageData(pyfile.package().id).links:
+            if link.status == 4 and link.name.endswith(".rev"):
+ pylink = self._pyfile(link)
+
+ if tokeep > -1 or pyfile.name.endswith(".rev"):
+ pylink.setStatus("queued")
+ else:
+ pylink.setCustomStatus("unskipped", "queued")
+
+ self.core.files.save()
+ pylink.release()
+ return
diff --git a/pyload/plugin/addon/UnSkipOnFail.py b/pyload/plugin/addon/UnSkipOnFail.py
new file mode 100644
index 000000000..7d787d1ed
--- /dev/null
+++ b/pyload/plugin/addon/UnSkipOnFail.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+from pyload.datatype.File import PyFile
+from pyload.plugin.Addon import Addon
+
+
+class UnSkipOnFail(Addon):
+ __name__ = "UnSkipOnFail"
+ __type__ = "addon"
+ __version__ = "0.05"
+
+ __config__ = [("activated", "bool", "Activated", True)]
+
+ __description__ = """Queue skipped duplicates when download fails"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def downloadFailed(self, pyfile):
+ #: Check if pyfile is still "failed",
+        # it might have been restarted in the meantime
+ if pyfile.status != 8:
+ return
+
+ msg = _("Looking for skipped duplicates of: %s (pid:%s)")
+ self.logInfo(msg % (pyfile.name, pyfile.package().id))
+
+ dup = self.findDuplicate(pyfile)
+ if dup:
+ self.logInfo(_("Queue found duplicate: %s (pid:%s)") % (dup.name, dup.packageID))
+
+            #: Requeue the skipped duplicate:
+            # build a temporary PyFile object from the duplicate's
+            # FileData, set its status to "queued" and tell the
+            # core.files manager to save the change.
+            pylink = self._pyfile(dup)
+
+ pylink.setCustomStatus("UnSkipOnFail", "queued")
+
+ self.core.files.save()
+ pylink.release()
+
+ else:
+ self.logInfo(_("No duplicates found"))
+
+
+ def findDuplicate(self, pyfile):
+        """ Search all packages for a duplicate link to "pyfile".
+            Duplicates are links that would overwrite "pyfile".
+            Two links count as duplicates when their package folders
+            and file names (link.name) are equal.
+            Returns the first link in "skipped" state that matches
+            "pyfile" without being "pyfile" itself.
+        """
+ queue = self.core.api.getQueue() #: get packages (w/o files, as most file data is useless here)
+
+ for package in queue:
+ #: check if package-folder equals pyfile's package folder
+ if package.folder != pyfile.package().folder:
+ continue
+
+ #: now get packaged data w/ files/links
+ pdata = self.core.api.getPackageData(package.pid)
+ for link in pdata.links:
+ #: check if link is "skipped"
+ if link.status != 4:
+ continue
+
+ #: check if link name collides with pdata's name
+ #: AND at last check if it is not pyfile itself
+ if link.name == pyfile.name and link.fid != pyfile.id:
+ return link
+
+
+ def _pyfile(self, link):
+ return PyFile(self.core.files,
+ link.fid,
+ link.url,
+ link.name,
+ link.size,
+ link.status,
+ link.error,
+ link.plugin,
+ link.packageID,
+ link.order)
diff --git a/pyload/plugin/addon/UpdateManager.py b/pyload/plugin/addon/UpdateManager.py
new file mode 100644
index 000000000..cf138f64a
--- /dev/null
+++ b/pyload/plugin/addon/UpdateManager.py
@@ -0,0 +1,306 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import re
+import sys
+
+from operator import itemgetter
+from os import path, remove, stat
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Expose, Addon, threaded
+from pyload.utils import safe_join
+
+
+class UpdateManager(Addon):
+ __name__ = "UpdateManager"
+ __type__ = "addon"
+ __version__ = "0.43"
+
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
+ ("interval" , "int" , "Check interval in hours" , 8 ),
+ ("autorestart" , "bool" , "Automatically restart pyLoad when required" , True ),
+ ("reloadplugins", "bool" , "Monitor plugins for code changes in debug mode", True ),
+ ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , False )]
+
+ __description__ = """Check for updates"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+
+ SERVER_URL = "http://updatemanager.pyload.org"
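+    #: matches and extracts the __version__ value from a downloaded plugin source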
+ VERSION = re.compile(r'__version__.*=.*("|\')([\d.]+)')
+ MIN_INTERVAL = 3 * 60 * 60 #: 3h minimum check interval (value is in seconds)
+
+
+ def pluginConfigChanged(self, plugin, name, value):
+ if name == "interval":
+ interval = value * 60 * 60
+ if self.MIN_INTERVAL <= interval != self.interval:
+ self.core.scheduler.removeJob(self.cb)
+ self.interval = interval
+ self.initPeriodical()
+ else:
+ self.logDebug("Invalid interval value, kept current")
+
+ elif name == "reloadplugins":
+ if self.cb2:
+ self.core.scheduler.removeJob(self.cb2)
+ if value is True and self.core.debug:
+ self.periodical2()
+
+
+ def activate(self):
+ self.pluginConfigChanged(self.__name__, "interval", self.getConfig("interval"))
+ x = lambda: self.pluginConfigChanged(self.__name__, "reloadplugins", self.getConfig("reloadplugins"))
+ self.core.scheduler.addJob(10, x, threaded=False)
+
+
+ def deactivate(self):
+ self.pluginConfigChanged(self.__name__, "reloadplugins", False)
+
+
+ def setup(self):
+ self.cb2 = None
+ self.interval = 0
+ self.updating = False
+ self.info = {'pyload': False, 'version': None, 'plugins': False}
+ self.mtimes = {} #: store modification time for each plugin
+
+
+ def periodical2(self):
+ if not self.updating:
+ self.autoreloadPlugins()
+
+ self.cb2 = self.core.scheduler.addJob(4, self.periodical2, threaded=False)
+
+
+ @Expose
+ def autoreloadPlugins(self):
+ """ reload and reindex all modified plugins """
+ modules = filter(
+ lambda m: m and (m.__name__.startswith("pyload.plugin.") or
+ m.__name__.startswith("userplugins.")) and
+ m.__name__.count(".") >= 2, sys.modules.itervalues()
+ )
+
+ reloads = []
+
+ for m in modules:
+ root, type, name = m.__name__.rsplit(".", 2)
+ id = (type, name)
+ if type in self.core.pluginManager.plugins:
+ f = m.__file__.replace(".pyc", ".py")
+ if not path.isfile(f):
+ continue
+
+ mtime = stat(f).st_mtime
+
+ if id not in self.mtimes:
+ self.mtimes[id] = mtime
+ elif self.mtimes[id] < mtime:
+ reloads.append(id)
+ self.mtimes[id] = mtime
+
+ return True if self.core.pluginManager.reloadPlugins(reloads) else False
+
+
+ def periodical(self):
+ if self.info['pyload'] or self.getConfig("nodebugupdate") and self.core.debug:
+ return
+
+ self.updateThread()
+
+
+ def server_request(self):
+ try:
+ return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()
+ except Exception:
+ self.logWarning(_("Unable to contact server to get updates"))
+
+
+ @threaded
+ def updateThread(self):
+ self.updating = True
+
+ status = self.update(onlyplugin=self.getConfig("mode") == "plugins only")
+
+        if status == 2 and self.getConfig("autorestart"):
+ self.core.api.restart()
+ else:
+ self.updating = False
+
+
+ @Expose
+ def updatePlugins(self):
+ """ simple wrapper for calling plugin update quickly """
+ return self.update(onlyplugin=True)
+
+
+ @Expose
+ def update(self, onlyplugin=False):
+ """ check for updates """
+ data = self.server_request()
+
+ if not data:
+ exitcode = 0
+
+ elif data[0] == "None":
+ self.logInfo(_("No new pyLoad version available"))
+ updates = data[1:]
+ exitcode = self._updatePlugins(updates)
+
+ elif onlyplugin:
+ exitcode = 0
+
+ else:
+ newversion = data[0]
+ self.logInfo(_("*** New pyLoad Version %s available ***") % newversion)
+ self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***"))
+ exitcode = 3
+ self.info['pyload'] = True
+ self.info['version'] = newversion
+
+ return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required; 3 = No plugins updated, new pyLoad version available
+
+
+ def _updatePlugins(self, updates):
+ """ check for plugin updates """
+
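+        # Expected server data layout: updates[0] = download url template,
+        # updates[1] = '|'-separated field schema, then one record per plugin,
+        # optionally followed by a "BLACKLIST" marker and blacklisted records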
+ if self.info['plugins']:
+ return False #: plugins were already updated
+
+ exitcode = 0
+ updated = []
+
+ url = updates[0]
+ schema = updates[1].split('|')
+
+ if "BLACKLIST" in updates:
+ blacklist = updates[updates.index('BLACKLIST') + 1:]
+ updates = updates[2:updates.index('BLACKLIST')]
+ else:
+ blacklist = None
+ updates = updates[2:]
+
+ upgradable = [dict(zip(schema, x.split('|'))) for x in updates]
+ blacklisted = [(x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]) for x in blacklist] if blacklist else []
+
+ if blacklist:
+ # Protect UpdateManager from self-removing
+ try:
+ blacklisted.remove(("addon", "UpdateManager"))
+ except Exception:
+ pass
+
+ for t, n in blacklisted:
+ for idx, plugin in enumerate(upgradable):
+ if n == plugin['name'] and t == plugin['type']:
+ upgradable.pop(idx)
+ break
+
+ for t, n in self.removePlugins(sorted(blacklisted)):
+ self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
+ 'type': t,
+ 'name': n,
+ })
+
+ for plugin in sorted(upgradable, key=itemgetter("type", "name")):
+ filename = plugin['name']
+ type = plugin['type']
+ version = plugin['version']
+
+ if filename.endswith(".pyc"):
+ name = filename[:filename.find("_")]
+ else:
+ name = filename.replace(".py", "")
+
+ plugins = getattr(self.core.pluginManager, "%sPlugins" % type)
+
+ oldver = float(plugins[name]['version']) if name in plugins else None
+ newver = float(version)
+
+ if not oldver:
+ msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
+ elif newver > oldver:
+ msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
+ else:
+ continue
+
+ self.logInfo(_(msg) % {'type' : type,
+ 'name' : name,
+ 'oldver': oldver,
+ 'newver': newver})
+ try:
+ content = getURL(url % plugin)
+ m = self.VERSION.search(content)
+
+ if m and m.group(2) == version:
+                    with open(safe_join("userplugins", type, filename), "wb") as f:
+ f.write(content)
+
+                    updated.append((type, name))
+ else:
+ raise Exception, _("Version mismatch")
+
+ except Exception, e:
+ self.logError(_("Error updating plugin: %s") % filename, str(e))
+
+ if updated:
+ reloaded = self.core.pluginManager.reloadPlugins(updated)
+ if reloaded:
+ self.logInfo(_("Plugins updated and reloaded"))
+ exitcode = 1
+ else:
+ self.logInfo(_("*** Plugins have been updated, but need a pyLoad restart to be reloaded ***"))
+ self.info['plugins'] = True
+ exitcode = 2
+ else:
+ self.logInfo(_("No plugin updates available"))
+
+ return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required
+
+
+ @Expose
+ def removePlugins(self, type_plugins):
+ """ delete plugins from disk """
+
+ if not type_plugins:
+            return []
+
+ self.logDebug("Requested deletion of plugins: %s" % type_plugins)
+
+ removed = []
+
+ for type, name in type_plugins:
+ err = False
+ file = name + ".py"
+
+            for root in ("userplugins", path.join(pypath, "pyload", "plugin")):
+
+ filename = safe_join(root, type, file)
+ try:
+ remove(filename)
+ except Exception, e:
+ self.logDebug("Error deleting: %s" % path.basename(filename), e)
+ err = True
+
+ filename += "c"
+ if path.isfile(filename):
+ try:
+ if type == "addon":
+ self.manager.deactivateAddon(name)
+ remove(filename)
+ except Exception, e:
+ self.logDebug("Error deleting: %s" % path.basename(filename), e)
+ err = True
+
+ if not err:
+ id = (type, name)
+ removed.append(id)
+
+ return removed #: return a list of the plugins successfully removed
diff --git a/pyload/plugin/addon/WindowsPhoneNotify.py b/pyload/plugin/addon/WindowsPhoneNotify.py
new file mode 100644
index 000000000..b9710c2f0
--- /dev/null
+++ b/pyload/plugin/addon/WindowsPhoneNotify.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+
+import httplib
+
+from time import time
+
+from pyload.plugin.Addon import Addon
+
+
+class WindowsPhoneNotify(Addon):
+ __name__ = "WindowsPhoneNotify"
+ __type__ = "addon"
+ __version__ = "0.07"
+
+ __config__ = [("id" , "str" , "Push ID" , "" ),
+ ("url" , "str" , "Push url" , "" ),
+ ("notifycaptcha" , "bool", "Notify captcha request" , True ),
+ ("notifypackage" , "bool", "Notify package finished" , True ),
+ ("notifyprocessed", "bool", "Notify processed packages status" , True ),
+ ("timeout" , "int" , "Timeout between captchas in seconds" , 5 ),
+ ("force" , "bool", "Send notifications if client is connected", False)]
+
+ __description__ = """Send push notifications to Windows Phone"""
+ __license__ = "GPLv3"
+ __authors__ = [("Andy Voigt", "phone-support@hotmail.de"),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ event_list = ["allDownloadsProcessed"]
+
+
+ def setup(self):
+ self.info = {} #@TODO: Remove in 0.4.10
+ self.last_notify = 0
+
+
+ def newCaptchaTask(self, task):
+ if not self.getConfig("notifycaptcha"):
+ return False
+
+ if time() - self.last_notify < self.getConf("timeout"):
+ return False
+
+ self.notify(_("Captcha"), _("New request waiting user input"))
+
+
+ def packageFinished(self, pypack):
+ if self.getConfig("notifypackage"):
+ self.notify(_("Package finished"), pypack.name)
+
+
+ def allDownloadsProcessed(self):
+ if not self.getConfig("notifyprocessed"):
+ return False
+
+ if any(True for pdata in self.core.api.getQueue() if pdata.linksdone < pdata.linkstotal):
+            self.notify(_("Package failed"), _("One or more packages were not completed successfully"))
+ else:
+ self.notify(_("All packages finished"))
+
+
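+    # Build the MPNS toast XML payload that gets POSTed to the configured push url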
+ def getXmlData(self, msg):
+ return ("<?xml version='1.0' encoding='utf-8'?> <wp:Notification xmlns:wp='WPNotification'> "
+ "<wp:Toast> <wp:Text1>pyLoad</wp:Text1> <wp:Text2>%s</wp:Text2> "
+ "</wp:Toast> </wp:Notification>" % msg)
+
+
+ def notify(self, event, msg=""):
+ id = self.getConfig("id")
+ url = self.getConfig("url")
+
+ if not id or not url:
+ return False
+
+ if self.core.isClientConnected() and not self.getConfig("force"):
+ return False
+
+ request = self.getXmlData("%s: %s" % (event, msg) if msg else event)
+ webservice = httplib.HTTP(url)
+
+ webservice.putrequest("POST", id)
+ webservice.putheader("Host", url)
+ webservice.putheader("Content-type", "text/xml")
+ webservice.putheader("X-NotificationClass", "2")
+ webservice.putheader("X-WindowsPhone-Target", "toast")
+ webservice.putheader("Content-length", "%d" % len(request))
+ webservice.endheaders()
+ webservice.send(request)
+ webservice.close()
+
+ self.last_notify = time()
diff --git a/pyload/plugin/addon/XMPPInterface.py b/pyload/plugin/addon/XMPPInterface.py
new file mode 100644
index 000000000..77a49af6f
--- /dev/null
+++ b/pyload/plugin/addon/XMPPInterface.py
@@ -0,0 +1,252 @@
+# -*- coding: utf-8 -*-
+
+from pyxmpp import streamtls
+from pyxmpp.all import JID, Message
+from pyxmpp.interface import implements
+from pyxmpp.interfaces import *
+from pyxmpp.jabber.client import JabberClient
+
+from pyload.plugin.addon.IRCInterface import IRCInterface
+
+
+class XMPPInterface(IRCInterface, JabberClient):
+ __name__ = "XMPPInterface"
+ __type__ = "addon"
+ __version__ = "0.11"
+
+ __config__ = [("jid" , "str" , "Jabber ID" , "user@exmaple-jabber-server.org" ),
+ ("pw" , "str" , "Password" , "" ),
+ ("tls" , "bool", "Use TLS" , False ),
+ ("owners" , "str" , "List of JIDs accepting commands from", "me@icq-gateway.org;some@msn-gateway.org"),
+ ("info_file", "bool", "Inform about every file finished" , False ),
+ ("info_pack", "bool", "Inform about every package finished" , True ),
+ ("captcha" , "bool", "Send captcha requests" , True )]
+
+ __description__ = """Connect to jabber and let owner perform different tasks"""
+ __license__ = "GPLv3"
+ __authors__ = [("RaNaN", "RaNaN@pyload.org")]
+
+
+ implements(IMessageHandlersProvider)
+
+
+ def __init__(self, core, manager):
+ IRCInterface.__init__(self, core, manager)
+
+ self.jid = JID(self.getConfig("jid"))
+ password = self.getConfig("pw")
+
+ # if bare JID is provided add a resource -- it is required
+ if not self.jid.resource:
+ self.jid = JID(self.jid.node, self.jid.domain, "pyLoad")
+
+ if self.getConfig("tls"):
+ tls_settings = streamtls.TLSSettings(require=True, verify_peer=False)
+ auth = ("sasl:PLAIN", "sasl:DIGEST-MD5")
+ else:
+ tls_settings = None
+ auth = ("sasl:DIGEST-MD5", "digest")
+
+ # setup client with provided connection information
+ # and identity data
+ JabberClient.__init__(self, self.jid, password,
+ disco_name="pyLoad XMPP Client", disco_type="bot",
+ tls_settings=tls_settings, auth_methods=auth)
+
+ self.interface_providers = [
+ VersionHandler(self),
+ self,
+ ]
+
+
+ def activate(self):
+ self.new_package = {}
+
+ self.start()
+
+
+ def packageFinished(self, pypack):
+ try:
+ if self.getConfig("info_pack"):
+ self.announce(_("Package finished: %s") % pypack.name)
+ except Exception:
+ pass
+
+
+ def downloadFinished(self, pyfile):
+ try:
+ if self.getConfig("info_file"):
+ self.announce(
+ _("Download finished: %(name)s @ %(plugin)s") % {"name": pyfile.name, "plugin": pyfile.pluginname})
+ except Exception:
+ pass
+
+
+ def run(self):
+ # connect to IRC etc.
+ self.connect()
+ try:
+ self.loop()
+ except Exception, ex:
+ self.logError(ex)
+
+
+ def stream_state_changed(self, state, arg):
+        """Called when the state of the stream connecting the client
+        to the server changes. Usually used to let the user
+        know what is going on."""
+ self.logDebug("*** State changed: %s %r ***" % (state, arg))
+
+
+ def disconnected(self):
+ self.logDebug("Client was disconnected")
+
+
+ def stream_closed(self, stream):
+ self.logDebug("Stream was closed", stream)
+
+
+ def stream_error(self, err):
+ self.logDebug("Stream Error", err)
+
+
+ def get_message_handlers(self):
+ """Return list of (message_type, message_handler) tuples.
+
+ The handlers returned will be called when matching message is received
+ in a client session."""
+ return [("normal", self.message)]
+
+
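+    # Parse the first word of the message body as a command trigger, dispatch to
+    # the matching event_<trigger> handler and return the replies as Message
+    # stanzas for pyxmpp to deliver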
+ def message(self, stanza):
+ """Message handler for the component."""
+ subject = stanza.get_subject()
+ body = stanza.get_body()
+ t = stanza.get_type()
+ self.logDebug("Message from %s received." % unicode(stanza.get_from()))
+ self.logDebug("Body: %s Subject: %s Type: %s" % (body, subject, t))
+
+ if t == "headline":
+ # 'headline' messages should never be replied to
+ return True
+ if subject:
+ subject = u"Re: " + subject
+
+ to_jid = stanza.get_from()
+ from_jid = stanza.get_to()
+
+ #j = JID()
+ to_name = to_jid.as_utf8()
+ from_name = from_jid.as_utf8()
+
+ names = self.getConfig("owners").split(";")
+
+ if to_name in names or to_jid.node + "@" + to_jid.domain in names:
+ messages = []
+
+ trigger = "pass"
+ args = None
+
+ try:
+ temp = body.split()
+ trigger = temp[0]
+ if len(temp) > 1:
+ args = temp[1:]
+ except Exception:
+ pass
+
+ handler = getattr(self, "event_%s" % trigger, self.event_pass)
+ try:
+ res = handler(args)
+ for line in res:
+ m = Message(
+ to_jid=to_jid,
+ from_jid=from_jid,
+ stanza_type=stanza.get_type(),
+ subject=subject,
+ body=line)
+
+ messages.append(m)
+ except Exception, e:
+ self.logError(e)
+
+ return messages
+
+ else:
+ return True
+
+
+ def response(self, msg, origin=""):
+ return self.announce(msg)
+
+
+ def announce(self, message):
+ """ send message to all owners"""
+ for user in self.getConfig("owners").split(";"):
+ self.logDebug("Send message to", user)
+
+ to_jid = JID(user)
+
+ m = Message(from_jid=self.jid,
+ to_jid=to_jid,
+ stanza_type="chat",
+ body=message)
+
+ stream = self.get_stream()
+ if not stream:
+ self.connect()
+ stream = self.get_stream()
+
+ stream.send(m)
+
+
+ def beforeReconnecting(self, ip):
+ self.disconnect()
+
+
+ def afterReconnecting(self, ip):
+ self.connect()
+
+
+class VersionHandler(object):
+ """Provides handler for a version query.
+
+ This class will answer version query and announce 'jabber:iq:version' namespace
+ in the client's disco#info results."""
+
+ implements(IIqHandlersProvider, IFeaturesProvider)
+
+
+ def __init__(self, client):
+ """Just remember who created this."""
+ self.client = client
+
+
+ def get_features(self):
+ """Return namespace which should the client include in its reply to a
+ disco#info query."""
+ return ["jabber:iq:version"]
+
+
+ def get_iq_get_handlers(self):
+ """Return list of tuples (element_name, namespace, handler) describing
+ handlers of <iq type='get'/> stanzas"""
+ return [("query", "jabber:iq:version", self.get_version)]
+
+
+ def get_iq_set_handlers(self):
+ """Return empty list, as this class provides no <iq type='set'/> stanza handler."""
+ return []
+
+
+ def get_version(self, iq):
+ """Handler for jabber:iq:version queries.
+
+ jabber:iq:version queries are not supported directly by PyXMPP, so the
+ XML node is accessed directly through the libxml2 API. This should be
+ used very carefully!"""
+ iq = iq.make_result_response()
+ q = iq.new_query("jabber:iq:version")
+ q.newTextChild(q.ns(), "name", "Echo component")
+ q.newTextChild(q.ns(), "version", "1.0")
+ return iq
diff --git a/pyload/plugin/addon/__init__.py b/pyload/plugin/addon/__init__.py
new file mode 100644
index 000000000..40a96afc6
--- /dev/null
+++ b/pyload/plugin/addon/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-