summaryrefslogtreecommitdiffstats
path: root/pyload/plugin/addon
diff options
context:
space:
mode:
Diffstat (limited to 'pyload/plugin/addon')
-rw-r--r--pyload/plugin/addon/AndroidPhoneNotify.py108
-rw-r--r--pyload/plugin/addon/AntiVirus.py115
-rw-r--r--pyload/plugin/addon/Checksum.py197
-rw-r--r--pyload/plugin/addon/ClickAndLoad.py87
-rw-r--r--pyload/plugin/addon/DeleteFinished.py81
-rw-r--r--pyload/plugin/addon/DownloadScheduler.py72
-rw-r--r--pyload/plugin/addon/ExternalScripts.py216
-rw-r--r--pyload/plugin/addon/ExtractArchive.py564
-rw-r--r--pyload/plugin/addon/HotFolder.py74
-rw-r--r--pyload/plugin/addon/IRCInterface.py430
-rw-r--r--pyload/plugin/addon/JustPremium.py51
-rw-r--r--pyload/plugin/addon/MergeFiles.py80
-rw-r--r--pyload/plugin/addon/MultiHome.py83
-rw-r--r--pyload/plugin/addon/RestartFailed.py43
-rw-r--r--pyload/plugin/addon/SkipRev.py105
-rw-r--r--pyload/plugin/addon/UnSkipOnFail.py90
-rw-r--r--pyload/plugin/addon/UpdateManager.py308
-rw-r--r--pyload/plugin/addon/WindowsPhoneNotify.py124
-rw-r--r--pyload/plugin/addon/XMPPInterface.py252
-rw-r--r--pyload/plugin/addon/__init__.py1
20 files changed, 3081 insertions, 0 deletions
diff --git a/pyload/plugin/addon/AndroidPhoneNotify.py b/pyload/plugin/addon/AndroidPhoneNotify.py
new file mode 100644
index 000000000..d3b390e6e
--- /dev/null
+++ b/pyload/plugin/addon/AndroidPhoneNotify.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+
+import time
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Addon, Expose
+
+
+class AndroidPhoneNotify(Addon):
+ __name__ = "AndroidPhoneNotify"
+ __type__ = "addon"
+ __version__ = "0.07"
+
+ __config__ = [("apikey" , "str" , "API key" , "" ),
+ ("notifycaptcha" , "bool", "Notify captcha request" , True ),
+ ("notifypackage" , "bool", "Notify package finished" , True ),
+ ("notifyprocessed", "bool", "Notify packages processed" , True ),
+ ("notifyupdate" , "bool", "Notify plugin updates" , True ),
+ ("notifyexit" , "bool", "Notify pyLoad shutdown" , True ),
+ ("sendtimewait" , "int" , "Timewait in seconds between notifications", 5 ),
+ ("sendpermin" , "int" , "Max notifications per minute" , 12 ),
+ ("ignoreclient" , "bool", "Send notifications if client is connected", False)]
+
+ __description__ = """Send push notifications to your Android Phone (using notifymyandroid.com)"""
+ __license__ = "GPLv3"
+ __authors__ = [("Steven Kosyra" , "steven.kosyra@gmail.com"),
+ ("Walter Purcaro", "vuolter@gmail.com" )]
+
+
+ event_list = ["allDownloadsProcessed", "plugin_updated"]
+
+
+ def setup(self):
+ self.last_notify = 0
+ self.notifications = 0
+
+
+ def plugin_updated(self, type_plugins):
+ if not self.getConfig('notifyupdate'):
+ return
+
+ self.notify(_("Plugins updated"), str(type_plugins))
+
+
+ def exit(self):
+ if not self.getConfig('notifyexit'):
+ return
+
+ if self.core.do_restart:
+ self.notify(_("Restarting pyLoad"))
+ else:
+ self.notify(_("Exiting pyLoad"))
+
+
+ def newCaptchaTask(self, task):
+ if not self.getConfig('notifycaptcha'):
+ return
+
+ self.notify(_("Captcha"), _("New request waiting user input"))
+
+
+ def packageFinished(self, pypack):
+ if self.getConfig('notifypackage'):
+ self.notify(_("Package finished"), pypack.name)
+
+
+    def allDownloadsProcessed(self):
+        """Notify when the whole queue has been processed, distinguishing
+        full success from partial failure."""
+        if not self.getConfig('notifyprocessed'):
+            return
+
+        #: a package with fewer done links than total links counts as failed
+        if any(True for pdata in self.core.api.getQueue() if pdata.linksdone < pdata.linkstotal):
+            self.notify(_("Package failed"), _("One or more packages was not completed successfully"))
+        else:
+            self.notify(_("All packages finished"))
+
+
+ @Expose
+ def notify(self,
+ event,
+ msg="",
+ key=self.getConfig('apikey')):
+
+ if not key:
+ return
+
+ if self.core.isClientConnected() and not self.getConfig('ignoreclient'):
+ return
+
+ elapsed_time = time.time() - self.last_notify
+
+ if elapsed_time < self.getConf("sendtimewait"):
+ return
+
+ if elapsed_time > 60:
+ self.notifications = 0
+
+ elif self.notifications >= self.getConf("sendpermin"):
+ return
+
+
+ getURL("http://www.notifymyandroid.com/publicapi/notify",
+ get={'apikey' : key,
+ 'application': "pyLoad",
+ 'event' : event,
+ 'description': msg})
+
+ self.last_notify = time.time()
+ self.notifications += 1
diff --git a/pyload/plugin/addon/AntiVirus.py b/pyload/plugin/addon/AntiVirus.py
new file mode 100644
index 000000000..2213cddc1
--- /dev/null
+++ b/pyload/plugin/addon/AntiVirus.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+
+import os
+import shutil
+import subprocess
+
+from pyload.plugin.Addon import Addon, Expose, threaded
+from pyload.utils import fs_encode, fs_join
+
+
+class AntiVirus(Addon):
+ __name__ = "AntiVirus"
+ __type__ = "addon"
+ __version__ = "0.07"
+
+ #@TODO: add trash option (use Send2Trash lib)
+ __config__ = [("action" , "Antivirus default;Delete;Quarantine", "Manage infected files" , "Antivirus default"),
+ ("quardir" , "folder" , "Quarantine folder" , "" ),
+ ("deltotrash", "bool" , "Move to trash (recycle bin) instead delete", True ),
+ ("scanfailed", "bool" , "Scan incompleted files (failed downloads)" , False ),
+ ("cmdfile" , "file" , "Antivirus executable" , "" ),
+ ("cmdargs" , "str" , "Scan options" , "" ),
+ ("ignore-err", "bool" , "Ignore scan errors" , False )]
+
+ __description__ = """Scan downloaded files with antivirus program"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ interval = 0 #@TODO: Remove in 0.4.10
+
+
+ def setup(self):
+ self.info = {} #@TODO: Remove in 0.4.10
+
+ try:
+ import send2trash
+
+ except ImportError:
+ self.logDebug("Send2Trash lib not found")
+ self.trashable = False
+
+ else:
+ self.trashable = True
+
+
+ @Expose
+ @threaded
+ def scan(self, pyfile, thread):
+ file = fs_encode(pyfile.plugin.lastDownload)
+ filename = os.path.basename(pyfile.plugin.lastDownload)
+ cmdfile = fs_encode(self.getConfig('cmdfile'))
+ cmdargs = fs_encode(self.getConfig('cmdargs').strip())
+
+ if not os.path.isfile(file) or not os.path.isfile(cmdfile):
+ return
+
+ thread.addActive(pyfile)
+ pyfile.setCustomStatus(_("virus scanning"))
+ pyfile.setProgress(0)
+
+ try:
+ p = subprocess.Popen([cmdfile, cmdargs, file], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ out, err = map(str.strip, p.communicate())
+
+ if out:
+ self.logInfo(filename, out)
+
+ if err:
+ self.logWarning(filename, err)
+ if not self.getConfig('ignore-err'):
+ self.logDebug("Delete/Quarantine task is aborted")
+ return
+
+ if p.returncode:
+ pyfile.error = _("infected file")
+ action = self.getConfig('action')
+ try:
+ if action == "Delete":
+ if not self.getConfig('deltotrash'):
+ os.remove(file)
+
+ elif self.trashable:
+ send2trash.send2trash(file)
+
+ else:
+ self.logWarning(_("Unable to move file to trash, move to quarantine instead"))
+ pyfile.setCustomStatus(_("file moving"))
+ shutil.move(file, self.getConfig('quardir'))
+
+ elif action == "Quarantine":
+ pyfile.setCustomStatus(_("file moving"))
+ shutil.move(file, self.getConfig('quardir'))
+
+ except (IOError, shutil.Error), e:
+ self.logError(filename, action + " action failed!", e)
+
+ elif not out and not err:
+ self.logDebug(filename, "No infected file found")
+
+ finally:
+ pyfile.setProgress(100)
+ thread.finishFile(pyfile)
+
+
+ def downloadFinished(self, pyfile):
+ return self.scan(pyfile)
+
+
+ def downloadFailed(self, pyfile):
+ #: Check if pyfile is still "failed",
+ # maybe might has been restarted in meantime
+ if pyfile.status == 8 and self.getConfig('scanfailed'):
+ return self.scan(pyfile)
diff --git a/pyload/plugin/addon/Checksum.py b/pyload/plugin/addon/Checksum.py
new file mode 100644
index 000000000..4b1380506
--- /dev/null
+++ b/pyload/plugin/addon/Checksum.py
@@ -0,0 +1,197 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import hashlib
+import re
+import zlib
+
+from os import remove
+from os.path import getsize, isfile, splitext
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import fs_join, fs_encode
+
+
+def computeChecksum(local_file, algorithm):
+ if algorithm in getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")):
+ h = getattr(hashlib, algorithm)()
+
+ with open(local_file, 'rb') as f:
+ for chunk in iter(lambda: f.read(128 * h.block_size), ''):
+ h.update(chunk)
+
+ return h.hexdigest()
+
+ elif algorithm in ("adler32", "crc32"):
+ hf = getattr(zlib, algorithm)
+ last = 0
+
+ with open(local_file, 'rb') as f:
+ for chunk in iter(lambda: f.read(8192), ''):
+ last = hf(chunk, last)
+
+ return "%x" % last
+
+ else:
+ return None
+
+
+class Checksum(Addon):
+ __name__ = "Checksum"
+ __type__ = "addon"
+ __version__ = "0.16"
+
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("check_checksum", "bool" , "Check checksum? (If False only size will be verified)", True ),
+ ("check_action" , "fail;retry;nothing", "What to do if check fails?" , "retry"),
+ ("max_tries" , "int" , "Number of retries" , 2 ),
+ ("retry_action" , "fail;nothing" , "What to do if all retries fail?" , "fail" ),
+ ("wait_time" , "int" , "Time to wait before each retry (seconds)" , 1 )]
+
+ __description__ = """Verify downloaded file size and checksum"""
+ __license__ = "GPLv3"
+ __authors__ = [("zoidberg" , "zoidberg@mujmail.cz"),
+ ("Walter Purcaro", "vuolter@gmail.com" ),
+ ("stickell" , "l.stickell@yahoo.it")]
+
+
+ methods = {'sfv' : 'crc32',
+ 'crc' : 'crc32',
+ 'hash': 'md5'}
+ regexps = {'sfv' : r'^(?P<NAME>[^;].+)\s+(?P<HASH>[0-9A-Fa-f]{8})$',
+ 'md5' : r'^(?P<NAME>[0-9A-Fa-f]{32}) (?P<FILE>.+)$',
+ 'crc' : r'filename=(?P<NAME>.+)\nsize=(?P<SIZE>\d+)\ncrc32=(?P<HASH>[0-9A-Fa-f]{8})$',
+ 'default': r'^(?P<HASH>[0-9A-Fa-f]+)\s+\*?(?P<NAME>.+)$'}
+
+
+ def activate(self):
+ if not self.getConfig('check_checksum'):
+ self.logInfo(_("Checksum validation is disabled in plugin configuration"))
+
+
+ def setup(self):
+ self.algorithms = sorted(
+ getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")), reverse=True)
+
+ self.algorithms.extend(["crc32", "adler32"])
+
+ self.formats = self.algorithms + ["sfv", "crc", "hash"]
+
+
+    def downloadFinished(self, pyfile):
+        """
+        Compute checksum for the downloaded file and compare it with the hash provided by the hoster.
+        pyfile.plugin.check_data should be a dictionary which can contain:
+        a) if known, the exact filesize in bytes (e.g. "size": 123456789)
+        b) hexadecimal hash string with algorithm name as key (e.g. "md5": "d76505d0869f9f928a17d42d66326307")
+        """
+        #: pick the most reliable hash source the hoster plugin exposes
+        if hasattr(pyfile.plugin, "check_data") and isinstance(pyfile.plugin.check_data, dict):
+            data = pyfile.plugin.check_data.copy()
+
+        elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict):
+            data = pyfile.plugin.api_data.copy()
+
+        elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
+            data = pyfile.plugin.info.copy()
+            data.pop('size', None) #@NOTE: Don't check file size until a similary matcher will be implemented
+
+        else:
+            return
+
+        self.logDebug(data)
+
+        #: NOTE(review): execution continues past checkFailed unless the configured
+        #: action raises (presumably plugin.retry/fail do) — confirm intended
+        if not pyfile.plugin.lastDownload:
+            self.checkFailed(pyfile, None, "No file downloaded")
+
+        local_file = fs_encode(pyfile.plugin.lastDownload)
+        #download_folder = self.config['general']['download_folder']
+        #local_file = fs_encode(fs_join(download_folder, pyfile.package().folder, pyfile.name))
+
+        if not isfile(local_file):
+            self.checkFailed(pyfile, None, "File does not exist")
+
+        # validate file size
+        if "size" in data:
+            api_size = int(data['size'])
+            file_size = getsize(local_file)
+
+            if api_size != file_size:
+                self.logWarning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size))
+                self.checkFailed(pyfile, local_file, "Incorrect file size")
+
+            data.pop('size', None)
+
+        # validate checksum
+        if data and self.getConfig('check_checksum'):
+
+            #: fall back to generically-named hash keys when no 'md5' entry exists
+            if not 'md5' in data:
+                for type in ("checksum", "hashsum", "hash"):
+                    if type in data:
+                        data['md5'] = data[type] #@NOTE: What happens if it's not an md5 hash?
+                        break
+
+            #: algorithms are sorted longest-name-first; first matching key wins
+            for key in self.algorithms:
+                if key in data:
+                    checksum = computeChecksum(local_file, key.replace("-", "").lower())
+                    if checksum:
+                        if checksum == data[key].lower():
+                            self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
+                                        (pyfile.name, key.upper(), checksum))
+                            break
+                        else:
+                            self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
+                                           (key.upper(), pyfile.name, checksum, data[key]))
+                            self.checkFailed(pyfile, local_file, "Checksums do not match")
+                    else:
+                        self.logWarning(_("Unsupported hashing algorithm"), key.upper())
+            else:
+                self.logWarning(_("Unable to validate checksum for file: ") + pyfile.name)
+
+
+    def checkFailed(self, pyfile, local_file, msg):
+        """React to a failed size/checksum verification.
+
+        check_action "retry": delete the partial file and retry until
+        'max_tries' is reached (retry presumably raises, stopping flow
+        here — confirm); exhausted retries with retry_action "nothing"
+        return silently. check_action "nothing": ignore the failure.
+        Every other path falls through and fails the download with `msg`.
+        """
+        check_action = self.getConfig('check_action')
+        if check_action == "retry":
+            max_tries = self.getConfig('max_tries')
+            retry_action = self.getConfig('retry_action')
+            if pyfile.plugin.retries < max_tries:
+                if local_file:
+                    remove(local_file)
+                pyfile.plugin.retry(max_tries, self.getConfig('wait_time'), msg)
+            elif retry_action == "nothing":
+                return
+        elif check_action == "nothing":
+            return
+        pyfile.plugin.fail(reason=msg)
+
+
+    def packageFinished(self, pypack):
+        """Verify all files of a finished package against any checksum files
+        (.sfv/.crc/.md5/hash/algorithm-named) found among its links."""
+        download_folder = fs_join(self.config['general']['download_folder'], pypack.folder, "")
+
+        for link in pypack.getChildren().itervalues():
+            #: file extension decides whether this link is a checksum file
+            file_type = splitext(link['name'])[1][1:].lower()
+
+            if file_type not in self.formats:
+                continue
+
+            hash_file = fs_encode(fs_join(download_folder, link['name']))
+            if not isfile(hash_file):
+                self.logWarning(_("File not found"), link['name'])
+                continue
+
+            with open(hash_file) as f:
+                text = f.read()
+
+            for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
+                data = m.groupdict()
+                self.logDebug(link['name'], data)
+
+                local_file = fs_encode(fs_join(download_folder, data['NAME']))
+                algorithm = self.methods.get(file_type, file_type)
+                checksum = computeChecksum(local_file, algorithm)
+                #: NOTE(review): comparison is case-sensitive while computeChecksum
+                #: returns lowercase hex — uppercase hashes in checksum files will
+                #: never match; consider lowering data['HASH'] first
+                if checksum == data['HASH']:
+                    self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
+                                (data['NAME'], algorithm, checksum))
+                else:
+                    self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
+                                   (algorithm, data['NAME'], checksum, data['HASH']))
diff --git a/pyload/plugin/addon/ClickAndLoad.py b/pyload/plugin/addon/ClickAndLoad.py
new file mode 100644
index 000000000..73976d7e2
--- /dev/null
+++ b/pyload/plugin/addon/ClickAndLoad.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+
+import socket
+import time
+
+from threading import Lock
+
+from pyload.plugin.Addon import Addon, threaded
+
+
+def forward(source, destination):
+    """Pump bytes from the `source` socket to the `destination` socket
+    until `source` reaches EOF.
+
+    On EOF (or any error) the destination's write side is shut down so
+    the peer sees end-of-stream; the paired forward thread handles the
+    opposite direction.
+    """
+    try:
+        bufsize = 1024
+        bufdata = source.recv(bufsize)
+        while bufdata:
+            destination.sendall(bufdata)
+            bufdata = source.recv(bufsize)
+    finally:
+        destination.shutdown(socket.SHUT_WR)
+        # destination.close()
+
+
+#@TODO: IPv6 support
+class ClickAndLoad(Addon):
+ __name__ = "ClickAndLoad"
+ __type__ = "addon"
+ __version__ = "0.41"
+
+ __config__ = [("activated", "bool", "Activated" , True),
+ ("port" , "int" , "Port" , 9666),
+ ("extern" , "bool", "Listen on the public network interface", True)]
+
+ __description__ = """Click'n'Load addon plugin"""
+ __license__ = "GPLv3"
+ __authors__ = [("RaNaN" , "RaNaN@pyload.de" ),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def activate(self):
+ if not self.core.config['webui']['activated']:
+ return
+
+ ip = "" if self.getConfig('extern') else "127.0.0.1"
+ webport = self.core.config['webui']['port']
+ cnlport = self.getConfig('port')
+
+ self.proxy(ip, webport, cnlport)
+
+
+    @threaded
+    def proxy(self, ip, webport, cnlport):
+        """Start the Click'n'Load proxy server, then park this thread."""
+        time.sleep(10) #@TODO: Remove in 0.4.10 (implement addon delay on startup)
+
+        self.logInfo(_("Proxy listening on %s:%s") % (ip or "0.0.0.0", cnlport))
+
+        self._server(ip, webport, cnlport)
+
+        #: acquiring the same non-reentrant lock twice blocks forever on
+        #: purpose — it keeps this thread alive while _server runs
+        lock = Lock()
+        lock.acquire()
+        lock.acquire()
+
+
+    @threaded
+    def _server(self, ip, webport, cnlport):
+        """Accept Click'n'Load connections on `cnlport` and proxy each one
+        to the local webui on `webport` with a pair of forward threads."""
+        try:
+            dock_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            dock_socket.bind((ip, cnlport))
+            dock_socket.listen(5)
+
+            while True:
+                client_socket, client_addr = dock_socket.accept()
+                self.logDebug("Connection from %s:%s" % client_addr)
+
+                server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                server_socket.connect(("127.0.0.1", webport))
+
+                self.manager.startThread(forward, client_socket, server_socket)
+                self.manager.startThread(forward, server_socket, client_socket)
+
+        except socket.timeout:
+            self.logDebug("Connection timed out, retrying...")
+            #: NOTE(review): recursive retry grows the call stack over long
+            #: uptimes — an outer loop would be safer
+            return self._server(ip, webport, cnlport)
+
+        except socket.error, e:
+            self.logError(e)
+            time.sleep(240)
+            return self._server(ip, webport, cnlport)
diff --git a/pyload/plugin/addon/DeleteFinished.py b/pyload/plugin/addon/DeleteFinished.py
new file mode 100644
index 000000000..801e48ed6
--- /dev/null
+++ b/pyload/plugin/addon/DeleteFinished.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+from pyload.database import style
+from pyload.plugin.Addon import Addon
+
+
+class DeleteFinished(Addon):
+ __name__ = "DeleteFinished"
+ __type__ = "addon"
+ __version__ = "1.12"
+
+ __config__ = [("interval" , "int" , "Check interval in hours" , 72 ),
+ ("deloffline", "bool", "Delete package with offline links", False)]
+
+ __description__ = """Automatically delete all finished packages from queue"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ # event_list = ["pluginConfigChanged"]
+
+ MIN_CHECK_INTERVAL = 1 * 60 * 60 #: 1 hour
+
+
+ ## overwritten methods ##
+ def setup(self):
+ self.interval = self.MIN_CHECK_INTERVAL
+
+
+    def periodical(self):
+        """Purge finished packages once, then sleep until the next
+        packageFinished event re-arms the check (see wakeup)."""
+        if not self.info['sleep']:
+            deloffline = self.getConfig('deloffline')
+            #: NOTE(review): link status ids passed to the SQL NOT IN clause —
+            #: presumably the extra '1' includes offline links; confirm against
+            #: pyload's link status enum
+            mode = '0,1,4' if deloffline else '0,4'
+            msg = _('delete all finished packages in queue list (%s packages with offline links)')
+            self.logInfo(msg % (_('including') if deloffline else _('excluding')))
+            self.deleteFinished(mode)
+            self.info['sleep'] = True
+            self.addEvent('packageFinished', self.wakeup)
+
+
+ # def pluginConfigChanged(self, plugin, name, value):
+ # if name == "interval" and value != self.interval:
+ # self.interval = value * 3600
+ # self.initPeriodical()
+
+
+ def deactivate(self):
+ self.manager.removeEvent('packageFinished', self.wakeup)
+
+
+ def activate(self):
+ self.info['sleep'] = True
+ # interval = self.getConfig('interval')
+ # self.pluginConfigChanged(self.__class__.__name__, 'interval', interval)
+ self.interval = max(self.MIN_CHECK_INTERVAL, self.getConfig('interval') * 60 * 60)
+ self.addEvent('packageFinished', self.wakeup)
+ self.initPeriodical()
+
+
+ ## own methods ##
+ @style.queue
+ def deleteFinished(self, mode):
+ self.c.execute('DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE package=packages.id AND status NOT IN (%s))' % mode)
+ self.c.execute('DELETE FROM links WHERE NOT EXISTS(SELECT 1 FROM packages WHERE id=links.package)')
+
+
+ def wakeup(self, pypack):
+ self.manager.removeEvent('packageFinished', self.wakeup)
+ self.info['sleep'] = False
+
+
+ ## event managing ##
+ def addEvent(self, event, func):
+ """Adds an event listener for event name"""
+ if event in self.manager.events:
+ if func in self.manager.events[event]:
+ self.logDebug("Function already registered", func)
+ else:
+ self.manager.events[event].append(func)
+ else:
+ self.manager.events[event] = [func]
diff --git a/pyload/plugin/addon/DownloadScheduler.py b/pyload/plugin/addon/DownloadScheduler.py
new file mode 100644
index 000000000..de961cc1f
--- /dev/null
+++ b/pyload/plugin/addon/DownloadScheduler.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+
+import re
+import time
+
+from pyload.plugin.Addon import Addon
+
+
+class DownloadScheduler(Addon):
+ __name__ = "DownloadScheduler"
+ __type__ = "addon"
+ __version__ = "0.22"
+
+ __config__ = [("timetable", "str" , "List time periods as hh:mm full or number(kB/s)" , "0:00 full, 7:00 250, 10:00 0, 17:00 150"),
+ ("abort" , "bool", "Abort active downloads when start period with speed 0", False )]
+
+ __description__ = """Download Scheduler"""
+ __license__ = "GPLv3"
+ __authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
+ ("stickell", "l.stickell@yahoo.it")]
+
+
+ def activate(self):
+ self.updateSchedule()
+
+
+ def updateSchedule(self, schedule=None):
+ if schedule is None:
+ schedule = self.getConfig('timetable')
+
+ schedule = re.findall("(\d{1,2}):(\d{2})[\s]*(-?\d+)",
+ schedule.lower().replace("full", "-1").replace("none", "0"))
+ if not schedule:
+ self.logError(_("Invalid schedule"))
+ return
+
+ t0 = time.localtime()
+ now = (t0.tm_hour, t0.tm_min, t0.tm_sec, "X")
+ schedule = sorted([(int(x[0]), int(x[1]), 0, int(x[2])) for x in schedule] + [now])
+
+ self.logDebug("Schedule", schedule)
+
+ for i, v in enumerate(schedule):
+ if v[3] == "X":
+ last, next = schedule[i - 1], schedule[(i + 1) % len(schedule)]
+ self.logDebug("Now/Last/Next", now, last, next)
+
+ self.setDownloadSpeed(last[3])
+
+ next_time = (((24 + next[0] - now[0]) * 60 + next[1] - now[1]) * 60 + next[2] - now[2]) % 86400
+ self.core.scheduler.removeJob(self.cb)
+ self.cb = self.core.scheduler.addJob(next_time, self.updateSchedule, threaded=False)
+
+
+ def setDownloadSpeed(self, speed):
+ if speed == 0:
+ abort = self.getConfig('abort')
+ self.logInfo(_("Stopping download server. (Running downloads will %sbe aborted.)") % '' if abort else _('not '))
+ self.core.api.pauseServer()
+ if abort:
+ self.core.api.stopAllDownloads()
+ else:
+ self.core.api.unpauseServer()
+
+ if speed > 0:
+ self.logInfo(_("Setting download speed to %d kB/s") % speed)
+ self.core.api.setConfigValue("download", "limit_speed", 1)
+ self.core.api.setConfigValue("download", "max_speed", speed)
+ else:
+ self.logInfo(_("Setting download speed to FULL"))
+ self.core.api.setConfigValue("download", "limit_speed", 0)
+ self.core.api.setConfigValue("download", "max_speed", -1)
diff --git a/pyload/plugin/addon/ExternalScripts.py b/pyload/plugin/addon/ExternalScripts.py
new file mode 100644
index 000000000..05b1d7b65
--- /dev/null
+++ b/pyload/plugin/addon/ExternalScripts.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+
+import os
+import subprocess
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import fs_encode, fs_join
+
+
+class ExternalScripts(Addon):
+ __name__ = "ExternalScripts"
+ __type__ = "addon"
+ __version__ = "0.39"
+
+ __config__ = [("activated", "bool", "Activated" , True ),
+ ("waitend" , "bool", "Wait script ending", False)]
+
+ __description__ = """Run external scripts"""
+ __license__ = "GPLv3"
+ __authors__ = [("mkaay" , "mkaay@mkaay.de" ),
+ ("RaNaN" , "ranan@pyload.org" ),
+ ("spoob" , "spoob@pyload.org" ),
+ ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ event_list = ["archive_extract_failed", "archive_extracted" ,
+ "package_extract_failed", "package_extracted" ,
+ "all_archives_extracted", "all_archives_processed",
+ "allDownloadsFinished" , "allDownloadsProcessed" ,
+ "packageDeleted"]
+
+
+ def setup(self):
+ self.info = {'oldip': None}
+ self.scripts = {}
+
+ folders = ["pyload_start", "pyload_restart", "pyload_stop",
+ "before_reconnect", "after_reconnect",
+ "download_preparing", "download_failed", "download_finished",
+ "archive_extract_failed", "archive_extracted",
+ "package_finished", "package_deleted", "package_extract_failed", "package_extracted",
+ "all_downloads_processed", "all_downloads_finished", #@TODO: Invert `all_downloads_processed`, `all_downloads_finished` order in 0.4.10
+ "all_archives_extracted", "all_archives_processed"]
+
+ for folder in folders:
+ self.scripts[folder] = []
+ for dir in (pypath, ''):
+ self.initPluginType(folder, os.path.join(dir, 'scripts', folder))
+
+ for script_type, names in self.scripts.iteritems():
+ if names:
+ self.logInfo(_("Installed scripts for: ") + script_type, ", ".join(map(os.path.basename, names)))
+
+ self.pyload_start()
+
+
+ def initPluginType(self, name, dir):
+ if not os.path.isdir(dir):
+ try:
+ os.makedirs(dir)
+
+ except OSError, e:
+ self.logDebug(e)
+ return
+
+ for filename in os.listdir(dir):
+ file = fs_join(dir, filename)
+
+ if not os.path.isfile(file):
+ continue
+
+ if filename[0] in ("#", "_") or filename.endswith("~") or filename.endswith(".swp"):
+ continue
+
+ if not os.access(file, os.X_OK):
+ self.logWarning(_("Script not executable:") + " %s/%s" % (name, filename))
+
+ self.scripts[name].append(file)
+
+
+    def callScript(self, script, *args):
+        """Execute an external script with `args` as its command line.
+
+        Arguments are stringified and filesystem-encoded; the script's
+        output goes to pyload's own stdout/stderr. Blocks until the
+        script finishes only when the 'waitend' option is enabled.
+        """
+        try:
+            cmd_args = [fs_encode(str(x) if not isinstance(x, basestring) else x) for x in args]
+            cmd = [script] + cmd_args
+
+            self.logDebug("Executing: %s" % os.path.abspath(script), "Args: " + ' '.join(cmd_args))
+
+            p = subprocess.Popen(cmd, bufsize=-1) #@NOTE: output goes to pyload
+            if self.getConfig('waitend'):
+                p.communicate()
+
+        except Exception, e:
+            #: logging the exception object itself may fail (e.g. unprintable
+            #: payload), hence the nested fallback
+            try:
+                self.logError(_("Runtime error: %s") % os.path.abspath(script), e)
+            except Exception:
+                self.logError(_("Runtime error: %s") % os.path.abspath(script), _("Unknown error"))
+
+
+ def pyload_start(self):
+ for script in self.scripts['pyload_start']:
+ self.callScript(script)
+
+
+ def exit(self):
+ for script in self.scripts['pyload_restart' if self.core.do_restart else 'pyload_stop']:
+ self.callScript(script)
+
+
+ def beforeReconnecting(self, ip):
+ for script in self.scripts['before_reconnect']:
+ self.callScript(script, ip)
+ self.info['oldip'] = ip
+
+
+ def afterReconnecting(self, ip):
+ for script in self.scripts['after_reconnect']:
+ self.callScript(script, ip, self.info['oldip']) #@TODO: Use built-in oldip in 0.4.10
+
+
+ def downloadPreparing(self, pyfile):
+ for script in self.scripts['download_preparing']:
+ self.callScript(script, pyfile.id, pyfile.name, None, pyfile.pluginname, pyfile.url)
+
+
+ def downloadFailed(self, pyfile):
+ if self.config['general']['folder_per_package']:
+ download_folder = fs_join(self.config['general']['download_folder'], pyfile.package().folder)
+ else:
+ download_folder = self.config['general']['download_folder']
+
+ for script in self.scripts['download_failed']:
+ file = fs_join(download_folder, pyfile.name)
+ self.callScript(script, pyfile.id, pyfile.name, file, pyfile.pluginname, pyfile.url)
+
+
+ def downloadFinished(self, pyfile):
+ if self.config['general']['folder_per_package']:
+ download_folder = fs_join(self.config['general']['download_folder'], pyfile.package().folder)
+ else:
+ download_folder = self.config['general']['download_folder']
+
+ for script in self.scripts['download_finished']:
+ file = fs_join(download_folder, pyfile.name)
+ self.callScript(script, pyfile.id, pyfile.name, file, pyfile.pluginname, pyfile.url)
+
+
+ def archive_extract_failed(self, pyfile, archive):
+ for script in self.scripts['archive_extract_failed']:
+ self.callScript(script, pyfile.id, pyfile.name, archive.filename, archive.out, archive.files)
+
+
+ def archive_extracted(self, pyfile, archive):
+ for script in self.scripts['archive_extracted']:
+ self.callScript(script, pyfile.id, pyfile.name, archive.filename, archive.out, archive.files)
+
+
+ def packageFinished(self, pypack):
+ if self.config['general']['folder_per_package']:
+ download_folder = fs_join(self.config['general']['download_folder'], pypack.folder)
+ else:
+ download_folder = self.config['general']['download_folder']
+
+ for script in self.scripts['package_finished']:
+ self.callScript(script, pypack.id, pypack.name, download_folder, pypack.password)
+
+
+ def packageDeleted(self, pid):
+ pack = self.core.api.getPackageInfo(pid)
+
+ if self.config['general']['folder_per_package']:
+ download_folder = fs_join(self.config['general']['download_folder'], pack.folder)
+ else:
+ download_folder = self.config['general']['download_folder']
+
+ for script in self.scripts['package_deleted']:
+ self.callScript(script, pack.id, pack.name, download_folder, pack.password)
+
+
+ def package_extract_failed(self, pypack):
+ if self.config['general']['folder_per_package']:
+ download_folder = fs_join(self.config['general']['download_folder'], pypack.folder)
+ else:
+ download_folder = self.config['general']['download_folder']
+
+ for script in self.scripts['package_extract_failed']:
+ self.callScript(script, pypack.id, pypack.name, download_folder, pypack.password)
+
+
+ def package_extracted(self, pypack):
+ if self.config['general']['folder_per_package']:
+ download_folder = fs_join(self.config['general']['download_folder'], pypack.folder)
+ else:
+ download_folder = self.config['general']['download_folder']
+
+ for script in self.scripts['package_extracted']:
+ self.callScript(script, pypack.id, pypack.name, download_folder)
+
+
+ def allDownloadsFinished(self):
+ for script in self.scripts['all_downloads_finished']:
+ self.callScript(script)
+
+
+ def allDownloadsProcessed(self):
+ for script in self.scripts['all_downloads_processed']:
+ self.callScript(script)
+
+
+ def all_archives_extracted(self):
+ for script in self.scripts['all_archives_extracted']:
+ self.callScript(script)
+
+
+ def all_archives_processed(self):
+ for script in self.scripts['all_archives_processed']:
+ self.callScript(script)
diff --git a/pyload/plugin/addon/ExtractArchive.py b/pyload/plugin/addon/ExtractArchive.py
new file mode 100644
index 000000000..07b388ecd
--- /dev/null
+++ b/pyload/plugin/addon/ExtractArchive.py
@@ -0,0 +1,564 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import os
+import sys
+import traceback
+
+from copy import copy
+
# Monkey patch for a subprocess bug in Python 2.6 and lower:
# Popen.wait() can raise OSError(EINTR) when interrupted by a signal.
# See http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
if sys.version_info < (2, 7) and os.name != "nt":
    import errno
    import subprocess

    def _eintr_retry_call(func, *args):
        # Retry the call until it completes without being interrupted (EINTR);
        # any other OSError is re-raised.
        while True:
            try:
                return func(*args)

            except OSError, e:
                if e.errno == errno.EINTR:
                    continue
                raise


    # Replacement accepts an (unused) timeout argument for signature
    # compatibility with newer Python versions.
    def wait(self, timeout=0):
        """Wait for child process to terminate. Returns returncode
        attribute."""
        if self.returncode is None:
            try:
                pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)

            except OSError, e:
                if e.errno != errno.ECHILD:
                    raise
                # This happens if SIGCLD is set to be ignored or waiting
                # for child processes has otherwise been disabled for our
                # process. This child is dead, we can't get the status.
                sts = 0
            self._handle_exitstatus(sts)
        return self.returncode

    subprocess.Popen.wait = wait
+
+if os.name != "nt":
+ from grp import getgrnam
+ from pwd import getpwnam
+
+from pyload.plugin.Addon import Addon, threaded, Expose
+from pyload.plugin.Extractor import ArchiveError, CRCError, PasswordError
+from pyload.plugin.internal.SimpleHoster import replace_patterns
+from pyload.utils import fs_encode, fs_join, uniqify
+
+
class ArchiveQueue(object):
    """Persistent list of package ids, stored base64-encoded in the plugin storage."""

    def __init__(self, plugin, storage):
        self.plugin = plugin
        self.storage = storage


    def get(self):
        """Return the stored queue as a list of ints; [] on any decode error."""
        try:
            raw = self.plugin.getStorage("ExtractArchive:%s" % self.storage, "")
            return [int(pid) for pid in raw.decode('base64').split()]
        except Exception:
            return []


    def set(self, value):
        """Persist *value* (list of ids or a single item) to the plugin storage."""
        if isinstance(value, list):
            item = str(value)[1:-1].replace(' ', '').replace(',', ' ')
        else:
            item = str(value).strip()
        return self.plugin.setStorage("ExtractArchive:%s" % self.storage, item.encode('base64')[:-1])


    def delete(self):
        """Drop the stored queue entirely."""
        return self.plugin.delStorage("ExtractArchive:%s" % self.storage)


    def add(self, item):
        """Append *item* unless it is already queued."""
        queue = self.get()
        if item in queue:
            return True
        return self.set(queue + [item])


    def remove(self, item):
        """Remove *item* if present; delete the storage key once the queue is empty."""
        queue = self.get()
        try:
            queue.remove(item)
        except ValueError:
            pass

        if not queue:
            return self.delete()
        return self.set(queue)
+
+
+class ExtractArchive(Addon):
+ __name__ = "ExtractArchive"
+ __type__ = "addon"
+ __version__ = "1.41"
+
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("fullpath" , "bool" , "Extract with full paths" , True ),
+ ("overwrite" , "bool" , "Overwrite files" , False ),
+ ("keepbroken" , "bool" , "Try to extract broken archives" , False ),
+ ("repair" , "bool" , "Repair broken archives (RAR required)" , False ),
+ ("test" , "bool" , "Test archive before extracting" , False ),
+ ("usepasswordfile", "bool" , "Use password file" , True ),
+ ("passwordfile" , "file" , "Password file" , "archive_password.txt" ),
+ ("delete" , "bool" , "Delete archive after extraction" , True ),
+ ("deltotrash" , "bool" , "Move to trash (recycle bin) instead delete", True ),
+ ("subfolder" , "bool" , "Create subfolder for each package" , False ),
+ ("destination" , "folder" , "Extract files to folder" , "" ),
+ ("extensions" , "str" , "Extract archives ending with extension" , "7z,bz2,bzip2,gz,gzip,lha,lzh,lzma,rar,tar,taz,tbz,tbz2,tgz,xar,xz,z,zip"),
+ ("excludefiles" , "str" , "Don't extract the following files" , "*.nfo,*.DS_Store,index.dat,thumb.db" ),
+ ("recursive" , "bool" , "Extract archives in archives" , True ),
+ ("waitall" , "bool" , "Run after all downloads was processed" , False ),
+ ("renice" , "int" , "CPU priority" , 0 )]
+
+ __description__ = """Extract different kind of archives"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com"),
+ ("Immenz" , "immenz@gmx.net" )]
+
+
+ event_list = ["allDownloadsProcessed","packageDeleted"]
+
+ NAME_REPLACEMENTS = [(r'\.part\d+\.rar$', ".part.rar")]
+
+
+ def setup(self):
+ self.queue = ArchiveQueue(self, "Queue")
+ self.failed = ArchiveQueue(self, "Failed")
+
+ self.interval = 60
+ self.extracting = False
+ self.lastPackage = False
+ self.extractors = []
+ self.passwords = []
+ self.repair = False
+
+ try:
+ import send2trash
+
+ except ImportError:
+ self.logDebug("Send2Trash lib not found")
+ self.trashable = False
+
+ else:
+ self.trashable = True
+
+
    def activate(self):
        """Load every usable extractor plugin and resume queued extractions.

        Probes UnRar, SevenZip and UnZip in that order; each usable class is
        appended to self.extractors.  Repair support is enabled only when an
        extractor advertises REPAIR and the user enabled the 'repair' option.
        """
        for p in ("UnRar", "SevenZip", "UnZip"):
            try:
                module = self.core.pluginManager.loadModule("extractor", p)
                klass = getattr(module, p)
                if klass.isUsable():
                    self.extractors.append(klass)
                    if klass.REPAIR:
                        self.repair = self.getConfig('repair')

            except OSError, e:
                # errno 2 (ENOENT): the external extractor binary is not installed
                if e.errno == 2:
                    self.logWarning(_("No %s installed") % p)
                else:
                    self.logWarning(_("Could not activate: %s") % p, e)
                    if self.core.debug:
                        traceback.print_exc()

            except Exception, e:
                self.logWarning(_("Could not activate: %s") % p, e)
                if self.core.debug:
                    traceback.print_exc()

        if self.extractors:
            self.logDebug(*["Found %s %s" % (Extractor.__name__, Extractor.VERSION) for Extractor in self.extractors])
            self.extractQueued() #: Resume unfinished extractions
        else:
            self.logInfo(_("No Extract plugins activated"))
+
+
+ @threaded
+ def extractQueued(self, thread):
+ packages = self.queue.get()
+ while packages:
+ if self.lastPackage: #: called from allDownloadsProcessed
+ self.lastPackage = False
+ if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
+ self.manager.dispatchEvent("all_archives_extracted")
+ self.manager.dispatchEvent("all_archives_processed")
+ else:
+ if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
+ pass
+
+ packages = self.queue.get() #: check for packages added during extraction
+
+
+ @Expose
+ def extractPackage(self, *ids):
+ """ Extract packages with given id"""
+ for id in ids:
+ self.queue.add(id)
+ if not self.getConfig('waitall') and not self.extracting:
+ self.extractQueued()
+
+
+ def packageDeleted(self, pid):
+ self.queue.remove(pid)
+
+
+ def packageFinished(self, pypack):
+ self.queue.add(pypack.id)
+ if not self.getConfig('waitall') and not self.extracting:
+ self.extractQueued()
+
+
+ def allDownloadsProcessed(self):
+ self.lastPackage = True
+ if not self.extracting:
+ self.extractQueued()
+
+
+ @Expose
+ def extract(self, ids, thread=None): #@TODO: Use pypack, not pid to improve method usability
+ if not ids:
+ return False
+
+ self.extracting = True
+
+ processed = []
+ extracted = []
+ failed = []
+
+ toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')
+
+ destination = self.getConfig('destination')
+ subfolder = self.getConfig('subfolder')
+ fullpath = self.getConfig('fullpath')
+ overwrite = self.getConfig('overwrite')
+ renice = self.getConfig('renice')
+ recursive = self.getConfig('recursive')
+ delete = self.getConfig('delete')
+ keepbroken = self.getConfig('keepbroken')
+
+ extensions = [x.lstrip('.').lower() for x in toList(self.getConfig('extensions'))]
+ excludefiles = toList(self.getConfig('excludefiles'))
+
+ if extensions:
+ self.logDebug("Use for extensions: %s" % "|.".join(extensions))
+
+ # reload from txt file
+ self.reloadPasswords()
+
+ download_folder = self.config['general']['download_folder']
+
+ # iterate packages -> extractors -> targets
+ for pid in ids:
+ pypack = self.core.files.getPackage(pid)
+
+ if not pypack:
+ self.queue.remove(pid)
+ continue
+
+ self.logInfo(_("Check package: %s") % pypack.name)
+
+ # determine output folder
+ out = fs_join(download_folder, pypack.folder, destination, "") #: force trailing slash
+
+ if subfolder:
+ out = fs_join(out, pypack.folder)
+
+ if not os.path.exists(out):
+ os.makedirs(out)
+
+ matched = False
+ success = True
+ files_ids = dict((pylink['name'],((fs_join(download_folder, pypack.folder, pylink['name'])), pylink['id'], out)) for pylink \
+ in sorted(pypack.getChildren().itervalues(), key=lambda k: k['name'])).values() #: remove duplicates
+
+ # check as long there are unseen files
+ while files_ids:
+ new_files_ids = []
+
+ if extensions:
+ files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
+ if filter(lambda ext: fname.lower().endswith(ext), extensions)]
+
+ for Extractor in self.extractors:
+ targets = Extractor.getTargets(files_ids)
+ if targets:
+ self.logDebug("Targets for %s: %s" % (Extractor.__class__.__name__, targets))
+ matched = True
+
+ for fname, fid, fout in targets:
+ name = os.path.basename(fname)
+
+ if not os.path.exists(fname):
+ self.logDebug(name, "File not found")
+ continue
+
+ self.logInfo(name, _("Extract to: %s") % fout)
+ try:
+ pyfile = self.core.files.getFile(fid)
+ archive = Extractor(self,
+ fname,
+ fout,
+ fullpath,
+ overwrite,
+ excludefiles,
+ renice,
+ delete,
+ keepbroken,
+ fid)
+
+ thread.addActive(pyfile)
+ archive.init()
+
+ try:
+ new_files = self._extract(pyfile, archive, pypack.password)
+
+ finally:
+ pyfile.setProgress(100)
+ thread.finishFile(pyfile)
+
+ except Exception, e:
+ self.logError(name, e)
+ success = False
+ continue
+
+ # remove processed file and related multiparts from list
+ files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
+ if fname not in archive.getDeleteFiles()]
+ self.logDebug("Extracted files: %s" % new_files)
+ self.setPermissions(new_files)
+
+ for filename in new_files:
+ file = fs_encode(fs_join(os.path.dirname(archive.filename), filename))
+ if not os.path.exists(file):
+ self.logDebug("New file %s does not exists" % filename)
+ continue
+
+ if recursive and os.path.isfile(file):
+ new_files_ids.append((filename, fid, os.path.dirname(filename))) #: append as new target
+
+ self.manager.dispatchEvent("archive_extracted", pyfile, archive)
+
+ files_ids = new_files_ids #: also check extracted files
+
+ if matched:
+ if success:
+ extracted.append(pid)
+ self.manager.dispatchEvent("package_extracted", pypack)
+
+ else:
+ failed.append(pid)
+ self.manager.dispatchEvent("package_extract_failed", pypack)
+
+ self.failed.add(pid)
+ else:
+ self.logInfo(_("No files found to extract"))
+
+ if not matched or not success and subfolder:
+ try:
+ os.rmdir(out)
+
+ except OSError:
+ pass
+
+ self.queue.remove(pid)
+
+ self.extracting = False
+ return True if not failed else False
+
+
+ def _extract(self, pyfile, archive, password):
+ name = os.path.basename(archive.filename)
+
+ pyfile.setStatus("processing")
+
+ encrypted = False
+ try:
+ self.logDebug("Password: %s" % (password or "None provided"))
+ passwords = uniqify([password] + self.getPasswords(False)) if self.getConfig('usepasswordfile') else [password]
+ for pw in passwords:
+ try:
+ if self.getConfig('test') or self.repair:
+ pyfile.setCustomStatus(_("archive testing"))
+ if pw:
+ self.logDebug("Testing with password: %s" % pw)
+ pyfile.setProgress(0)
+ archive.verify(pw)
+ pyfile.setProgress(100)
+ else:
+ archive.check(pw)
+
+ self.addPassword(pw)
+ break
+
+ except PasswordError:
+ if not encrypted:
+ self.logInfo(name, _("Password protected"))
+ encrypted = True
+
+ except CRCError, e:
+ self.logDebug(name, e)
+ self.logInfo(name, _("CRC Error"))
+
+ if self.repair:
+ self.logWarning(name, _("Repairing..."))
+
+ pyfile.setCustomStatus(_("archive repairing"))
+ pyfile.setProgress(0)
+ repaired = archive.repair()
+ pyfile.setProgress(100)
+
+ if not repaired and not self.getConfig('keepbroken'):
+ raise CRCError("Archive damaged")
+
+ self.addPassword(pw)
+ break
+
+ raise CRCError("Archive damaged")
+
+ except ArchiveError, e:
+ raise ArchiveError(e)
+
+ pyfile.setCustomStatus(_("extracting"))
+ pyfile.setProgress(0)
+
+ if not encrypted or not self.getConfig('usepasswordfile'):
+ self.logDebug("Extracting using password: %s" % (password or "None"))
+ archive.extract(password)
+ else:
+ for pw in filter(None, uniqify([password] + self.getPasswords(False))):
+ try:
+ self.logDebug("Extracting using password: %s" % pw)
+
+ archive.extract(pw)
+ self.addPassword(pw)
+ break
+
+ except PasswordError:
+ self.logDebug("Password was wrong")
+ else:
+ raise PasswordError
+
+ pyfile.setProgress(100)
+ pyfile.setStatus("processing")
+
+ delfiles = archive.getDeleteFiles()
+ self.logDebug("Would delete: " + ", ".join(delfiles))
+
+ if self.getConfig('delete'):
+ self.logInfo(_("Deleting %s files") % len(delfiles))
+
+ deltotrash = self.getConfig('deltotrash')
+ for f in delfiles:
+ file = fs_encode(f)
+ if not os.path.exists(file):
+ continue
+
+ if not deltotrash:
+ os.remove(file)
+
+ elif self.trashable:
+ send2trash.send2trash(file)
+
+ else:
+ self.logWarning(_("Unable to move %s to trash") % os.path.basename(f))
+
+ self.logInfo(name, _("Extracting finished"))
+ extracted_files = archive.files or archive.list()
+
+ return extracted_files
+
+ except PasswordError:
+ self.logError(name, _("Wrong password" if password else "No password found"))
+
+ except CRCError, e:
+ self.logError(name, _("CRC mismatch"), e)
+
+ except ArchiveError, e:
+ self.logError(name, _("Archive error"), e)
+
+ except Exception, e:
+ self.logError(name, _("Unknown error"), e)
+ if self.core.debug:
+ traceback.print_exc()
+
+ self.manager.dispatchEvent("archive_extract_failed", pyfile, archive)
+
+ raise Exception(_("Extract failed"))
+
+
+ @Expose
+ def getPasswords(self, reload=True):
+ """ List of saved passwords """
+ if reload:
+ self.reloadPasswords()
+
+ return self.passwords
+
+
+ def reloadPasswords(self):
+ try:
+ passwords = []
+
+ file = fs_encode(self.getConfig('passwordfile'))
+ with open(file) as f:
+ for pw in f.read().splitlines():
+ passwords.append(pw)
+
+ except IOError, e:
+ self.logError(e)
+
+ else:
+ self.passwords = passwords
+
+
+ @Expose
+ def addPassword(self, password):
+ """ Adds a password to saved list"""
+ try:
+ self.passwords = uniqify([password] + self.passwords)
+
+ file = fs_encode(self.getConfig('passwordfile'))
+ with open(file, "wb") as f:
+ for pw in self.passwords:
+ f.write(pw + '\n')
+
+ except IOError, e:
+ self.logError(e)
+
+
+ def setPermissions(self, files):
+ for f in files:
+ if not os.path.exists(f):
+ continue
+
+ try:
+ if self.config['permission']['change_file']:
+ if os.path.isfile(f):
+ os.chmod(f, int(self.config['permission']['file'], 8))
+
+ elif os.path.isdir(f):
+ os.chmod(f, int(self.config['permission']['folder'], 8))
+
+ if self.config['permission']['change_dl'] and os.name != "nt":
+ uid = getpwnam(self.config['permission']['user'])[2]
+ gid = getgrnam(self.config['permission']['group'])[2]
+ os.chown(f, uid, gid)
+
+ except Exception, e:
+ self.logWarning(_("Setting User and Group failed"), e)
diff --git a/pyload/plugin/addon/HotFolder.py b/pyload/plugin/addon/HotFolder.py
new file mode 100644
index 000000000..0137514a8
--- /dev/null
+++ b/pyload/plugin/addon/HotFolder.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import os
+import time
+
+from shutil import move
+
+from pyload.plugin.Addon import Addon
+from pyload.utils import fs_encode, fs_join
+
+
class HotFolder(Addon):
    """Watch a folder (and optionally a link file) and feed new containers/links
    into pyLoad as packages."""

    __name__    = "HotFolder"
    __type__    = "addon"
    __version__ = "0.14"

    __config__ = [("folder"    , "str" , "Folder to observe"    , "container"),
                  ("watch_file", "bool", "Observe link file"    , False      ),
                  ("keep"      , "bool", "Keep added containers", True       ),
                  ("file"      , "str" , "Link file"            , "links.txt")]

    __description__ = """Observe folder and file for changes and add container and links"""
    __license__     = "GPLv3"
    __authors__     = [("RaNaN", "RaNaN@pyload.de")]


    def setup(self):
        # poll interval in seconds for the periodical() callback
        self.interval = 30


    def activate(self):
        self.initPeriodical()


    def periodical(self):
        """Scan the watched file/folder and hand any new content to the API."""
        folder = fs_encode(self.getConfig('folder'))
        file = fs_encode(self.getConfig('file'))

        try:
            if not os.path.isdir(os.path.join(folder, "finished")):
                os.makedirs(os.path.join(folder, "finished"))

            if self.getConfig('watch_file'):
                # "a+" creates the file if missing; seek(0) to read from the start
                with open(file, "a+") as f:
                    f.seek(0)
                    content = f.read().strip()

                    if content:
                        # truncate the watched file now that its content is captured
                        f = open(file, "wb")
                        f.close()

                        name = "%s_%s.txt" % (file, time.strftime("%H-%M-%S_%d%b%Y"))

                        # archive the captured links into the finished folder
                        with open(fs_join(folder, "finished", name), "wb") as f:
                            f.write(content)

                        # NOTE(review): f has been rebound twice above; f.name here is
                        # the archived file inside "finished" — confirm this is intended.
                        self.core.api.addPackage(f.name, [f.name], 1)

            for f in os.listdir(folder):
                path = os.path.join(folder, f)

                # skip directories and backup/hidden/commented entries
                if not os.path.isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
                    continue

                newpath = os.path.join(folder, "finished", f if self.getConfig('keep') else "tmp_" + f)
                move(path, newpath)

                self.logInfo(_("Added %s from HotFolder") % f)
                self.core.api.addPackage(f, [newpath], 1)

        except (IOError, OSError), e:
            self.logError(e)
diff --git a/pyload/plugin/addon/IRCInterface.py b/pyload/plugin/addon/IRCInterface.py
new file mode 100644
index 000000000..170055ee8
--- /dev/null
+++ b/pyload/plugin/addon/IRCInterface.py
@@ -0,0 +1,430 @@
+# -*- coding: utf-8 -*-
+
+import re
+import socket
+import ssl
+import time
+import traceback
+
+from pycurl import FORM_FILE
+from select import select
+from threading import Thread
+
+from pyload.api import PackageDoesNotExists, FileDoesNotExists
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Addon
+from pyload.utils import formatSize
+
+
+class IRCInterface(Thread, Addon):
+ __name__ = "IRCInterface"
+ __type__ = "addon"
+ __version__ = "0.13"
+
+ __config__ = [("host" , "str" , "IRC-Server Address" , "Enter your server here!"),
+ ("port" , "int" , "IRC-Server Port" , 6667 ),
+ ("ident" , "str" , "Clients ident" , "pyload-irc" ),
+ ("realname" , "str" , "Realname" , "pyload-irc" ),
+ ("ssl" , "bool", "Use SSL" , False ),
+ ("nick" , "str" , "Nickname the Client will take" , "pyLoad-IRC" ),
+ ("owner" , "str" , "Nickname the Client will accept commands from", "Enter your nick here!" ),
+ ("info_file", "bool", "Inform about every file finished" , False ),
+ ("info_pack", "bool", "Inform about every package finished" , True ),
+ ("captcha" , "bool", "Send captcha requests" , True )]
+
+ __description__ = """Connect to irc and let owner perform different tasks"""
+ __license__ = "GPLv3"
+ __authors__ = [("Jeix", "Jeix@hasnomail.com")]
+
+
+ def __init__(self, core, manager):
+ Thread.__init__(self)
+ Addon.__init__(self, core, manager)
+ self.setDaemon(True)
+
+
+ def activate(self):
+ self.abort = False
+ self.more = []
+ self.new_package = {}
+
+ self.start()
+
+
+ def packageFinished(self, pypack):
+ try:
+ if self.getConfig('info_pack'):
+ self.response(_("Package finished: %s") % pypack.name)
+ except Exception:
+ pass
+
+
+ def downloadFinished(self, pyfile):
+ try:
+ if self.getConfig('info_file'):
+ self.response(
+ _("Download finished: %(name)s @ %(plugin)s ") % {"name": pyfile.name, "plugin": pyfile.pluginname})
+ except Exception:
+ pass
+
+
+ def captchaTask(self, task):
+ if self.getConfig('captcha') and task.isTextual():
+ task.handler.append(self)
+ task.setWaiting(60)
+
+ html = getURL("http://www.freeimagehosting.net/upload.php",
+ post={"attached": (FORM_FILE, task.captchaFile)}, multipart=True)
+
+ url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", html).group(1)
+ self.response(_("New Captcha Request: %s") % url)
+ self.response(_("Answer with 'c %s text on the captcha'") % task.id)
+
+
+ def run(self):
+ # connect to IRC etc.
+ self.sock = socket.socket()
+ host = self.getConfig('host')
+ self.sock.connect((host, self.getConfig('port')))
+
+ if self.getConfig('ssl'):
+ self.sock = ssl.wrap_socket(self.sock, cert_reqs=ssl.CERT_NONE) #@TODO: support certificate
+
+ nick = self.getConfig('nick')
+ self.sock.send("NICK %s\r\n" % nick)
+ self.sock.send("USER %s %s bla :%s\r\n" % (nick, host, nick))
+ for t in self.getConfig('owner').split():
+ if t.strip().startswith("#"):
+ self.sock.send("JOIN %s\r\n" % t.strip())
+ self.logInfo(_("Connected to"), host)
+ self.logInfo(_("Switching to listening mode!"))
+ try:
+ self.main_loop()
+
+ except IRCError, ex:
+ self.sock.send("QUIT :byebye\r\n")
+ traceback.print_exc()
+ self.sock.close()
+
+
+ def main_loop(self):
+ readbuffer = ""
+ while True:
+ time.sleep(1)
+ fdset = select([self.sock], [], [], 0)
+ if self.sock not in fdset[0]:
+ continue
+
+ if self.abort:
+ raise IRCError("quit")
+
+ readbuffer += self.sock.recv(1024)
+ temp = readbuffer.split("\n")
+ readbuffer = temp.pop()
+
+ for line in temp:
+ line = line.rstrip()
+ first = line.split()
+
+ if first[0] == "PING":
+ self.sock.send("PONG %s\r\n" % first[1])
+
+ if first[0] == "ERROR":
+ raise IRCError(line)
+
+ msg = line.split(None, 3)
+ if len(msg) < 4:
+ continue
+
+ msg = {
+ "origin": msg[0][1:],
+ "action": msg[1],
+ "target": msg[2],
+ "text": msg[3][1:]
+ }
+
+ self.handle_events(msg)
+
+
+ def handle_events(self, msg):
+ if not msg['origin'].split("!", 1)[0] in self.getConfig('owner').split():
+ return
+
+ if msg['target'].split("!", 1)[0] != self.getConfig('nick'):
+ return
+
+ if msg['action'] != "PRIVMSG":
+ return
+
+ # HANDLE CTCP ANTI FLOOD/BOT PROTECTION
+ if msg['text'] == "\x01VERSION\x01":
+ self.logDebug("Sending CTCP VERSION")
+ self.sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
+ return
+ elif msg['text'] == "\x01TIME\x01":
+ self.logDebug("Sending CTCP TIME")
+ self.sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
+ return
+ elif msg['text'] == "\x01LAG\x01":
+ self.logDebug("Received CTCP LAG") #: don't know how to answer
+ return
+
+ trigger = "pass"
+ args = None
+
+ try:
+ temp = msg['text'].split()
+ trigger = temp[0]
+ if len(temp) > 1:
+ args = temp[1:]
+ except Exception:
+ pass
+
+ handler = getattr(self, "event_%s" % trigger, self.event_pass)
+ try:
+ res = handler(args)
+ for line in res:
+ self.response(line, msg['origin'])
+ except Exception, e:
+ self.logError(e)
+
+
+ def response(self, msg, origin=""):
+ if origin == "":
+ for t in self.getConfig('owner').split():
+ self.sock.send("PRIVMSG %s :%s\r\n" % (t.strip(), msg))
+ else:
+ self.sock.send("PRIVMSG %s :%s\r\n" % (origin.split("!", 1)[0], msg))
+
+
+ #### Events
+
+ def event_pass(self, args):
+ return []
+
+
+ def event_status(self, args):
+ downloads = self.core.api.statusDownloads()
+ if not downloads:
+ return ["INFO: There are no active downloads currently."]
+
+ temp_progress = ""
+ lines = ["ID - Name - Status - Speed - ETA - Progress"]
+ for data in downloads:
+
+ if data.status == 5:
+ temp_progress = data.format_wait
+ else:
+ temp_progress = "%d%% (%s)" % (data.percent, data.format_size)
+
+ lines.append("#%d - %s - %s - %s - %s - %s" %
+ (
+ data.fid,
+ data.name,
+ data.statusmsg,
+ "%s/s" % formatSize(data.speed),
+ "%s" % data.format_eta,
+ temp_progress
+ ))
+ return lines
+
+
+ def event_queue(self, args):
+ ps = self.core.api.getQueueData()
+
+ if not ps:
+ return ["INFO: There are no packages in queue."]
+
+ lines = []
+ for pack in ps:
+ lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
+
+ return lines
+
+
+ def event_collector(self, args):
+ ps = self.core.api.getCollectorData()
+ if not ps:
+ return ["INFO: No packages in collector!"]
+
+ lines = []
+ for pack in ps:
+ lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
+
+ return lines
+
+
+ def event_info(self, args):
+ if not args:
+ return ["ERROR: Use info like this: info <id>"]
+
+ info = None
+ try:
+ info = self.core.api.getFileData(int(args[0]))
+
+ except FileDoesNotExists:
+ return ["ERROR: Link doesn't exists."]
+
+ return ['LINK #%s: %s (%s) [%s][%s]' % (info.fid, info.name, info.format_size, info.statusmsg, info.plugin)]
+
+
+ def event_packinfo(self, args):
+ if not args:
+ return ["ERROR: Use packinfo like this: packinfo <id>"]
+
+ lines = []
+ pack = None
+ try:
+ pack = self.core.api.getPackageData(int(args[0]))
+
+ except PackageDoesNotExists:
+ return ["ERROR: Package doesn't exists."]
+
+ id = args[0]
+
+ self.more = []
+
+ lines.append('PACKAGE #%s: "%s" with %d links' % (id, pack.name, len(pack.links)))
+ for pyfile in pack.links:
+ self.more.append('LINK #%s: %s (%s) [%s][%s]' % (pyfile.fid, pyfile.name, pyfile.format_size,
+ pyfile.statusmsg, pyfile.plugin))
+
+ if len(self.more) < 6:
+ lines.extend(self.more)
+ self.more = []
+ else:
+ lines.extend(self.more[:6])
+ self.more = self.more[6:]
+ lines.append("%d more links do display." % len(self.more))
+
+ return lines
+
+
+ def event_more(self, args):
+ if not self.more:
+ return ["No more information to display."]
+
+ lines = self.more[:6]
+ self.more = self.more[6:]
+ lines.append("%d more links do display." % len(self.more))
+
+ return lines
+
+
+ def event_start(self, args):
+ self.core.api.unpauseServer()
+ return ["INFO: Starting downloads."]
+
+
+ def event_stop(self, args):
+ self.core.api.pauseServer()
+ return ["INFO: No new downloads will be started."]
+
+
    def event_add(self, args):
        """Add links to an existing package (numeric id) or create a new package.

        NOTE(review): the existing-package branch never actually adds the links
        (see the TODO below), and pack['name'] subscripts the API result while
        other handlers use attribute access (pack.name) — verify against the API.
        The broad except also doubles as the "pack is a name, not an id" path.
        """
        if len(args) < 2:
            return ['ERROR: Add links like this: "add <packagename|id> links". ',
                    "This will add the link <link> to to the package <package> / the package with id <id>!"]

        pack = args[0].strip()
        links = [x.strip() for x in args[1:]]

        # NOTE(review): both counters are never updated or reported
        count_added = 0
        count_failed = 0
        try:
            id = int(pack)
            pack = self.core.api.getPackageData(id)
            if not pack:
                return ["ERROR: Package doesn't exists."]

            #TODO add links

            return ["INFO: Added %d links to Package %s [#%d]" % (len(links), pack['name'], id)]

        except Exception:
            # create new package
            id = self.core.api.addPackage(pack, links, 1)
            return ["INFO: Created new Package %s [#%d] with %d links." % (pack, id, len(links))]
+
+
+ def event_del(self, args):
+ if len(args) < 2:
+ return ["ERROR: Use del command like this: del -p|-l <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
+
+ if args[0] == "-p":
+ ret = self.core.api.deletePackages(map(int, args[1:]))
+ return ["INFO: Deleted %d packages!" % len(args[1:])]
+
+ elif args[0] == "-l":
+ ret = self.core.api.delLinks(map(int, args[1:]))
+ return ["INFO: Deleted %d links!" % len(args[1:])]
+
+ else:
+ return ["ERROR: Use del command like this: del <-p|-l> <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
+
+
+ def event_push(self, args):
+ if not args:
+ return ["ERROR: Push package to queue like this: push <package id>"]
+
+ id = int(args[0])
+ try:
+ info = self.core.api.getPackageInfo(id)
+ except PackageDoesNotExists:
+ return ["ERROR: Package #%d does not exist." % id]
+
+ self.core.api.pushToQueue(id)
+ return ["INFO: Pushed package #%d to queue." % id]
+
+
+ def event_pull(self, args):
+ if not args:
+ return ["ERROR: Pull package from queue like this: pull <package id>."]
+
+ id = int(args[0])
+ if not self.core.api.getPackageData(id):
+ return ["ERROR: Package #%d does not exist." % id]
+
+ self.core.api.pullFromQueue(id)
+ return ["INFO: Pulled package #%d from queue to collector." % id]
+
+
+ def event_c(self, args):
+ """ captcha answer """
+ if not args:
+ return ["ERROR: Captcha ID missing."]
+
+ task = self.core.captchaManager.getTaskByID(args[0])
+ if not task:
+ return ["ERROR: Captcha Task with ID %s does not exists." % args[0]]
+
+ task.setResult(" ".join(args[1:]))
+ return ["INFO: Result %s saved." % " ".join(args[1:])]
+
+
+ def event_help(self, args):
+ lines = ["The following commands are available:",
+ "add <package|packid> <links> [...] Adds link to package. (creates new package if it does not exist)",
+ "queue Shows all packages in the queue",
+ "collector Shows all packages in collector",
+ "del -p|-l <id> [...] Deletes all packages|links with the ids specified",
+ "info <id> Shows info of the link with id <id>",
+ "packinfo <id> Shows info of the package with id <id>",
+ "more Shows more info when the result was truncated",
+ "start Starts all downloads",
+ "stop Stops the download (but not abort active downloads)",
+ "push <id> Push package to queue",
+ "pull <id> Pull package from queue",
+ "status Show general download status",
+ "help Shows this help message"]
+ return lines
+
+
class IRCError(Exception):
    """Raised inside the IRC thread to unwind the listen loop."""

    def __init__(self, value):
        self.value = value


    def __str__(self):
        return repr(self.value)
diff --git a/pyload/plugin/addon/JustPremium.py b/pyload/plugin/addon/JustPremium.py
new file mode 100644
index 000000000..b878f302d
--- /dev/null
+++ b/pyload/plugin/addon/JustPremium.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugin.Addon import Addon
+
+
class JustPremium(Addon):
    """Drop newly added links whose hoster is served by a premium account
    or a premium multihoster, keeping only those plugins."""

    __name__    = "JustPremium"
    __type__    = "addon"
    __version__ = "0.22"

    __config__ = [("excluded", "str", "Exclude hosters (comma separated)", ""),
                  ("included", "str", "Include hosters (comma separated)", "")]

    __description__ = """Remove not-premium links from added urls"""
    __license__     = "GPLv3"
    __authors__     = [("mazleu" , "mazleica@gmail.com"),
                       ("Walter Purcaro", "vuolter@gmail.com" ),
                       ("immenz" , "immenz@gmx.net" )]


    event_list = ["linksAdded"]


    def linksAdded(self, links, pid):
        """Remove links (in place) that do not belong to a premium-capable plugin."""
        hosterdict = self.core.pluginManager.hosterPlugins
        linkdict = self.core.api.checkURLs(links)

        # plugin names for which a valid premium account exists
        premiumplugins = set(account.type for account in self.core.api.getAccounts(False) \
                             if account.valid and account.premium)
        # hosters redirected ('new_name') to one of those premium plugins
        multihosters = set(hoster for hoster in self.core.pluginManager.hosterPlugins \
                           if 'new_name' in hosterdict[hoster] \
                           and hosterdict[hoster]['new_name'] in premiumplugins)

        # normalize the user's domain lists to plugin-style CamelCase names
        excluded = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
                       self.getConfig('excluded').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))
        included = map(lambda domain: "".join(part.capitalize() for part in re.split(r'(\.|\d+)', domain) if part != '.'),
                       self.getConfig('included').replace(' ', '').replace(',', '|').replace(';', '|').split('|'))

        hosterlist = (premiumplugins | multihosters).union(excluded).difference(included)

        #: Found at least one hoster with account or multihoster
        if not any( True for pluginname in linkdict if pluginname in hosterlist ):
            return

        for pluginname in set(linkdict.keys()) - hosterlist:
            self.logInfo(_("Remove links of plugin: %s") % pluginname)
            for link in linkdict[pluginname]:
                self.logDebug("Remove link: %s" % link)
                links.remove(link)
diff --git a/pyload/plugin/addon/MergeFiles.py b/pyload/plugin/addon/MergeFiles.py
new file mode 100644
index 000000000..ee6a86d9f
--- /dev/null
+++ b/pyload/plugin/addon/MergeFiles.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import os
+import re
+import traceback
+
+from pyload.plugin.Addon import Addon, threaded
+from pyload.utils import fs_join
+
+
class MergeFiles(Addon):
    """Merge hjsplit parts (*.001, *.002, ...) back into whole files when a
    package finishes."""

    __name__    = "MergeFiles"
    __type__    = "addon"
    __version__ = "0.14"

    __config__ = [("activated", "bool", "Activated", True)]

    __description__ = """Merges parts splitted with hjsplit"""
    __license__     = "GPLv3"
    __authors__     = [("and9000", "me@has-no-mail.com")]


    BUFFER_SIZE = 4096


    @threaded
    def packageFinished(self, pack):
        """Group the package's split parts by base name and concatenate each group."""
        files = {}     #: base name -> list of part names
        fid_dict = {}  #: part name -> file id
        for fid, data in pack.getChildren().iteritems():
            if re.search(r"\.\d{3}$", data['name']):  #: fix: raw string (was "\.\d{3}$")
                base = data['name'][:-4]
                files.setdefault(base, []).append(data['name'])
                fid_dict[data['name']] = fid

        # sort each group once instead of after every append
        for file_list in files.itervalues():
            file_list.sort()

        download_folder = self.config['general']['download_folder']

        if self.config['general']['folder_per_package']:
            download_folder = fs_join(download_folder, pack.folder)

        for name, file_list in files.iteritems():
            self.logInfo(_("Starting merging of"), name)

            with open(fs_join(download_folder, name), "wb") as final_file:
                for splitted_file in file_list:
                    self.logDebug("Merging part", splitted_file)

                    pyfile = self.core.files.getFile(fid_dict[splitted_file])

                    pyfile.setStatus("processing")

                    try:
                        with open(fs_join(download_folder, splitted_file), "rb") as s_file:
                            size_written = 0
                            s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file)))

                            while True:
                                f_buffer = s_file.read(self.BUFFER_SIZE)
                                if not f_buffer:
                                    break
                                final_file.write(f_buffer)
                                #: fix: count actual bytes written; adding BUFFER_SIZE
                                #: overshot the progress on the final short chunk
                                size_written += len(f_buffer)
                                pyfile.setProgress((size_written * 100) / s_file_size)

                        self.logDebug("Finished merging part", splitted_file)

                    except Exception:
                        traceback.print_exc()

                    finally:
                        pyfile.setProgress(100)
                        pyfile.setStatus("finished")
                        pyfile.release()

            self.logInfo(_("Finished merging of"), name)
diff --git a/pyload/plugin/addon/MultiHome.py b/pyload/plugin/addon/MultiHome.py
new file mode 100644
index 000000000..03974d6c6
--- /dev/null
+++ b/pyload/plugin/addon/MultiHome.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+
+import time
+
+from pyload.plugin.Addon import Addon
+
+
class MultiHome(Addon):
    __name__    = "MultiHome"
    __type__    = "addon"
    __version__ = "0.12"

    __config__ = [("interfaces", "str", "Interfaces", "None")]

    __description__ = """Ip address changer"""
    __license__     = "GPLv3"
    __authors__     = [("mkaay", "mkaay@mkaay.de")]


    def setup(self):
        """Build the interface list from config; fall back to the global
        download interface when nothing usable is configured."""
        self.register   = {}
        self.interfaces = []

        self.parseInterfaces(self.getConfig('interfaces').split(";"))

        if not self.interfaces:
            self.parseInterfaces([self.config['download']['interface']])
            self.setConfig("interfaces", self.toConfig())


    def toConfig(self):
        """Serialize the known interface addresses back to config format."""
        return ";".join(i.adress for i in self.interfaces)


    def parseInterfaces(self, interfaces):
        """Append an Interface for every non-empty, non-"none" entry."""
        for entry in interfaces:
            if entry and str(entry).lower() != "none":
                self.interfaces.append(Interface(entry))


    def activate(self):
        """Wrap the request factory so each new request is bound to the
        least recently used interface for its plugin/account pair."""
        factory        = self.core.requestFactory
        origGetRequest = factory.getRequest

        def getRequest(pluginName, account=None):
            chosen = self.bestInterface(pluginName, account)
            if chosen:
                chosen.useFor(pluginName, account)
                factory.iface = lambda: chosen.adress
                self.logDebug("Using address", chosen.adress)
            return origGetRequest(pluginName, account)

        factory.getRequest = getRequest


    def bestInterface(self, pluginName, account):
        """Return the interface with the oldest last access for this
        plugin/account pair, or None when no interfaces are known."""
        best = None
        for candidate in self.interfaces:
            if best is None or candidate.lastPluginAccess(pluginName, account) < best.lastPluginAccess(pluginName, account):
                best = candidate
        return best
+
+
class Interface(object):
    """A single network interface (address) with a per-(plugin, account)
    usage history, used by MultiHome to pick the least recently used one."""

    def __init__(self, adress):
        #: NOTE(review): "adress" (sic) is part of the public interface --
        #  it is read by MultiHome.toConfig/activate, so the misspelling stays.
        self.adress  = adress
        self.history = {}  #: (pluginName, account) -> unix time of last use


    def lastPluginAccess(self, pluginName, account):
        """Return the unix time this interface was last used for the given
        plugin/account pair, or 0 when it was never used."""
        #: idiom: single dict lookup with default instead of LBYL double lookup
        return self.history.get((pluginName, account), 0)


    def useFor(self, pluginName, account):
        """Record a use of this interface for the given plugin/account pair."""
        self.history[(pluginName, account)] = time.time()


    def __repr__(self):
        return "<Interface - %s>" % self.adress
diff --git a/pyload/plugin/addon/RestartFailed.py b/pyload/plugin/addon/RestartFailed.py
new file mode 100644
index 000000000..0b8f4d077
--- /dev/null
+++ b/pyload/plugin/addon/RestartFailed.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugin.Addon import Addon
+
+
class RestartFailed(Addon):
    __name__    = "RestartFailed"
    __type__    = "addon"
    __version__ = "1.58"

    __config__ = [("activated", "bool", "Activated"                , True),
                  ("interval" , "int" , "Check interval in minutes", 90  )]

    __description__ = """Restart all the failed downloads in queue"""
    __license__     = "GPLv3"
    __authors__     = [("Walter Purcaro", "vuolter@gmail.com")]


    MIN_CHECK_INTERVAL = 15 * 60  #: lower bound for the check interval (15 minutes)


    def periodical(self):
        """Scheduled callback: restart every failed download in the queue."""
        self.logDebug(_("Restart failed downloads"))
        self.core.api.restartFailed()


    def activate(self):
        """Clamp the configured interval (given in minutes) to the minimum
        and start the periodic check."""
        configured    = self.getConfig('interval') * 60
        self.interval = max(self.MIN_CHECK_INTERVAL, configured)
        self.initPeriodical()
diff --git a/pyload/plugin/addon/SkipRev.py b/pyload/plugin/addon/SkipRev.py
new file mode 100644
index 000000000..1c42ddfd8
--- /dev/null
+++ b/pyload/plugin/addon/SkipRev.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from types import MethodType
+from urllib import unquote
+from urlparse import urlparse
+
+from pyload.datatype.File import PyFile
+from pyload.plugin.Addon import Addon
+from pyload.plugin.Plugin import SkipDownload
+
+
class SkipRev(Addon):
    __name__    = "SkipRev"
    __type__    = "addon"
    __version__ = "0.29"

    __config__ = [("mode"     , "Auto;Manual", "Choose recovery archives to skip"               , "Auto"),
                  ("revtokeep", "int"        , "Number of recovery archives to keep for package", 0     )]

    __description__ = """Skip recovery archives (.rev)"""
    __license__     = "GPLv3"
    __authors__     = [("Walter Purcaro", "vuolter@gmail.com")]


    @staticmethod
    def _setup(self):
        """Replacement for the hoster plugin's setup(): run the original
        setup, then abort when the file was marked skipped.

        Note: this is injected as a bound method of the hoster plugin
        (see downloadPreparing), so "self" is the hoster plugin instance,
        not this addon.
        """
        self.pyfile.plugin._setup()
        if self.pyfile.hasStatus("skipped"):
            raise SkipDownload(self.pyfile.statusname or self.pyfile.pluginname)


    def _name(self, pyfile):
        """Best-effort resolution of the real file name of a pyfile."""
        if hasattr(pyfile.pluginmodule, "getInfo"):  #@NOTE: getInfo is deprecated in 0.4.10
            return pyfile.pluginmodule.getInfo([pyfile.url]).next()[0]
        else:
            self.logWarning("Unable to grab file name")
            return urlparse(unquote(pyfile.url)).path.split('/')[-1]


    def _pyfile(self, link):
        """Build a temporary PyFile object from FileData values."""
        return PyFile(self.core.files,
                      link.fid,
                      link.url,
                      link.name,
                      link.size,
                      link.status,
                      link.error,
                      link.plugin,
                      link.packageID,
                      link.order)


    def downloadPreparing(self, pyfile):
        """Mark surplus .rev parts of a package as skipped before download."""
        name = self._name(pyfile)

        #: bugfix: compare the "unskipped" marker by value -- "is" checked
        #  string identity, which never reliably matches a fresh _() result
        if pyfile.statusname == _("unskipped") or not name.endswith(".rev") or ".part" not in name:
            return

        revtokeep = -1 if self.getConfig('mode') == "Auto" else self.getConfig('revtokeep')

        if revtokeep:
            status_list = (1, 4, 8, 9, 14) if revtokeep < 0 else (1, 3, 4, 8, 9, 14)
            #: robustness: re.escape handles any regex metacharacter in the
            #  base name, not only dots
            pyname = re.compile(r'%s\.part\d+\.rev$' % re.escape(name.rsplit('.', 2)[0]))

            queued = sum(1 for link in self.core.api.getPackageData(pyfile.package().id).links
                         if link.status not in status_list and pyname.match(link.name))

            if not queued or queued < revtokeep:  #: keep one rev at least in auto mode
                return

        pyfile.setCustomStatus("SkipRev", "skipped")

        if not hasattr(pyfile.plugin, "_setup"):
            # Work-around: inject status checker inside the preprocessing routine of the plugin
            pyfile.plugin._setup = pyfile.plugin.setup
            pyfile.plugin.setup  = MethodType(self._setup, pyfile.plugin)


    def downloadFailed(self, pyfile):
        """When a .rar/.rev download fails, re-queue one previously skipped
        .rev part of the same set as a replacement."""
        #: Check if pyfile is still "failed" --
        #  it might have been restarted in the meantime
        if pyfile.status != 8 or pyfile.name.rsplit('.', 1)[-1].strip() not in ("rar", "rev"):
            return

        revtokeep = -1 if self.getConfig('mode') == "Auto" else self.getConfig('revtokeep')

        if not revtokeep:
            return

        pyname = re.compile(r'%s\.part\d+\.rev$' % re.escape(pyfile.name.rsplit('.', 2)[0]))

        for link in self.core.api.getPackageData(pyfile.package().id).links:
            #: bugfix: compare status by value -- "is 4" relied on CPython
            #  small-int identity
            if link.status == 4 and pyname.match(link.name):  #: 4 == skipped
                pylink = self._pyfile(link)

                if revtokeep > -1 or pyfile.name.endswith(".rev"):
                    pylink.setStatus("queued")
                else:
                    pylink.setCustomStatus(_("unskipped"), "queued")

                self.core.files.save()
                pylink.release()
                return
diff --git a/pyload/plugin/addon/UnSkipOnFail.py b/pyload/plugin/addon/UnSkipOnFail.py
new file mode 100644
index 000000000..f81066daa
--- /dev/null
+++ b/pyload/plugin/addon/UnSkipOnFail.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+from pyload.datatype.File import PyFile
+from pyload.plugin.Addon import Addon
+
+
class UnSkipOnFail(Addon):
    __name__    = "UnSkipOnFail"
    __type__    = "addon"
    __version__ = "0.05"

    __config__ = [("activated", "bool", "Activated", True)]

    __description__ = """Restart skipped duplicates when download fails"""
    __license__     = "GPLv3"
    __authors__     = [("Walter Purcaro", "vuolter@gmail.com")]


    def downloadFailed(self, pyfile):
        """On a failed download, look for a skipped duplicate link and
        re-queue it."""
        #: Check if pyfile is still "failed" --
        #  it might have been restarted in the meantime
        if pyfile.status != 8:
            return

        msg = _("Looking for skipped duplicates of: %s (pid:%s)")
        self.logInfo(msg % (pyfile.name, pyfile.package().id))

        dup = self.findDuplicate(pyfile)
        if dup:
            self.logInfo(_("Queue found duplicate: %s (pid:%s)") % (dup.name, dup.packageID))

            #: Requeue the duplicate: create a temporary PyFile object from
            #  the FileData values, set its status to "queued" and tell the
            #  core files-manager to save its data.
            #: bugfix: the original read "pylink = _pyfile(link)" -- "link"
            #  is undefined here and _pyfile was unqualified (NameError)
            pylink = self._pyfile(dup)

            pylink.setCustomStatus(_("unskipped"), "queued")

            self.core.files.save()
            pylink.release()

        else:
            self.logInfo(_("No duplicates found"))


    def findDuplicate(self, pyfile):
        """Search all queued packages for a skipped duplicate of `pyfile`.

        A duplicate is a link that would overwrite `pyfile`: it lives in a
        package with the same folder and has the same file name, but is not
        `pyfile` itself. Only links with status "skipped" (4) are considered.
        Returns the first matching FileData link, or None.
        """
        queue = self.core.api.getQueue()  #: get packages (w/o files, as most file data is useless here)

        for package in queue:
            #: check if package-folder equals pyfile's package folder
            if package.folder != pyfile.package().folder:
                continue

            #: now get packaged data w/ files/links
            pdata = self.core.api.getPackageData(package.pid)
            for link in pdata.links:
                #: consider only skipped links (status 4)
                if link.status != 4:
                    continue

                #: same file name, but not pyfile itself
                if link.name == pyfile.name and link.fid != pyfile.id:
                    return link


    def _pyfile(self, link):
        """Build a temporary PyFile object from FileData values."""
        return PyFile(self.core.files,
                      link.fid,
                      link.url,
                      link.name,
                      link.size,
                      link.status,
                      link.error,
                      link.plugin,
                      link.packageID,
                      link.order)
diff --git a/pyload/plugin/addon/UpdateManager.py b/pyload/plugin/addon/UpdateManager.py
new file mode 100644
index 000000000..c5bee16a1
--- /dev/null
+++ b/pyload/plugin/addon/UpdateManager.py
@@ -0,0 +1,308 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import os
+import re
+import sys
+import time
+
+from operator import itemgetter
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugin.Addon import Expose, Addon, threaded
+from pyload.utils import fs_join
+from pyload import __status_code__ as release_status
+
+
+# Case-sensitive os.path.exists
def exists(path):
    """Case-sensitive variant of os.path.exists.

    On Windows os.path.exists is case-insensitive, so additionally check
    that the exact name appears in its parent directory's listing.
    """
    if not os.path.exists(path):
        return False
    if os.name != 'nt':
        return True
    dirname, basename = os.path.split(path)
    return basename in os.listdir(dirname)
+
+
class UpdateManager(Addon):
    __name__    = "UpdateManager"
    __type__    = "addon"
    __version__ = "0.50"

    __config__ = [("activated"    , "bool", "Activated"                                , True ),
                  ("checkinterval", "int" , "Check interval in hours"                  , 8    ),
                  ("autorestart"  , "bool", "Auto-restart pyLoad when required"        , True ),
                  ("checkonstart" , "bool", "Check for updates on startup"             , True ),
                  ("checkperiod"  , "bool", "Check for updates periodically"           , True ),
                  ("reloadplugins", "bool", "Monitor plugin code changes in debug mode", True ),
                  ("nodebugupdate", "bool", "Don't update plugins in debug mode"       , False)]

    __description__ = """ Check for updates """
    __license__     = "GPLv3"
    __authors__     = [("Walter Purcaro", "vuolter@gmail.com")]

    #: update server is only queried on release builds (status code 5)
    SERVER_URL         = "http://updatemanager.pyload.org" if release_status == 5 else None
    MIN_CHECK_INTERVAL = 3 * 60 * 60  #: 3 hours

    def activate(self):
        """Run a startup check (when configured) and start the periodic check."""
        if self.checkonstart:
            self.update()

        self.initPeriodical()

    def setup(self):
        """Initialize state; pause the server while a startup check is pending."""
        self.interval = 10
        self.info     = {'pyload': False, 'version': None, 'plugins': False, 'last_check': time.time()}
        self.mtimes   = {}  #: store modification time for each plugin

        if self.getConfig('checkonstart'):
            self.core.api.pauseServer()  #: unpaused again once update() finishes
            self.checkonstart = True
        else:
            self.checkonstart = False

    def periodical(self):
        """Scheduled callback: reload changed plugins (debug mode) and run
        the update check when its interval has elapsed."""
        if self.core.debug:
            if self.getConfig('reloadplugins'):
                self.autoreloadPlugins()

            if self.getConfig('nodebugupdate'):
                return

        if self.getConfig('checkperiod') \
           and time.time() - max(self.MIN_CHECK_INTERVAL, self.getConfig('checkinterval') * 60 * 60) > self.info['last_check']:
            self.update()

    @Expose
    def autoreloadPlugins(self):
        """ reload and reindex all modified plugins """
        modules = filter(
            lambda m: m and (m.__name__.startswith("module.plugins.") or
                             m.__name__.startswith("userplugins.")) and
                      m.__name__.count(".") >= 2, sys.modules.itervalues()
        )

        reloads = []

        for m in modules:
            root, type, name = m.__name__.rsplit(".", 2)
            id = (type, name)
            if type in self.core.pluginManager.plugins:
                f = m.__file__.replace(".pyc", ".py")
                if not os.path.isfile(f):
                    continue

                mtime = os.stat(f).st_mtime

                if id not in self.mtimes:
                    self.mtimes[id] = mtime
                elif self.mtimes[id] < mtime:
                    reloads.append(id)
                    self.mtimes[id] = mtime

        #: idiom: bool(...) instead of "True if ... else False"
        return bool(self.core.pluginManager.reloadPlugins(reloads))

    def server_response(self):
        """Return the update server's response as a list of lines, or None
        when it could not be reached."""
        try:
            return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()

        except Exception:
            self.logWarning(_("Unable to retrieve server to get updates"))

    @Expose
    @threaded
    def update(self):
        """ check for updates """

        self.core.api.pauseServer()

        #: bugfix: compare by value -- "is 2" relied on CPython small-int identity
        if self._update() == 2 and self.getConfig('autorestart'):
            self.core.api.restart()
        else:
            self.core.api.unpauseServer()

    def _update(self):
        """Process the server response and apply pyLoad/plugin updates.

        Exit codes:
          0 = No plugin updated
          1 = Plugins updated
          2 = Plugins updated, but restart required
          3 = No plugin updated, new pyLoad version available
        """
        data = self.server_response()

        self.info['last_check'] = time.time()

        if not data:
            exitcode = 0

        elif data[0] == "None":
            self.logInfo(_("No new pyLoad version available"))
            exitcode = self._updatePlugins(data[1:])

        #: bugfix: an "elif onlyplugin:" branch referenced an undefined name
        #  here (NameError on any new-version response); the dead branch was
        #  removed

        else:
            self.logInfo(_("*** New pyLoad Version %s available ***") % data[0])
            self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***"))
            self.info['pyload']  = True
            self.info['version'] = data[0]
            exitcode = 3

        return exitcode

    def _updatePlugins(self, data):
        """Download and install the plugin updates listed in `data`.

        `data` layout: [url, schema, update-entries..., "BLACKLIST", blacklist-entries...]
        """
        exitcode = 0
        updated  = []

        url    = data[0]
        schema = data[1].split('|')

        VERSION = re.compile(r'__version__.*=.*("|\')([\d.]+)')

        if "BLACKLIST" in data:
            blacklist  = data[data.index('BLACKLIST') + 1:]
            updatelist = data[2:data.index('BLACKLIST')]
        else:
            blacklist  = []
            updatelist = data[2:]

        updatelist = [dict(zip(schema, x.split('|'))) for x in updatelist]
        blacklist  = [dict(zip(schema, x.split('|'))) for x in blacklist]

        if blacklist:
            type_plugins = [(plugin['type'], plugin['name'].rsplit('.', 1)[0]) for plugin in blacklist]

            # Protect UpdateManager from self-removing
            try:
                type_plugins.remove(("addon", "UpdateManager"))
            except ValueError:
                pass

            #: drop blacklisted plugins from the update list
            for t, n in type_plugins:
                for idx, plugin in enumerate(updatelist):
                    if n == plugin['name'] and t == plugin['type']:
                        updatelist.pop(idx)
                        break

            for t, n in self.removePlugins(sorted(type_plugins)):
                self.logInfo(_("Removed blacklisted plugin: [%(type)s] %(name)s") % {
                    'type': t,
                    'name': n,
                })

        for plugin in sorted(updatelist, key=itemgetter("type", "name")):
            filename = plugin['name']
            prefix   = plugin['type']
            version  = plugin['version']

            if filename.endswith(".pyc"):
                name = filename[:filename.find("_")]
            else:
                name = filename.replace(".py", "")

            #@TODO: Remove in 0.4.10
            if prefix.endswith("s"):
                type = prefix[:-1]
            else:
                type = prefix

            plugins = getattr(self.core.pluginManager, "%sPlugins" % type)

            oldver = float(plugins[name]['version']) if name in plugins else None
            newver = float(version)

            if not oldver:
                msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
            elif newver > oldver:
                msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
            else:
                continue

            self.logInfo(_(msg) % {'type'  : type,
                                   'name'  : name,
                                   'oldver': oldver,
                                   'newver': newver})
            try:
                content = getURL(url % plugin)
                m = VERSION.search(content)

                if m and m.group(2) == version:
                    with open(fs_join("userplugins", type, filename), "wb") as f:
                        f.write(content)

                    updated.append((type, name))
                else:
                    #: bugfix: "raise Exception, msg" is Python-2-only syntax
                    raise Exception(_("Version mismatch"))

            except Exception as e:
                self.logError(_("Error updating plugin: %s") % filename, e)

        if updated:
            self.logInfo(_("*** Plugins updated ***"))

            if self.core.pluginManager.reloadPlugins(updated):
                exitcode = 1
            else:
                self.logWarning(_("Restart pyLoad to reload the updated plugins"))
                self.info['plugins'] = True
                exitcode = 2

            self.manager.dispatchEvent("plugin_updated", updated)
        else:
            self.logInfo(_("No plugin updates available"))

        # Exit codes:
        #  0 = No plugin updated
        #  1 = Plugins updated
        #  2 = Plugins updated, but restart required
        return exitcode

    @Expose
    def removePlugins(self, type_plugins):
        """ delete plugins from disk """

        #: bugfix: return an (empty) list, not None -- callers iterate the result
        if not type_plugins:
            return []

        removed = set()

        self.logDebug("Requested deletion of plugins: %s" % type_plugins)

        for type, name in type_plugins:
            #: NOTE(review): "pypath" is assumed to be the pyLoad root path
            #  injected as a builtin -- confirm it is available here
            rootplugins = os.path.join(pypath, "module", "plugins")

            for dir in ("userplugins", rootplugins):
                py_filename  = fs_join(dir, type, name + ".py")
                pyc_filename = py_filename + "c"

                if type == "addon":
                    try:
                        self.manager.deactivateAddon(name)

                    except Exception as e:
                        self.logDebug(e)

                for filename in (py_filename, pyc_filename):
                    if not exists(filename):
                        continue

                    try:
                        os.remove(filename)

                    except OSError as e:
                        self.logError(_("Error removing: %s") % filename, e)

                    else:
                        id = (type, name)
                        removed.add(id)

        #: return a list of the plugins successfully removed
        return list(removed)
diff --git a/pyload/plugin/addon/WindowsPhoneNotify.py b/pyload/plugin/addon/WindowsPhoneNotify.py
new file mode 100644
index 000000000..341e682b2
--- /dev/null
+++ b/pyload/plugin/addon/WindowsPhoneNotify.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+
+import httplib
+import time
+
+from pyload.plugin.Addon import Addon, Expose
+
+
class WindowsPhoneNotify(Addon):
    __name__    = "WindowsPhoneNotify"
    __type__    = "addon"
    __version__ = "0.09"

    __config__ = [("id"             , "str" , "Push ID"                                  , ""   ),
                  ("url"            , "str" , "Push url"                                 , ""   ),
                  ("notifycaptcha"  , "bool", "Notify captcha request"                   , True ),
                  ("notifypackage"  , "bool", "Notify package finished"                  , True ),
                  ("notifyprocessed", "bool", "Notify packages processed"                , True ),
                  ("notifyupdate"   , "bool", "Notify plugin updates"                    , True ),
                  ("notifyexit"     , "bool", "Notify pyLoad shutdown"                   , True ),
                  ("sendtimewait"   , "int" , "Timewait in seconds between notifications", 5    ),
                  ("sendpermin"     , "int" , "Max notifications per minute"             , 12   ),
                  ("ignoreclient"   , "bool", "Send notifications if client is connected", False)]

    __description__ = """Send push notifications to Windows Phone"""
    __license__     = "GPLv3"
    __authors__     = [("Andy Voigt"    , "phone-support@hotmail.de"),
                       ("Walter Purcaro", "vuolter@gmail.com"       )]


    event_list = ["allDownloadsProcessed", "plugin_updated"]


    def setup(self):
        self.last_notify   = 0  #: unix time of the last sent notification
        self.notifications = 0  #: notifications sent within the current minute


    def plugin_updated(self, type_plugins):
        """Push a note when plugins were updated."""
        if not self.getConfig('notifyupdate'):
            return

        self.notify(_("Plugins updated"), str(type_plugins))


    def exit(self):
        """Push a note when pyLoad shuts down or restarts."""
        if not self.getConfig('notifyexit'):
            return

        if self.core.do_restart:
            self.notify(_("Restarting pyLoad"))
        else:
            self.notify(_("Exiting pyLoad"))


    def newCaptchaTask(self, task):
        """Push a note when a captcha needs user input."""
        if not self.getConfig('notifycaptcha'):
            return

        self.notify(_("Captcha"), _("New request waiting user input"))


    def packageFinished(self, pypack):
        """Push a note when a package finished downloading."""
        if self.getConfig('notifypackage'):
            self.notify(_("Package finished"), pypack.name)


    def allDownloadsProcessed(self):
        """Push a summary note once the whole queue has been processed."""
        if not self.getConfig('notifyprocessed'):
            return

        if any(True for pdata in self.core.api.getQueue() if pdata.linksdone < pdata.linkstotal):
            self.notify(_("Package failed"), _("One or more packages was not completed successfully"))
        else:
            self.notify(_("All packages finished"))


    def getXmlData(self, msg):
        """Build the WPNotification toast XML payload carrying `msg`."""
        return ("<?xml version='1.0' encoding='utf-8'?> <wp:Notification xmlns:wp='WPNotification'> "
                "<wp:Toast> <wp:Text1>pyLoad</wp:Text1> <wp:Text2>%s</wp:Text2> "
                "</wp:Toast> </wp:Notification>" % msg)


    @Expose
    def notify(self, event, msg="", key=None):
        """Send a toast notification, rate-limited via config.

        :param event: headline text
        :param msg:   optional detail text
        :param key:   optional (push id, push url) pair; defaults to config values
        """
        #: bugfix: the original evaluated self.getConfig(...) in the default
        #  argument, which runs at class-creation time where "self" does not
        #  exist (NameError) -- resolve the config lazily instead
        if key is None:
            key = (self.getConfig('id'), self.getConfig('url'))

        id, url = key

        if not id or not url:
            return

        if self.core.isClientConnected() and not self.getConfig('ignoreclient'):
            return

        elapsed_time = time.time() - self.last_notify

        #: consistency: was self.getConf (shortcut alias) -- use getConfig
        #  like the rest of the class; NOTE(review): confirm getConf is a
        #  plain alias of getConfig in the Addon base
        if elapsed_time < self.getConfig("sendtimewait"):
            return

        if elapsed_time > 60:
            self.notifications = 0

        elif self.notifications >= self.getConfig("sendpermin"):
            return

        request    = self.getXmlData("%s: %s" % (event, msg) if msg else event)
        webservice = httplib.HTTP(url)

        webservice.putrequest("POST", id)
        webservice.putheader("Host", url)
        webservice.putheader("Content-type", "text/xml")
        webservice.putheader("X-NotificationClass", "2")
        webservice.putheader("X-WindowsPhone-Target", "toast")
        webservice.putheader("Content-length", "%d" % len(request))
        webservice.endheaders()
        webservice.send(request)
        webservice.close()

        self.last_notify    = time.time()
        self.notifications += 1
diff --git a/pyload/plugin/addon/XMPPInterface.py b/pyload/plugin/addon/XMPPInterface.py
new file mode 100644
index 000000000..c0c31c738
--- /dev/null
+++ b/pyload/plugin/addon/XMPPInterface.py
@@ -0,0 +1,252 @@
+# -*- coding: utf-8 -*-
+
+from pyxmpp import streamtls
+from pyxmpp.all import JID, Message
+from pyxmpp.interface import implements
+from pyxmpp.interfaces import *
+from pyxmpp.jabber.client import JabberClient
+
+from pyload.plugin.addon.IRCInterface import IRCInterface
+
+
class XMPPInterface(IRCInterface, JabberClient):
    __name__ = "XMPPInterface"
    __type__ = "addon"
    __version__ = "0.11"

    __config__ = [("jid"      , "str" , "Jabber ID"                           , "user@exmaple-jabber-server.org"         ),
                  ("pw"       , "str" , "Password"                            , ""                                       ),
                  ("tls"      , "bool", "Use TLS"                             , False                                    ),
                  ("owners"   , "str" , "List of JIDs accepting commands from", "me@icq-gateway.org;some@msn-gateway.org"),
                  ("info_file", "bool", "Inform about every file finished"    , False                                    ),
                  ("info_pack", "bool", "Inform about every package finished" , True                                     ),
                  ("captcha"  , "bool", "Send captcha requests"               , True                                     )]

    __description__ = """Connect to jabber and let owner perform different tasks"""
    __license__ = "GPLv3"
    __authors__ = [("RaNaN", "RaNaN@pyload.org")]


    # pyxmpp interface declaration: this class provides message handlers
    # (see get_message_handlers below)
    implements(IMessageHandlersProvider)


    def __init__(self, core, manager):
        # Initialize the addon side first; JabberClient.__init__ follows
        # below once the JID and auth settings have been prepared.
        IRCInterface.__init__(self, core, manager)

        self.jid = JID(self.getConfig('jid'))
        password = self.getConfig('pw')

        # if bare JID is provided add a resource -- it is required
        if not self.jid.resource:
            self.jid = JID(self.jid.node, self.jid.domain, "pyLoad")

        if self.getConfig('tls'):
            tls_settings = streamtls.TLSSettings(require=True, verify_peer=False)
            auth = ("sasl:PLAIN", "sasl:DIGEST-MD5")
        else:
            tls_settings = None
            auth = ("sasl:DIGEST-MD5", "digest")

        # setup client with provided connection information
        # and identity data
        JabberClient.__init__(self, self.jid, password,
                              disco_name="pyLoad XMPP Client", disco_type="bot",
                              tls_settings=tls_settings, auth_methods=auth)

        # objects pyxmpp queries for handler interfaces
        self.interface_providers = [
            VersionHandler(self),
            self,
        ]


    def activate(self):
        self.new_package = {}

        # start the client thread; run() below connects and enters the loop
        self.start()


    def packageFinished(self, pypack):
        # best-effort announcement -- never let a notify error break the core
        try:
            if self.getConfig('info_pack'):
                self.announce(_("Package finished: %s") % pypack.name)
        except Exception:
            pass


    def downloadFinished(self, pyfile):
        # best-effort announcement -- never let a notify error break the core
        try:
            if self.getConfig('info_file'):
                self.announce(
                    _("Download finished: %(name)s @ %(plugin)s") % {"name": pyfile.name, "plugin": pyfile.pluginname})
        except Exception:
            pass


    def run(self):
        # connect to IRC etc.
        self.connect()
        try:
            self.loop()
        except Exception, ex:
            self.logError(ex)


    def stream_state_changed(self, state, arg):
        """This one is called when the state of stream connecting the component
        to a server changes. This will usually be used to let the user
        know what is going on."""
        self.logDebug("*** State changed: %s %r ***" % (state, arg))


    def disconnected(self):
        self.logDebug("Client was disconnected")


    def stream_closed(self, stream):
        self.logDebug("Stream was closed", stream)


    def stream_error(self, err):
        self.logDebug("Stream Error", err)


    def get_message_handlers(self):
        """Return list of (message_type, message_handler) tuples.

        The handlers returned will be called when matching message is received
        in a client session."""
        return [("normal", self.message)]


    def message(self, stanza):
        """Message handler for the component.

        Parses "<trigger> [args...]" commands from configured owner JIDs,
        dispatches to the matching event_<trigger> handler (presumably
        provided by IRCInterface -- none are defined here) and returns the
        reply stanzas.
        """
        subject = stanza.get_subject()
        body = stanza.get_body()
        t = stanza.get_type()
        self.logDebug("Message from %s received." % unicode(stanza.get_from()))
        self.logDebug("Body: %s Subject: %s Type: %s" % (body, subject, t))

        if t == "headline":
            # 'headline' messages should never be replied to
            return True
        if subject:
            subject = u"Re: " + subject

        to_jid = stanza.get_from()
        from_jid = stanza.get_to()

        #j = JID()
        to_name = to_jid.as_utf8()
        from_name = from_jid.as_utf8()

        names = self.getConfig('owners').split(";")

        # only accept commands coming from configured owner JIDs
        if to_name in names or to_jid.node + "@" + to_jid.domain in names:
            messages = []

            trigger = "pass"
            args = None

            try:
                temp = body.split()
                trigger = temp[0]
                if len(temp) > 1:
                    args = temp[1:]
            except Exception:
                pass

            # fall back to event_pass when no event_<trigger> handler exists
            handler = getattr(self, "event_%s" % trigger, self.event_pass)
            try:
                res = handler(args)
                # one reply stanza per line of handler output
                for line in res:
                    m = Message(
                        to_jid=to_jid,
                        from_jid=from_jid,
                        stanza_type=stanza.get_type(),
                        subject=subject,
                        body=line)

                    messages.append(m)
            except Exception, e:
                self.logError(e)

            return messages

        else:
            return True


    def response(self, msg, origin=""):
        # route IRCInterface-style responses through the XMPP announce
        return self.announce(msg)


    def announce(self, message):
        """ send message to all owners"""
        for user in self.getConfig('owners').split(";"):
            self.logDebug("Send message to", user)

            to_jid = JID(user)

            m = Message(from_jid=self.jid,
                        to_jid=to_jid,
                        stanza_type="chat",
                        body=message)

            stream = self.get_stream()
            if not stream:
                # (re)connect lazily when no stream is available
                self.connect()
                stream = self.get_stream()

            stream.send(m)


    def beforeReconnecting(self, ip):
        # drop the XMPP connection before the IP change
        self.disconnect()


    def afterReconnecting(self, ip):
        # re-establish the XMPP connection after the IP change
        self.connect()
+
+
class VersionHandler(object):
    """Answers 'jabber:iq:version' queries and advertises the namespace in
    the client's disco#info results."""

    implements(IIqHandlersProvider, IFeaturesProvider)


    def __init__(self, client):
        """Keep a reference to the owning client."""
        self.client = client


    def get_features(self):
        """Namespaces the client should include in replies to disco#info
        queries."""
        return ["jabber:iq:version"]


    def get_iq_get_handlers(self):
        """(element_name, namespace, handler) tuples describing the
        <iq type='get'/> stanzas handled by this provider."""
        return [("query", "jabber:iq:version", self.get_version)]


    def get_iq_set_handlers(self):
        """No <iq type='set'/> stanzas are handled by this class."""
        return []


    def get_version(self, iq):
        """Build the result for a jabber:iq:version query.

        jabber:iq:version queries are not supported directly by PyXMPP, so
        the XML node is accessed directly through the libxml2 API. This
        should be used very carefully!
        """
        reply = iq.make_result_response()
        query = reply.new_query("jabber:iq:version")
        query.newTextChild(query.ns(), "name", "Echo component")
        query.newTextChild(query.ns(), "version", "1.0")
        return reply
diff --git a/pyload/plugin/addon/__init__.py b/pyload/plugin/addon/__init__.py
new file mode 100644
index 000000000..40a96afc6
--- /dev/null
+++ b/pyload/plugin/addon/__init__.py
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-