Diffstat (limited to 'pyload/plugin/addon')
-rw-r--r--  pyload/plugin/addon/Checksum.py                    186
-rw-r--r--  pyload/plugin/addon/ClickAndLoad.py                 74
-rw-r--r--  pyload/plugin/addon/DeleteFinished.py               79
-rw-r--r--  pyload/plugin/addon/DownloadScheduler.py            77
-rw-r--r--  pyload/plugin/addon/ExternalScripts.py             145
-rw-r--r--  pyload/plugin/addon/ExtractArchive.py              363
-rw-r--r--  pyload/plugin/addon/HotFolder.py                    70
-rw-r--r--  pyload/plugin/addon/IRCInterface.py                431
-rw-r--r--  pyload/plugin/addon/MergeFiles.py                   85
-rw-r--r--  pyload/plugin/addon/MultiHome.py                    81
-rw-r--r--  pyload/plugin/addon/RestartFailed.py                45
-rw-r--r--  pyload/plugin/addon/RestartSlow.py                  57
-rw-r--r--  pyload/plugin/addon/SkipRev.py                      77
-rw-r--r--  pyload/plugin/addon/UnSkipOnFail.py                 87
-rw-r--r--  pyload/plugin/addon/UpdateManager.py               305
-rw-r--r--  pyload/plugin/addon/WindowsPhoneToastNotify.py      57
-rw-r--r--  pyload/plugin/addon/XMPPInterface.py               252
-rw-r--r--  pyload/plugin/addon/__init__.py                      1
18 files changed, 0 insertions, 2472 deletions
diff --git a/pyload/plugin/addon/Checksum.py b/pyload/plugin/addon/Checksum.py
deleted file mode 100644
index 0589bd55a..000000000
--- a/pyload/plugin/addon/Checksum.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import hashlib
-import re
-import zlib
-
-from os import remove
-from os.path import getsize, isfile, splitext
-
-from pyload.plugin.Addon import Addon
-from pyload.utils import safe_join, fs_encode
-
-
-def computeChecksum(local_file, algorithm):
- if algorithm in getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")):
- h = getattr(hashlib, algorithm)()
-
- with open(local_file, 'rb') as f:
- for chunk in iter(lambda: f.read(128 * h.block_size), ''):
- h.update(chunk)
-
- return h.hexdigest()
-
- elif algorithm in ("adler32", "crc32"):
- hf = getattr(zlib, algorithm)
- last = 0
-
- with open(local_file, 'rb') as f:
- for chunk in iter(lambda: f.read(8192), ''):
- last = hf(chunk, last)
-
- return "%x" % last
-
- else:
- return None
-
-
-class Checksum(Addon):
- __name__ = "Checksum"
- __type__ = "addon"
- __version__ = "0.15"
-
- __config__ = [("activated" , "bool" , "Activated" , True ),
- ("check_checksum", "bool" , "Check checksum? (If False only size will be verified)", True ),
- ("check_action" , "fail;retry;nothing", "What to do if check fails?" , "retry"),
- ("max_tries" , "int" , "Number of retries" , 2 ),
- ("retry_action" , "fail;nothing" , "What to do if all retries fail?" , "fail" ),
- ("wait_time" , "int" , "Time to wait before each retry (seconds)" , 1 )]
-
- __description__ = """Verify downloaded file size and checksum"""
- __license__ = "GPLv3"
- __authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
- ("Walter Purcaro", "vuolter@gmail.com"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- methods = {'sfv': 'crc32', 'crc': 'crc32', 'hash': 'md5'}
- regexps = {'sfv': r'^(?P<name>[^;].+)\s+(?P<hash>[0-9A-Fa-f]{8})$',
- 'md5': r'^(?P<name>[0-9A-Fa-f]{32}) (?P<file>.+)$',
- 'crc': r'filename=(?P<name>.+)\nsize=(?P<size>\d+)\ncrc32=(?P<hash>[0-9A-Fa-f]{8})$',
- 'default': r'^(?P<hash>[0-9A-Fa-f]+)\s+\*?(?P<name>.+)$'}
-
-
- def activate(self):
- if not self.getConfig("check_checksum"):
- self.logInfo(_("Checksum validation is disabled in plugin configuration"))
-
-
- def setup(self):
- self.algorithms = sorted(
- getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")), reverse=True)
- self.algorithms.extend(["crc32", "adler32"])
- self.formats = self.algorithms + ["sfv", "crc", "hash"]
-
-
- def downloadFinished(self, pyfile):
- """
- Compute checksum for the downloaded file and compare it with the hash provided by the hoster.
- pyfile.plugin.check_data should be a dictionary which can contain:
- a) if known, the exact filesize in bytes (e.g. "size": 123456789)
- b) hexadecimal hash string with algorithm name as key (e.g. "md5": "d76505d0869f9f928a17d42d66326307")
- """
- if hasattr(pyfile.plugin, "check_data") and isinstance(pyfile.plugin.check_data, dict):
- data = pyfile.plugin.check_data.copy()
-
- elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict):
- data = pyfile.plugin.api_data.copy()
-
- # elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
- # data = pyfile.plugin.info.copy()
-
- else:
- return
-
- self.logDebug(data)
-
- if not pyfile.plugin.lastDownload:
- self.checkFailed(pyfile, None, "No file downloaded")
-
- local_file = fs_encode(pyfile.plugin.lastDownload)
- #download_folder = self.config['general']['download_folder']
- #local_file = fs_encode(safe_join(download_folder, pyfile.package().folder, pyfile.name))
-
- if not isfile(local_file):
- self.checkFailed(pyfile, None, "File does not exist")
-
- # validate file size
- if "size" in data:
- api_size = int(data['size'])
- file_size = getsize(local_file)
- if api_size != file_size:
- self.logWarning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size))
- self.checkFailed(pyfile, local_file, "Incorrect file size")
- del data['size']
-
- # validate checksum
- if data and self.getConfig("check_checksum"):
- if "checksum" in data:
- data['md5'] = data['checksum']
-
- for key in self.algorithms:
- if key in data:
- checksum = computeChecksum(local_file, key.replace("-", "").lower())
- if checksum:
- if checksum == data[key].lower():
- self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
- (pyfile.name, key.upper(), checksum))
- break
- else:
- self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
- (key.upper(), pyfile.name, checksum, data[key]))
- self.checkFailed(pyfile, local_file, "Checksums do not match")
- else:
- self.logWarning(_("Unsupported hashing algorithm"), key.upper())
- else:
- self.logWarning(_("Unable to validate checksum for file: ") + pyfile.name)
-
-
- def checkFailed(self, pyfile, local_file, msg):
- check_action = self.getConfig("check_action")
- if check_action == "retry":
- max_tries = self.getConfig("max_tries")
- retry_action = self.getConfig("retry_action")
- if pyfile.plugin.retries < max_tries:
- if local_file:
- remove(local_file)
- pyfile.plugin.retry(max_tries, self.getConfig("wait_time"), msg)
- elif retry_action == "nothing":
- return
- elif check_action == "nothing":
- return
- pyfile.plugin.fail(reason=msg)
-
-
- def packageFinished(self, pypack):
- download_folder = safe_join(self.config['general']['download_folder'], pypack.folder, "")
-
- for link in pypack.getChildren().itervalues():
- file_type = splitext(link['name'])[1][1:].lower()
-
- if file_type not in self.formats:
- continue
-
- hash_file = fs_encode(safe_join(download_folder, link['name']))
- if not isfile(hash_file):
- self.logWarning(_("File not found"), link['name'])
- continue
-
- with open(hash_file) as f:
- text = f.read()
-
- for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
- data = m.groupdict()
- self.logDebug(link['name'], data)
-
- local_file = fs_encode(safe_join(download_folder, data['name']))
- algorithm = self.methods.get(file_type, file_type)
- checksum = computeChecksum(local_file, algorithm)
- if checksum == data['hash']:
- self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
- (data['name'], algorithm, checksum))
- else:
- self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
- (algorithm, data['name'], checksum, data['hash']))
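The removed computeChecksum() helper streamed each file in chunks, so even very large downloads were hashed without loading them into memory, and fell back to zlib's running adler32/crc32 values for the non-cryptographic formats. A minimal standalone Python 3 sketch of the same idea (function and variable names are illustrative, not part of the plugin API):

    import hashlib
    import zlib

    def compute_checksum(path, algorithm):
        # Cryptographic hashes stream the file in block-sized chunks;
        # adler32/crc32 keep a running 32-bit value instead.
        if algorithm in hashlib.algorithms_available:
            h = hashlib.new(algorithm)
            with open(path, "rb") as f:
                for chunk in iter(lambda: f.read(128 * h.block_size), b""):
                    h.update(chunk)
            return h.hexdigest()
        if algorithm in ("adler32", "crc32"):
            hf = getattr(zlib, algorithm)
            value = 0
            with open(path, "rb") as f:
                for chunk in iter(lambda: f.read(8192), b""):
                    value = hf(chunk, value)
            return "%x" % (value & 0xFFFFFFFF)
        return None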
diff --git a/pyload/plugin/addon/ClickAndLoad.py b/pyload/plugin/addon/ClickAndLoad.py
deleted file mode 100644
index 5fe6e4bec..000000000
--- a/pyload/plugin/addon/ClickAndLoad.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from socket import socket, error, SHUT_WR
-from threading import Thread
-
-from pyload.plugin.Addon import Addon
-
-
-def forward(source, destination):
- string = ' '
- while string:
- string = source.recv(1024)
- if string:
- destination.sendall(string)
- else:
- #source.shutdown(socket.SHUT_RD)
-            destination.shutdown(SHUT_WR)
-
-
-class ClickAndLoad(Addon):
- __name__ = "ClickAndLoad"
- __type__ = "addon"
- __version__ = "0.23"
-
- __config__ = [("activated", "bool", "Activated" , True ),
- ("port" , "int" , "Port" , 9666 ),
- ("extern" , "bool", "Allow external link adding", False)]
-
- __description__ = """Click'N'Load hook plugin"""
- __license__ = "GPLv3"
- __authors__ = [("RaNaN", "RaNaN@pyload.de"),
- ("mkaay", "mkaay@mkaay.de"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def setup(self):
- self.interval = 300
-
-
- def activate(self):
- self.initPeriodical()
-
-
- def periodical(self):
- webip = "0.0.0.0" if self.getConfig("extern") else "127.0.0.1"
- webport = self.config['webinterface']['port']
-        cnlport = self.getConfig("port")
-
- try:
- s = socket()
- s.bind((webip, cnlport))
- s.listen(5)
-
- client = s.accept()[0]
- server = socket()
-
- server.connect(("127.0.0.1", webport))
-
- except error, e:
- if hasattr(e, "errno"):
- errno = e.errno
- else:
- errno = e.args[0]
-
- if errno == 98:
- self.logWarning(_("Port %d already in use") % cnlport)
- else:
- self.logDebug(e)
-
- else:
- self.core.scheduler.removeJob(self.cb)
- t = Thread(target=forward, args=[client, server])
- t.setDaemon(True)
- t.start()
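ClickAndLoad simply bridges the fixed Click'N'Load port 9666 to pyLoad's own webinterface port, copying raw bytes between the two sockets. A hedged, standalone Python 3 sketch of such a bridge (the port numbers and the two-way relay are illustrative; the deleted addon relays only client-to-server in a daemon thread):

    import socket
    from threading import Thread

    def relay(src, dst):
        # Copy bytes until the sender closes, then half-close the other side.
        while True:
            data = src.recv(1024)
            if not data:
                dst.shutdown(socket.SHUT_WR)
                break
            dst.sendall(data)

    def bridge(cnl_port=9666, web_port=8000):
        # Accept one Click'N'Load client and splice it onto the webinterface.
        listener = socket.socket()
        listener.bind(("127.0.0.1", cnl_port))
        listener.listen(5)
        client, _ = listener.accept()
        server = socket.create_connection(("127.0.0.1", web_port))
        Thread(target=relay, args=(client, server), daemon=True).start()
        relay(server, client)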
diff --git a/pyload/plugin/addon/DeleteFinished.py b/pyload/plugin/addon/DeleteFinished.py
deleted file mode 100644
index 59f2e3321..000000000
--- a/pyload/plugin/addon/DeleteFinished.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.database import style
-from pyload.plugin.Addon import Addon
-
-
-class DeleteFinished(Addon):
- __name__ = "DeleteFinished"
- __type__ = "addon"
- __version__ = "1.11"
-
- __config__ = [('interval' , 'int' , 'Delete every (hours)' , '72' ),
- ('deloffline', 'bool', 'Delete packages with offline links', 'False')]
-
- __description__ = """Automatically delete all finished packages from queue"""
- __license__ = "GPLv3"
- __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # event_list = ["pluginConfigChanged"]
-
-
- ## overwritten methods ##
- def periodical(self):
- if not self.info['sleep']:
- deloffline = self.getConfig('deloffline')
- mode = '0,1,4' if deloffline else '0,4'
- msg = _('delete all finished packages in queue list (%s packages with offline links)')
- self.logInfo(msg % (_('including') if deloffline else _('excluding')))
- self.deleteFinished(mode)
- self.info['sleep'] = True
- self.addEvent('packageFinished', self.wakeup)
-
-
- def pluginConfigChanged(self, plugin, name, value):
- if name == "interval" and value != self.interval:
- self.interval = value * 3600
- self.initPeriodical()
-
-
- def deactivate(self):
- self.removeEvent('packageFinished', self.wakeup)
-
-
- def activate(self):
- self.info = {'sleep': True}
- interval = self.getConfig('interval')
- self.pluginConfigChanged(self.__name__, 'interval', interval)
- self.addEvent('packageFinished', self.wakeup)
-
-
- ## own methods ##
- @style.queue
- def deleteFinished(self, mode):
- self.c.execute('DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE package=packages.id AND status NOT IN (%s))' % mode)
- self.c.execute('DELETE FROM links WHERE NOT EXISTS(SELECT 1 FROM packages WHERE id=links.package)')
-
-
- def wakeup(self, pypack):
- self.removeEvent('packageFinished', self.wakeup)
- self.info['sleep'] = False
-
-
- ## event managing ##
- def addEvent(self, event, func):
- """Adds an event listener for event name"""
- if event in self.m.events:
- if func in self.m.events[event]:
- self.logDebug("Function already registered", func)
- else:
- self.m.events[event].append(func)
- else:
- self.m.events[event] = [func]
-
-
- def setup(self):
- self.interval = 0
- self.m = self.manager
- self.removeEvent = self.m.removeEvent
diff --git a/pyload/plugin/addon/DownloadScheduler.py b/pyload/plugin/addon/DownloadScheduler.py
deleted file mode 100644
index e5e25e389..000000000
--- a/pyload/plugin/addon/DownloadScheduler.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-
-from time import localtime
-
-from pyload.plugin.Addon import Addon
-
-
-class DownloadScheduler(Addon):
- __name__ = "DownloadScheduler"
- __type__ = "addon"
- __version__ = "0.22"
-
- __config__ = [("timetable", "str" , "List time periods as hh:mm full or number(kB/s)" , "0:00 full, 7:00 250, 10:00 0, 17:00 150"),
- ("abort" , "bool", "Abort active downloads when start period with speed 0", False )]
-
- __description__ = """Download Scheduler"""
- __license__ = "GPLv3"
- __authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
- ("stickell", "l.stickell@yahoo.it")]
-
-
- def setup(self):
-        self.cb = None #: callback to scheduler job; will be removed by AddonManager when the addon is unloaded
-
-
- def activate(self):
- self.updateSchedule()
-
-
- def updateSchedule(self, schedule=None):
- if schedule is None:
- schedule = self.getConfig("timetable")
-
- schedule = re.findall("(\d{1,2}):(\d{2})[\s]*(-?\d+)",
- schedule.lower().replace("full", "-1").replace("none", "0"))
- if not schedule:
- self.logError(_("Invalid schedule"))
- return
-
- t0 = localtime()
- now = (t0.tm_hour, t0.tm_min, t0.tm_sec, "X")
- schedule = sorted([(int(x[0]), int(x[1]), 0, int(x[2])) for x in schedule] + [now])
-
- self.logDebug("Schedule", schedule)
-
- for i, v in enumerate(schedule):
- if v[3] == "X":
- last, next = schedule[i - 1], schedule[(i + 1) % len(schedule)]
- self.logDebug("Now/Last/Next", now, last, next)
-
- self.setDownloadSpeed(last[3])
-
- next_time = (((24 + next[0] - now[0]) * 60 + next[1] - now[1]) * 60 + next[2] - now[2]) % 86400
- self.core.scheduler.removeJob(self.cb)
- self.cb = self.core.scheduler.addJob(next_time, self.updateSchedule, threaded=False)
-
-
- def setDownloadSpeed(self, speed):
- if speed == 0:
- abort = self.getConfig("abort")
-            self.logInfo(_("Stopping download server. (Running downloads will %sbe aborted.)") % ('' if abort else _('not ')))
- self.core.api.pauseServer()
- if abort:
- self.core.api.stopAllDownloads()
- else:
- self.core.api.unpauseServer()
-
- if speed > 0:
- self.logInfo(_("Setting download speed to %d kB/s") % speed)
- self.core.api.setConfigValue("download", "limit_speed", 1)
- self.core.api.setConfigValue("download", "max_speed", speed)
- else:
- self.logInfo(_("Setting download speed to FULL"))
- self.core.api.setConfigValue("download", "limit_speed", 0)
- self.core.api.setConfigValue("download", "max_speed", -1)
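The timetable string is parsed with a single regular expression into (hour, minute, speed) entries, where "full" maps to -1 (no limit) and "none" to 0 (pause downloads). A small standalone Python 3 sketch of that parsing step (the sample timetable is the plugin's default value):

    import re

    def parse_timetable(text):
        # "full" -> -1 (unlimited), "none" -> 0 (pause); result is sorted by time of day.
        text = text.lower().replace("full", "-1").replace("none", "0")
        entries = re.findall(r"(\d{1,2}):(\d{2})\s*(-?\d+)", text)
        return sorted((int(h), int(m), int(speed)) for h, m, speed in entries)

    print(parse_timetable("0:00 full, 7:00 250, 10:00 0, 17:00 150"))
    # [(0, 0, -1), (7, 0, 250), (10, 0, 0), (17, 0, 150)]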
diff --git a/pyload/plugin/addon/ExternalScripts.py b/pyload/plugin/addon/ExternalScripts.py
deleted file mode 100644
index 31283afc2..000000000
--- a/pyload/plugin/addon/ExternalScripts.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import subprocess
-
-from itertools import chain
-from os import listdir, access, X_OK, makedirs
-from os.path import join, exists, basename, abspath
-
-from pyload.plugin.Addon import Addon
-from pyload.utils import safe_join
-
-
-class ExternalScripts(Addon):
- __name__ = "ExternalScripts"
- __type__ = "addon"
- __version__ = "0.25"
-
- __config__ = [("activated", "bool", "Activated", True)]
-
- __description__ = """Run external scripts"""
- __license__ = "GPLv3"
- __authors__ = [("mkaay", "mkaay@mkaay.de"),
- ("RaNaN", "ranan@pyload.org"),
- ("spoob", "spoob@pyload.org"),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- event_map = {'archive-extracted' : "archive_extracted",
- 'package-extracted' : "package_extracted",
- 'all_archives-extracted' : "all_archives_extracted",
- 'all_archives-processed' : "all_archives_processed",
- 'all_downloads-finished' : "allDownloadsFinished",
- 'all_downloads-processed': "allDownloadsProcessed"}
-
-
- def setup(self):
- self.scripts = {}
-
- folders = ["download_preparing", "download_finished", "all_downloads_finished", "all_downloads_processed",
- "before_reconnect", "after_reconnect",
- "package_finished", "package_extracted",
- "archive_extracted", "all_archives_extracted", "all_archives_processed",
- # deprecated folders
- "unrar_finished", "all_dls_finished", "all_dls_processed"]
-
- for folder in folders:
- self.scripts[folder] = []
-
- self.initPluginType(folder, join(pypath, 'scripts', folder))
- self.initPluginType(folder, join('scripts', folder))
-
- for script_type, names in self.scripts.iteritems():
- if names:
- self.logInfo(_("Installed scripts for"), script_type, ", ".join([basename(x) for x in names]))
-
-
- def initPluginType(self, folder, path):
- if not exists(path):
- try:
- makedirs(path)
- except Exception:
- self.logDebug("Script folder %s not created" % folder)
- return
-
- for f in listdir(path):
- if f.startswith("#") or f.startswith(".") or f.startswith("_") or f.endswith("~") or f.endswith(".swp"):
- continue
-
- if not access(join(path, f), X_OK):
- self.logWarning(_("Script not executable:") + " %s/%s" % (folder, f))
-
- self.scripts[folder].append(join(path, f))
-
-
- def callScript(self, script, *args):
- try:
- cmd = [script] + [str(x) if not isinstance(x, basestring) else x for x in args]
- self.logDebug("Executing", abspath(script), " ".join(cmd))
- #output goes to pyload
- subprocess.Popen(cmd, bufsize=-1)
- except Exception, e:
- self.logError(_("Error in %(script)s: %(error)s") % {"script": basename(script), "error": e})
-
-
- def downloadPreparing(self, pyfile):
- for script in self.scripts['download_preparing']:
- self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.id)
-
-
- def downloadFinished(self, pyfile):
- download_folder = self.config['general']['download_folder']
- for script in self.scripts['download_finished']:
- filename = safe_join(download_folder, pyfile.package().folder, pyfile.name)
- self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.name, filename, pyfile.id)
-
-
- def packageFinished(self, pypack):
- download_folder = self.config['general']['download_folder']
- for script in self.scripts['package_finished']:
- folder = safe_join(download_folder, pypack.folder)
- self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
-
-
- def beforeReconnecting(self, ip):
- for script in self.scripts['before_reconnect']:
- self.callScript(script, ip)
-
-
- def afterReconnecting(self, ip):
- for script in self.scripts['after_reconnect']:
- self.callScript(script, ip)
-
-
- def archive_extracted(self, pyfile, folder, filename, files):
- for script in self.scripts['archive_extracted']:
- self.callScript(script, folder, filename, files)
- for script in self.scripts['unrar_finished']: #: deprecated
- self.callScript(script, folder, filename)
-
-
- def package_extracted(self, pypack):
- download_folder = self.config['general']['download_folder']
- for script in self.scripts['package_extracted']:
- folder = safe_join(download_folder, pypack.folder)
- self.callScript(script, pypack.name, folder, pypack.password, pypack.id)
-
-
- def all_archives_extracted(self):
- for script in self.scripts['all_archives_extracted']:
- self.callScript(script)
-
-
- def all_archives_processed(self):
- for script in self.scripts['all_archives_processed']:
- self.callScript(script)
-
-
- def allDownloadsFinished(self):
- for script in chain(self.scripts['all_downloads_finished'], self.scripts['all_dls_finished']):
- self.callScript(script)
-
-
- def allDownloadsProcessed(self):
- for script in chain(self.scripts['all_downloads_processed'], self.scripts['all_dls_processed']):
- self.callScript(script)
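Scripts are called with positional arguments only, so a user script just reads sys.argv. A hypothetical example for scripts/download_finished/, matching the argument order passed by downloadFinished() above (plugin name, URL, file name, full path, file id):

    #!/usr/bin/env python3
    # Example user script; files without the executable bit trigger the
    # addon's "Script not executable" warning.
    import sys

    plugin, url, name, path, fid = sys.argv[1:6]
    print("finished %s via %s -> %s" % (name, plugin, path))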
diff --git a/pyload/plugin/addon/ExtractArchive.py b/pyload/plugin/addon/ExtractArchive.py
deleted file mode 100644
index b24bb37a2..000000000
--- a/pyload/plugin/addon/ExtractArchive.py
+++ /dev/null
@@ -1,363 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import os
-import sys
-
-from copy import copy
-from os import remove, chmod, makedirs
-from os.path import exists, basename, isfile, isdir
-from traceback import print_exc
-
-# monkey patch bug in python 2.6 and lower
-# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
-if sys.version_info < (2, 7) and os.name != "nt":
- import errno
- from subprocess import Popen
-
-
- def _eintr_retry_call(func, *args):
- while True:
- try:
- return func(*args)
- except OSError, e:
- if e.errno == errno.EINTR:
- continue
- raise
-
-
-    # unused timeout option for older python versions
- def wait(self, timeout=0):
- """Wait for child process to terminate. Returns returncode
- attribute."""
- if self.returncode is None:
- try:
- pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
- except OSError, e:
- if e.errno != errno.ECHILD:
- raise
- # This happens if SIGCLD is set to be ignored or waiting
- # for child processes has otherwise been disabled for our
- # process. This child is dead, we can't get the status.
- sts = 0
- self._handle_exitstatus(sts)
- return self.returncode
-
- Popen.wait = wait
-
-if os.name != "nt":
- from grp import getgrnam
- from os import chown
- from pwd import getpwnam
-
-from pyload.plugin.Addon import Addon, threaded, Expose
-from pyload.plugin.internal.AbstractExtractor import ArchiveError, CRCError, WrongPassword
-from pyload.utils import safe_join, fs_encode
-
-
-class ExtractArchive(Addon):
- __name__ = "ExtractArchive"
- __type__ = "addon"
- __version__ = "0.19"
-
- __config__ = [("activated" , "bool" , "Activated" , True ),
- ("fullpath" , "bool" , "Extract full path" , True ),
- ("overwrite" , "bool" , "Overwrite files" , True ),
- ("passwordfile" , "file" , "password file" , "archive_password.txt"),
- ("deletearchive", "bool" , "Delete archives when done" , False ),
- ("subfolder" , "bool" , "Create subfolder for each package" , False ),
- ("destination" , "folder", "Extract files to" , "" ),
-                  ("excludefiles" , "str"   , "Exclude files from unpacking (separated by ;)", ""                    ),
-                  ("recursive"    , "bool"  , "Extract archives in archives"                 , True                  ),
- ("queue" , "bool" , "Wait for all downloads to be finished" , True ),
- ("renice" , "int" , "CPU Priority" , 0 )]
-
- __description__ = """Extract different kind of archives"""
- __license__ = "GPLv3"
- __authors__ = [("RaNaN", "ranan@pyload.org"),
- ("AndroKev", ""),
- ("Walter Purcaro", "vuolter@gmail.com")]
-
-
- event_map = {'all_downloads-processed': "allDownloadsProcessed"}
-
-
- def setup(self):
- self.plugins = []
- self.passwords = []
- names = []
-
- for p in ("UnRar", "UnZip"):
- try:
- module = self.core.pluginManager.loadModule("internal", p)
- klass = getattr(module, p)
- if klass.checkDeps():
- names.append(p)
- self.plugins.append(klass)
-
- except OSError, e:
- if e.errno == 2:
- self.logInfo(_("No %s installed") % p)
- else:
- self.logWarning(_("Could not activate %s") % p, e)
- if self.core.debug:
- print_exc()
-
- except Exception, e:
- self.logWarning(_("Could not activate %s") % p, e)
- if self.core.debug:
- print_exc()
-
- if names:
- self.logInfo(_("Activated") + " " + " ".join(names))
- else:
- self.logInfo(_("No Extract plugins activated"))
-
- # queue with package ids
- self.queue = []
-
-
- @Expose
- def extractPackage(self, id):
- """ Extract package with given id"""
- self.manager.startThread(self.extract, [id])
-
-
- def packageFinished(self, pypack):
- pid = pypack.id
- if self.getConfig("queue"):
- self.logInfo(_("Package %s queued for later extracting") % pypack.name)
- self.queue.append(pid)
- else:
- self.manager.startThread(self.extract, [pid])
-
-
- @threaded
- def allDownloadsProcessed(self, thread):
- local = copy(self.queue)
- del self.queue[:]
- if self.extract(local, thread): #: check only if all gone fine, no failed reporting for now
- self.manager.dispatchEvent("all_archives-extracted")
- self.manager.dispatchEvent("all_archives-processed")
-
-
- def extract(self, ids, thread=None):
- processed = []
- extracted = []
- failed = []
-
- destination = self.getConfig("destination")
- subfolder = self.getConfig("subfolder")
- fullpath = self.getConfig("fullpath")
- overwrite = self.getConfig("overwrite")
- excludefiles = self.getConfig("excludefiles")
- renice = self.getConfig("renice")
- recursive = self.getConfig("recursive")
-
- # reload from txt file
- self.reloadPasswords()
-
- # dl folder
- dl = self.config['general']['download_folder']
-
- #iterate packages -> plugins -> targets
- for pid in ids:
- p = self.core.files.getPackage(pid)
- self.logInfo(_("Check package %s") % p.name)
- if not p:
- continue
-
- # determine output folder
- out = safe_join(dl, p.folder, "")
-
- out = safe_join(dl, p.folder, self.getConfig("destination"), "")
- if subfolder:
- out = safe_join(out, fs_encode(p.folder))
-
- if not exists(out):
- makedirs(out)
-
- files_ids = [(safe_join(dl, p.folder, x['name']), x['id']) for x in p.getChildren().itervalues()]
- matched = False
- success = True
-
- # check as long there are unseen files
- while files_ids:
- new_files_ids = []
-
- for plugin in self.plugins:
- targets = plugin.getTargets(files_ids)
- if targets:
- self.logDebug("Targets for %s: %s" % (plugin.__name__, targets))
- matched = True
- for target, fid in targets:
- if target in processed:
- self.logDebug(basename(target), "skipped")
- continue
-
- processed.append(target) # prevent extracting same file twice
-
- self.logInfo(basename(target), _("Extract to %s") % out)
- try:
- klass = plugin(self, target, out, fullpath, overwrite, excludefiles, renice)
- klass.init()
- password = p.password.strip().splitlines()
- new_files = self._extract(klass, fid, password, thread)
- except Exception, e:
- self.logError(basename(target), e)
- success = False
- continue
-
- self.logDebug("Extracted", new_files)
- self.setPermissions(new_files)
-
- for file in new_files:
- if not exists(file):
- self.logDebug("New file %s does not exists" % file)
- continue
- if recursive and isfile(file):
- new_files_ids.append((file, fid)) # append as new target
-
- files_ids = new_files_ids # also check extracted files
-
- if matched:
- if success:
- extracted.append(pid)
- self.manager.dispatchEvent("package-extracted", p)
- else:
- failed.append(pid)
- self.manager.dispatchEvent("package-extract_failed", p)
- else:
- self.logInfo(_("No files found to extract"))
-
- return True if not failed else False
-
-
- def _extract(self, plugin, fid, passwords, thread):
- pyfile = self.core.files.getFile(fid)
- deletearchive = self.getConfig("deletearchive")
-
- pyfile.setCustomStatus(_("extracting"))
- thread.addActive(pyfile) # keep this file until everything is done
-
- try:
- progress = lambda x: pyfile.setProgress(x)
- success = False
-
- if not plugin.checkArchive():
- plugin.extract(progress)
- success = True
- else:
- self.logInfo(basename(plugin.file), _("Password protected"))
- self.logDebug("Passwords", passwords)
-
- pwlist = copy(self.getPasswords())
- # remove already supplied pws from list (only local)
- for pw in passwords:
- if pw in pwlist:
- pwlist.remove(pw)
-
- for pw in passwords + pwlist:
- try:
- self.logDebug("Try password", pw)
- if plugin.checkPassword(pw):
- plugin.extract(progress, pw)
- self.addPassword(pw)
- success = True
- break
- except WrongPassword:
- self.logDebug("Password was wrong")
-
- if not success:
- raise Exception(_("Wrong password"))
-
- if self.core.debug:
- self.logDebug("Would delete", ", ".join(plugin.getDeleteFiles()))
-
- if deletearchive:
- files = plugin.getDeleteFiles()
- self.logInfo(_("Deleting %s files") % len(files))
- for f in files:
- if exists(f):
- remove(f)
- else:
- self.logDebug("%s does not exists" % f)
-
- self.logInfo(basename(plugin.file), _("Extracting finished"))
-
- extracted_files = plugin.getExtractedFiles()
- self.manager.dispatchEvent("archive-extracted", pyfile, plugin.out, plugin.file, extracted_files)
-
- return extracted_files
-
- except ArchiveError, e:
- self.logError(basename(plugin.file), _("Archive Error"), e)
- except CRCError:
- self.logError(basename(plugin.file), _("CRC Mismatch"))
- except Exception, e:
- if self.core.debug:
- print_exc()
- self.logError(basename(plugin.file), _("Unknown Error"), e)
-
- self.manager.dispatchEvent("archive-extract_failed", pyfile)
- raise Exception(_("Extract failed"))
-
-
- @Expose
- def getPasswords(self):
- """ List of saved passwords """
- return self.passwords
-
-
- def reloadPasswords(self):
- passwordfile = self.getConfig("passwordfile")
-
- try:
- passwords = []
- with open(passwordfile, "a+") as f:
- for pw in f.read().splitlines():
- passwords.append(pw)
-
- except IOError, e:
- self.logError(e)
-
- else:
- self.passwords = passwords
-
-
- @Expose
- def addPassword(self, pw):
- """ Adds a password to saved list"""
- passwordfile = self.getConfig("passwordfile")
-
- if pw in self.passwords:
- self.passwords.remove(pw)
-
- self.passwords.insert(0, pw)
-
- try:
- with open(passwordfile, "wb") as f:
- for pw in self.passwords:
- f.write(pw + "\n")
- except IOError, e:
- self.logError(e)
-
-
- def setPermissions(self, files):
- for f in files:
- if not exists(f):
- continue
- try:
- if self.config['permission']['change_file']:
- if isfile(f):
- chmod(f, int(self.config['permission']['file'], 8))
- elif isdir(f):
- chmod(f, int(self.config['permission']['folder'], 8))
-
- if self.config['permission']['change_dl'] and os.name != "nt":
- uid = getpwnam(self.config['permission']['user'])[2]
- gid = getgrnam(self.config['permission']['group'])[2]
- chown(f, uid, gid)
- except Exception, e:
- self.logWarning(_("Setting User and Group failed"), e)
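Password handling tries the package's own password lines first and then the saved list, remembering whichever password worked by moving it to the top of archive_password.txt. A simplified sketch of that retry loop (WrongPassword here is a stand-in for the exception imported from AbstractExtractor, and "extract" is a placeholder callable, not the plugin's real API):

    class WrongPassword(Exception):
        # stand-in for pyload.plugin.internal.AbstractExtractor.WrongPassword
        pass

    def try_passwords(extract, package_passwords, saved_passwords):
        # Package passwords first, then the saved list without duplicates.
        candidates = list(package_passwords)
        candidates += [pw for pw in saved_passwords if pw not in candidates]
        for pw in candidates:
            try:
                extract(pw)
                return pw  # caller persists the working password for next time
            except WrongPassword:
                continue
        raise Exception("Wrong password")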
diff --git a/pyload/plugin/addon/HotFolder.py b/pyload/plugin/addon/HotFolder.py
deleted file mode 100644
index 3bbafd5ed..000000000
--- a/pyload/plugin/addon/HotFolder.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import time
-
-from os import listdir, makedirs
-from os.path import exists, isfile, join
-from shutil import move
-
-from pyload.plugin.Addon import Addon
-from pyload.utils import fs_encode, safe_join
-
-
-class HotFolder(Addon):
- __name__ = "HotFolder"
- __type__ = "addon"
- __version__ = "0.12"
-
- __config__ = [("folder" , "str" , "Folder to observe" , "container"),
- ("watch_file", "bool", "Observe link file" , False ),
- ("keep" , "bool", "Keep added containers", True ),
- ("file" , "str" , "Link file" , "links.txt")]
-
- __description__ = """Observe folder and file for changes and add container and links"""
- __license__ = "GPLv3"
- __authors__ = [("RaNaN", "RaNaN@pyload.de")]
-
-
- def setup(self):
- self.interval = 10
-
-
- def activate(self):
- self.initPeriodical()
-
-
- def periodical(self):
- folder = fs_encode(self.getConfig("folder"))
-
- try:
- if not exists(join(folder, "finished")):
- makedirs(join(folder, "finished"))
-
- if self.getConfig("watch_file"):
- with open(fs_encode(self.getConfig("file")), "a+") as f:
- content = f.read().strip()
-
- if content:
- name = "%s_%s.txt" % (self.getConfig("file"), time.strftime("%H-%M-%S_%d%b%Y"))
-
- with open(safe_join(folder, "finished", name), "wb") as f:
- f.write(content)
-
- self.core.api.addPackage(f.name, [f.name], 1)
-
- for f in listdir(folder):
- path = join(folder, f)
-
- if not isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
- continue
-
- newpath = join(folder, "finished", f if self.getConfig("keep") else "tmp_" + f)
- move(path, newpath)
-
- self.logInfo(_("Added %s from HotFolder") % f)
- self.core.api.addPackage(f, [newpath], 1)
-
- except IOError, e:
- self.logError(e)
diff --git a/pyload/plugin/addon/IRCInterface.py b/pyload/plugin/addon/IRCInterface.py
deleted file mode 100644
index a4d466319..000000000
--- a/pyload/plugin/addon/IRCInterface.py
+++ /dev/null
@@ -1,431 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import re
-import socket
-import ssl
-import time
-
-from pycurl import FORM_FILE
-from select import select
-from threading import Thread
-from time import sleep
-from traceback import print_exc
-
-from pyload.api import PackageDoesNotExists, FileDoesNotExists
-from pyload.network.RequestFactory import getURL
-from pyload.plugin.Addon import Addon
-from pyload.utils import formatSize
-
-
-class IRCInterface(Thread, Addon):
- __name__ = "IRCInterface"
- __type__ = "addon"
- __version__ = "0.13"
-
- __config__ = [("host" , "str" , "IRC-Server Address" , "Enter your server here!"),
- ("port" , "int" , "IRC-Server Port" , 6667 ),
- ("ident" , "str" , "Clients ident" , "pyload-irc" ),
- ("realname" , "str" , "Realname" , "pyload-irc" ),
- ("ssl" , "bool", "Use SSL" , False ),
- ("nick" , "str" , "Nickname the Client will take" , "pyLoad-IRC" ),
- ("owner" , "str" , "Nickname the Client will accept commands from", "Enter your nick here!" ),
- ("info_file", "bool", "Inform about every file finished" , False ),
- ("info_pack", "bool", "Inform about every package finished" , True ),
- ("captcha" , "bool", "Send captcha requests" , True )]
-
- __description__ = """Connect to irc and let owner perform different tasks"""
- __license__ = "GPLv3"
- __authors__ = [("Jeix", "Jeix@hasnomail.com")]
-
-
- def __init__(self, core, manager):
- Thread.__init__(self)
- Addon.__init__(self, core, manager)
- self.setDaemon(True)
-
-
- def activate(self):
- self.abort = False
- self.more = []
- self.new_package = {}
-
- self.start()
-
-
- def packageFinished(self, pypack):
- try:
- if self.getConfig("info_pack"):
- self.response(_("Package finished: %s") % pypack.name)
- except Exception:
- pass
-
-
- def downloadFinished(self, pyfile):
- try:
- if self.getConfig("info_file"):
- self.response(
- _("Download finished: %(name)s @ %(plugin)s ") % {"name": pyfile.name, "plugin": pyfile.pluginname})
- except Exception:
- pass
-
-
- def captchaTask(self, task):
- if self.getConfig("captcha") and task.isTextual():
- task.handler.append(self)
- task.setWaiting(60)
-
- page = getURL("http://www.freeimagehosting.net/upload.php",
- post={"attached": (FORM_FILE, task.captchaFile)}, multipart=True)
-
- url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", page).group(1)
- self.response(_("New Captcha Request: %s") % url)
- self.response(_("Answer with 'c %s text on the captcha'") % task.id)
-
-
- def run(self):
- # connect to IRC etc.
- self.sock = socket.socket()
- host = self.getConfig("host")
- self.sock.connect((host, self.getConfig("port")))
-
- if self.getConfig("ssl"):
- self.sock = ssl.wrap_socket(self.sock, cert_reqs=ssl.CERT_NONE) #@TODO: support custom certificate
-
- nick = self.getConfig("nick")
- self.sock.send("NICK %s\r\n" % nick)
- self.sock.send("USER %s %s bla :%s\r\n" % (nick, host, nick))
- for t in self.getConfig("owner").split():
- if t.strip().startswith("#"):
- self.sock.send("JOIN %s\r\n" % t.strip())
- self.logInfo(_("Connected to"), host)
- self.logInfo(_("Switching to listening mode!"))
- try:
- self.main_loop()
-
- except IRCError, ex:
- self.sock.send("QUIT :byebye\r\n")
- print_exc()
- self.sock.close()
-
-
- def main_loop(self):
- readbuffer = ""
- while True:
- sleep(1)
- fdset = select([self.sock], [], [], 0)
- if self.sock not in fdset[0]:
- continue
-
- if self.abort:
- raise IRCError("quit")
-
- readbuffer += self.sock.recv(1024)
- temp = readbuffer.split("\n")
- readbuffer = temp.pop()
-
- for line in temp:
- line = line.rstrip()
- first = line.split()
-
- if first[0] == "PING":
- self.sock.send("PONG %s\r\n" % first[1])
-
- if first[0] == "ERROR":
- raise IRCError(line)
-
- msg = line.split(None, 3)
- if len(msg) < 4:
- continue
-
- msg = {
- "origin": msg[0][1:],
- "action": msg[1],
- "target": msg[2],
- "text": msg[3][1:]
- }
-
- self.handle_events(msg)
-
-
- def handle_events(self, msg):
- if not msg['origin'].split("!", 1)[0] in self.getConfig("owner").split():
- return
-
- if msg['target'].split("!", 1)[0] != self.getConfig("nick"):
- return
-
- if msg['action'] != "PRIVMSG":
- return
-
- # HANDLE CTCP ANTI FLOOD/BOT PROTECTION
- if msg['text'] == "\x01VERSION\x01":
- self.logDebug("Sending CTCP VERSION")
- self.sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! IRC Interface"))
- return
- elif msg['text'] == "\x01TIME\x01":
- self.logDebug("Sending CTCP TIME")
- self.sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
- return
- elif msg['text'] == "\x01LAG\x01":
- self.logDebug("Received CTCP LAG") #: don't know how to answer
- return
-
- trigger = "pass"
- args = None
-
- try:
- temp = msg['text'].split()
- trigger = temp[0]
- if len(temp) > 1:
- args = temp[1:]
- except Exception:
- pass
-
- handler = getattr(self, "event_%s" % trigger, self.event_pass)
- try:
- res = handler(args)
- for line in res:
- self.response(line, msg['origin'])
- except Exception, e:
- self.logError(e)
-
-
- def response(self, msg, origin=""):
- if origin == "":
- for t in self.getConfig("owner").split():
- self.sock.send("PRIVMSG %s :%s\r\n" % (t.strip(), msg))
- else:
- self.sock.send("PRIVMSG %s :%s\r\n" % (origin.split("!", 1)[0], msg))
-
-
- #### Events
-
- def event_pass(self, args):
- return []
-
-
- def event_status(self, args):
- downloads = self.core.api.statusDownloads()
- if not downloads:
- return ["INFO: There are no active downloads currently."]
-
- temp_progress = ""
- lines = ["ID - Name - Status - Speed - ETA - Progress"]
- for data in downloads:
-
- if data.status == 5:
- temp_progress = data.format_wait
- else:
- temp_progress = "%d%% (%s)" % (data.percent, data.format_size)
-
- lines.append("#%d - %s - %s - %s - %s - %s" %
- (
- data.fid,
- data.name,
- data.statusmsg,
- "%s/s" % formatSize(data.speed),
- "%s" % data.format_eta,
- temp_progress
- ))
- return lines
-
-
- def event_queue(self, args):
- ps = self.core.api.getQueueData()
-
- if not ps:
- return ["INFO: There are no packages in queue."]
-
- lines = []
- for pack in ps:
- lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
-
- return lines
-
-
- def event_collector(self, args):
- ps = self.core.api.getCollectorData()
- if not ps:
- return ["INFO: No packages in collector!"]
-
- lines = []
- for pack in ps:
- lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links)))
-
- return lines
-
-
- def event_info(self, args):
- if not args:
- return ["ERROR: Use info like this: info <id>"]
-
- info = None
- try:
- info = self.core.api.getFileData(int(args[0]))
-
- except FileDoesNotExists:
-            return ["ERROR: Link doesn't exist."]
-
- return ['LINK #%s: %s (%s) [%s][%s]' % (info.fid, info.name, info.format_size, info.statusmsg, info.plugin)]
-
-
- def event_packinfo(self, args):
- if not args:
- return ["ERROR: Use packinfo like this: packinfo <id>"]
-
- lines = []
- pack = None
- try:
- pack = self.core.api.getPackageData(int(args[0]))
-
- except PackageDoesNotExists:
-            return ["ERROR: Package doesn't exist."]
-
- id = args[0]
-
- self.more = []
-
- lines.append('PACKAGE #%s: "%s" with %d links' % (id, pack.name, len(pack.links)))
- for pyfile in pack.links:
- self.more.append('LINK #%s: %s (%s) [%s][%s]' % (pyfile.fid, pyfile.name, pyfile.format_size,
- pyfile.statusmsg, pyfile.plugin))
-
- if len(self.more) < 6:
- lines.extend(self.more)
- self.more = []
- else:
- lines.extend(self.more[:6])
- self.more = self.more[6:]
-            lines.append("%d more links to display." % len(self.more))
-
- return lines
-
-
- def event_more(self, args):
- if not self.more:
- return ["No more information to display."]
-
- lines = self.more[:6]
- self.more = self.more[6:]
-        lines.append("%d more links to display." % len(self.more))
-
- return lines
-
-
- def event_start(self, args):
- self.core.api.unpauseServer()
- return ["INFO: Starting downloads."]
-
-
- def event_stop(self, args):
- self.core.api.pauseServer()
- return ["INFO: No new downloads will be started."]
-
-
- def event_add(self, args):
- if len(args) < 2:
- return ['ERROR: Add links like this: "add <packagename|id> links". ',
-                    "This will add the link <link> to the package <package> / the package with id <id>!"]
-
- pack = args[0].strip()
- links = [x.strip() for x in args[1:]]
-
- count_added = 0
- count_failed = 0
- try:
- id = int(pack)
- pack = self.core.api.getPackageData(id)
- if not pack:
-                return ["ERROR: Package doesn't exist."]
-
- #TODO add links
-
- return ["INFO: Added %d links to Package %s [#%d]" % (len(links), pack['name'], id)]
-
- except Exception:
- # create new package
- id = self.core.api.addPackage(pack, links, 1)
- return ["INFO: Created new Package %s [#%d] with %d links." % (pack, id, len(links))]
-
-
- def event_del(self, args):
- if len(args) < 2:
- return ["ERROR: Use del command like this: del -p|-l <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
-
- if args[0] == "-p":
- ret = self.core.api.deletePackages(map(int, args[1:]))
- return ["INFO: Deleted %d packages!" % len(args[1:])]
-
- elif args[0] == "-l":
- ret = self.core.api.delLinks(map(int, args[1:]))
- return ["INFO: Deleted %d links!" % len(args[1:])]
-
- else:
- return ["ERROR: Use del command like this: del <-p|-l> <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"]
-
-
- def event_push(self, args):
- if not args:
- return ["ERROR: Push package to queue like this: push <package id>"]
-
- id = int(args[0])
- try:
- info = self.core.api.getPackageInfo(id)
- except PackageDoesNotExists:
- return ["ERROR: Package #%d does not exist." % id]
-
- self.core.api.pushToQueue(id)
- return ["INFO: Pushed package #%d to queue." % id]
-
-
- def event_pull(self, args):
- if not args:
- return ["ERROR: Pull package from queue like this: pull <package id>."]
-
- id = int(args[0])
- if not self.core.api.getPackageData(id):
- return ["ERROR: Package #%d does not exist." % id]
-
- self.core.api.pullFromQueue(id)
- return ["INFO: Pulled package #%d from queue to collector." % id]
-
-
- def event_c(self, args):
- """ captcha answer """
- if not args:
- return ["ERROR: Captcha ID missing."]
-
- task = self.core.captchaManager.getTaskByID(args[0])
- if not task:
-            return ["ERROR: Captcha Task with ID %s does not exist." % args[0]]
-
- task.setResult(" ".join(args[1:]))
- return ["INFO: Result %s saved." % " ".join(args[1:])]
-
-
- def event_help(self, args):
- lines = ["The following commands are available:",
- "add <package|packid> <links> [...] Adds link to package. (creates new package if it does not exist)",
- "queue Shows all packages in the queue",
- "collector Shows all packages in collector",
- "del -p|-l <id> [...] Deletes all packages|links with the ids specified",
- "info <id> Shows info of the link with id <id>",
- "packinfo <id> Shows info of the package with id <id>",
- "more Shows more info when the result was truncated",
- "start Starts all downloads",
- "stop Stops the download (but not abort active downloads)",
- "push <id> Push package to queue",
- "pull <id> Pull package from queue",
- "status Show general download status",
- "help Shows this help message"]
- return lines
-
-
-class IRCError(Exception):
-
- def __init__(self, value):
- self.value = value
-
-
- def __str__(self):
- return repr(self.value)
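At the protocol level the interface only needs the minimal IRC handshake: register with NICK/USER, JOIN any channels listed in the owner setting, and answer server PINGs to keep the connection alive. A bare-bones standalone Python 3 sketch of that handshake (host, nick, and channel are placeholders):

    import socket

    def irc_connect(host="irc.example.net", port=6667, nick="pyLoad-IRC", channel=None):
        sock = socket.create_connection((host, port))
        sock.sendall(("NICK %s\r\n" % nick).encode())
        sock.sendall(("USER %s %s bla :%s\r\n" % (nick, host, nick)).encode())
        if channel:
            sock.sendall(("JOIN %s\r\n" % channel).encode())
        buf = b""
        while True:
            data = sock.recv(1024)
            if not data:
                break
            buf += data
            *lines, buf = buf.split(b"\n")
            for line in lines:
                line = line.rstrip(b"\r")
                if line.startswith(b"PING"):
                    # answer keepalive pings so the server does not drop us
                    sock.sendall(line.replace(b"PING", b"PONG", 1) + b"\r\n")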
diff --git a/pyload/plugin/addon/MergeFiles.py b/pyload/plugin/addon/MergeFiles.py
deleted file mode 100644
index 18836f6ac..000000000
--- a/pyload/plugin/addon/MergeFiles.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import os
-import re
-
-from traceback import print_exc
-
-from pyload.plugin.Addon import Addon, threaded
-from pyload.utils import safe_join, fs_encode
-
-
-class MergeFiles(Addon):
- __name__ = "MergeFiles"
- __type__ = "addon"
- __version__ = "0.13"
-
- __config__ = [("activated", "bool", "Activated", True)]
-
-    __description__ = """Merges parts split with hjsplit"""
- __license__ = "GPLv3"
- __authors__ = [("and9000", "me@has-no-mail.com")]
-
-
- BUFFER_SIZE = 4096
-
-
- def setup(self):
- pass
-
-
- @threaded
- def packageFinished(self, pack):
- files = {}
- fid_dict = {}
- for fid, data in pack.getChildren().iteritems():
- if re.search("\.\d{3}$", data['name']):
- if data['name'][:-4] not in files:
- files[data['name'][:-4]] = []
- files[data['name'][:-4]].append(data['name'])
- files[data['name'][:-4]].sort()
- fid_dict[data['name']] = fid
-
- download_folder = self.config['general']['download_folder']
-
- if self.config['general']['folder_per_package']:
- download_folder = safe_join(download_folder, pack.folder)
-
- for name, file_list in files.iteritems():
- self.logInfo(_("Starting merging of"), name)
-
- final_file = open(safe_join(download_folder, name), "wb")
- for splitted_file in file_list:
- self.logDebug("Merging part", splitted_file)
-
- pyfile = self.core.files.getFile(fid_dict[splitted_file])
-
- pyfile.setStatus("processing")
-
- try:
- with open(os.path.join(download_folder, splitted_file), "rb") as s_file:
- size_written = 0
- s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file)))
-
- while True:
- f_buffer = s_file.read(self.BUFFER_SIZE)
- if f_buffer:
- final_file.write(f_buffer)
- size_written += self.BUFFER_SIZE
- pyfile.setProgress((size_written * 100) / s_file_size)
- else:
- break
-
- self.logDebug("Finished merging part", splitted_file)
-
- except Exception, e:
- print_exc()
-
- finally:
- pyfile.setProgress(100)
- pyfile.setStatus("finished")
- pyfile.release()
-
- self.logInfo(_("Finished merging of"), name)
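Merging is a plain concatenation of the numbered parts in order, copied in small buffers so progress can be reported per part. A standalone Python 3 sketch of the same loop (paths and names are illustrative):

    import os

    BUFFER_SIZE = 4096

    def merge_parts(parts, target):
        # parts: the .001/.002/... pieces of one file, merged in sorted order
        with open(target, "wb") as out:
            for part in sorted(parts):
                total = os.path.getsize(part) or 1
                written = 0
                with open(part, "rb") as src:
                    while True:
                        buf = src.read(BUFFER_SIZE)
                        if not buf:
                            break
                        out.write(buf)
                        written += len(buf)
                        percent = written * 100 // total  # the addon feeds this to setProgress()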
diff --git a/pyload/plugin/addon/MultiHome.py b/pyload/plugin/addon/MultiHome.py
deleted file mode 100644
index 84b1e6ab7..000000000
--- a/pyload/plugin/addon/MultiHome.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from time import time
-
-from pyload.plugin.Addon import Addon
-
-
-class MultiHome(Addon):
- __name__ = "MultiHome"
- __type__ = "addon"
- __version__ = "0.12"
-
- __config__ = [("interfaces", "str", "Interfaces", "None")]
-
-    __description__ = """IP address changer"""
- __license__ = "GPLv3"
- __authors__ = [("mkaay", "mkaay@mkaay.de")]
-
-
- def setup(self):
- self.register = {}
- self.interfaces = []
- self.parseInterfaces(self.getConfig("interfaces").split(";"))
- if not self.interfaces:
- self.parseInterfaces([self.config['download']['interface']])
- self.setConfig("interfaces", self.toConfig())
-
-
- def toConfig(self):
- return ";".join([i.adress for i in self.interfaces])
-
-
- def parseInterfaces(self, interfaces):
- for interface in interfaces:
- if not interface or str(interface).lower() == "none":
- continue
- self.interfaces.append(Interface(interface))
-
-
- def activate(self):
- requestFactory = self.core.requestFactory
- oldGetRequest = requestFactory.getRequest
-
- def getRequest(pluginName, account=None):
- iface = self.bestInterface(pluginName, account)
- if iface:
- iface.useFor(pluginName, account)
- requestFactory.iface = lambda: iface.adress
- self.logDebug("Using address", iface.adress)
- return oldGetRequest(pluginName, account)
-
- requestFactory.getRequest = getRequest
-
-
- def bestInterface(self, pluginName, account):
- best = None
- for interface in self.interfaces:
- if not best or interface.lastPluginAccess(pluginName, account) < best.lastPluginAccess(pluginName, account):
- best = interface
- return best
-
-
-class Interface(object):
-
- def __init__(self, adress):
- self.adress = adress
- self.history = {}
-
-
- def lastPluginAccess(self, pluginName, account):
- if (pluginName, account) in self.history:
- return self.history[(pluginName, account)]
- return 0
-
-
- def useFor(self, pluginName, account):
- self.history[(pluginName, account)] = time()
-
-
- def __repr__(self):
- return "<Interface - %s>" % self.adress
diff --git a/pyload/plugin/addon/RestartFailed.py b/pyload/plugin/addon/RestartFailed.py
deleted file mode 100644
index 2fe5f13bf..000000000
--- a/pyload/plugin/addon/RestartFailed.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyload.plugin.Addon import Addon
-
-
-class RestartFailed(Addon):
- __name__ = "RestartFailed"
- __type__ = "addon"
- __version__ = "1.57"
-
- __config__ = [("activated", "bool", "Activated" , True),
- ("interval" , "int" , "Check interval in minutes", 90 )]
-
- __description__ = """Periodically restart all failed downloads in queue"""
- __license__ = "GPLv3"
- __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # event_list = ["pluginConfigChanged"]
-
- MIN_INTERVAL = 15 * 60 #: 15m minimum check interval (value is in seconds)
-
-
- def pluginConfigChanged(self, plugin, name, value):
- if name == "interval":
- interval = value * 60
- if self.MIN_INTERVAL <= interval != self.interval:
- self.core.scheduler.removeJob(self.cb)
- self.interval = interval
- self.initPeriodical()
- else:
- self.logDebug("Invalid interval value, kept current")
-
-
- def periodical(self):
- self.logDebug(_("Restart failed downloads"))
- self.core.api.restartFailed()
-
-
- def setup(self):
- self.interval = 0
-
-
- def activate(self):
- self.pluginConfigChanged(self.__name__, "interval", self.getConfig("interval"))
diff --git a/pyload/plugin/addon/RestartSlow.py b/pyload/plugin/addon/RestartSlow.py
deleted file mode 100644
index 8621ed80d..000000000
--- a/pyload/plugin/addon/RestartSlow.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import pycurl
-
-from pyload.plugin.Addon import Addon
-
-
-class RestartSlow(Addon):
- __name__ = "RestartSlow"
- __type__ = "addon"
- __version__ = "0.02"
-
- __config__ = [("free_limit" , "int" , "Transfer speed threshold in kilobytes" , 100 ),
- ("free_time" , "int" , "Sample interval in minutes" , 5 ),
- ("premium_limit", "int" , "Transfer speed threshold for premium download in kilobytes", 300 ),
- ("premium_time" , "int" , "Sample interval for premium download in minutes" , 2 ),
- ("safe_mode" , "bool", "Don't restart if download is not resumable" , True)]
-
- __description__ = """Restart slow downloads"""
- __license__ = "GPLv3"
- __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- event_map = {'download-start': "downloadStarts"}
-
-
- def setup(self):
- self.info = {'chunk': {}}
-
-
- def periodical(self):
- if not self.pyfile.req.dl:
- return
-
- if self.getConfig("safe_mode") and not self.pyfile.plugin.resumeDownload:
- time = 30
- limit = 5
- else:
- type = "premium" if self.pyfile.plugin.premium else "free"
- time = max(30, self.getConfig("%s_time" % type) * 60)
- limit = max(5, self.getConfig("%s_limit" % type) * 1024)
-
- chunks = [chunk for chunk in self.pyfile.req.dl.chunks \
-                      if chunk.id not in self.info['chunk'] or self.info['chunk'][chunk.id] != (time, limit)]
-
- for chunk in chunks:
- chunk.c.setopt(pycurl.LOW_SPEED_TIME , time)
- chunk.c.setopt(pycurl.LOW_SPEED_LIMIT, limit)
-
- self.info['chunk'][chunk.id] = (time, limit)
-
-
- def downloadStarts(self, pyfile, url, filename):
- if self.cb or (self.getConfig("safe_mode") and not pyfile.plugin.resumeDownload):
- return
-
- self.initPeriodical()
diff --git a/pyload/plugin/addon/SkipRev.py b/pyload/plugin/addon/SkipRev.py
deleted file mode 100644
index d6908614d..000000000
--- a/pyload/plugin/addon/SkipRev.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from urllib import unquote
-from urlparse import urlparse
-
-from pyload.plugin.Addon import Addon
-from pyload.plugin.Plugin import SkipDownload
-
-
-class SkipRev(Addon):
- __name__ = "SkipRev"
- __type__ = "addon"
- __version__ = "0.15"
-
- __config__ = [("tokeep", "int", "Number of rev files to keep for package (-1 to auto)", -1)]
-
- __description__ = """Skip files ending with extension rev"""
- __license__ = "GPLv3"
- __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- def _setup(self):
- super(self.pyfile.plugin, self).setup()
- if self.pyfile.hasStatus("skipped"):
- raise SkipDownload(self.pyfile.getStatusName() or self.pyfile.pluginname)
-
-
- def pyname(self, pyfile):
- url = pyfile.url
- plugin = pyfile.plugin
-
- if hasattr(plugin, "info") and 'name' in plugin.info and plugin.info['name']:
- name = plugin.info['name']
-
- elif hasattr(plugin, "parseInfos"):
- name = next(plugin.parseInfos([url]))['name']
-
- elif hasattr(plugin, "getInfo"): #@NOTE: if parseInfos was not found, getInfo should be missing too
- name = plugin.getInfo(url)['name']
-
- else:
- self.logWarning("Unable to grab file name")
-            name = urlparse(unquote(url)).path.split('/')[-1]
-
- return name
-
-
- def downloadPreparing(self, pyfile):
-        if pyfile.getStatusName() == "unskipped" or not self.pyname(pyfile).endswith(".rev"):
- return
-
- tokeep = self.getConfig("tokeep")
-
- if tokeep:
- saved = [True for link in pyfile.package().getChildren() \
- if link.name.endswith(".rev") and (link.hasStatus("finished") or link.hasStatus("downloading"))].count(True)
-
- if not saved or saved < tokeep: #: keep one rev at least in auto mode
- return
-
- pyfile.setCustomStatus("SkipRev", "skipped")
- pyfile.plugin.setup = _setup #: work-around: inject status checker inside the preprocessing routine of the plugin
-
-
- def downloadFailed(self, pyfile):
- tokeep = self.getConfig("tokeep")
-
- if not tokeep:
- return
-
- for link in pyfile.package().getChildren():
- if link.hasStatus("skipped") and link.name.endswith(".rev"):
- if tokeep > -1 or pyfile.name.endswith(".rev"):
- link.setStatus("queued")
- else:
- link.setCustomStatus("unskipped", "queued")
- return
diff --git a/pyload/plugin/addon/UnSkipOnFail.py b/pyload/plugin/addon/UnSkipOnFail.py
deleted file mode 100644
index bdbd4a5df..000000000
--- a/pyload/plugin/addon/UnSkipOnFail.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from os.path import basename
-
-from pyload.datatype.File import PyFile
-from pyload.plugin.Addon import Addon
-from pyload.utils import fs_encode
-
-
-class UnSkipOnFail(Addon):
- __name__ = "UnSkipOnFail"
- __type__ = "addon"
- __version__ = "0.02"
-
- __config__ = [("activated", "bool", "Activated", True)]
-
- __description__ = """When a download fails, restart skipped duplicates"""
- __license__ = "GPLv3"
- __authors__ = [("hagg", "")]
-
-
- def downloadFailed(self, pyfile):
- pyfile_name = basename(pyfile.name)
- pid = pyfile.package().id
- msg = _('look for skipped duplicates for %s (pid:%s)')
- self.logInfo(msg % (pyfile_name, pid))
- dups = self.findDuplicates(pyfile)
- for link in dups:
- # check if link is "skipped"(=4)
- if link.status == 4:
- lpid = link.packageID
- self.logInfo(_('restart "%s" (pid:%s)') % (pyfile_name, lpid))
- self.setLinkStatus(link, "queued")
-
-
- def findDuplicates(self, pyfile):
- """ Search all packages for duplicate links to "pyfile".
- Duplicates are links that would overwrite "pyfile".
-        To test for duplicity, the package folder and link name
-        (basename(link.name)) of two links are compared.
-        So this method returns a list of all links with the same
-        package folder and filename as "pyfile", except
-        the entry for "pyfile" itself.
-        It does NOT check the link's status.
- """
- dups = []
- pyfile_name = fs_encode(basename(pyfile.name))
- # get packages (w/o files, as most file data is useless here)
- queue = self.core.api.getQueue()
- for package in queue:
- # check if package-folder equals pyfile's package folder
- if fs_encode(package.folder) == fs_encode(pyfile.package().folder):
- # now get packaged data w/ files/links
- pdata = self.core.api.getPackageData(package.pid)
- if pdata.links:
- for link in pdata.links:
- link_name = fs_encode(basename(link.name))
- # check if link name collides with pdata's name
- if link_name == pyfile_name:
- # at last check if it is not pyfile itself
- if link.fid != pyfile.id:
- dups.append(link)
- return dups
-
-
- def setLinkStatus(self, link, new_status):
- """ Change status of "link" to "new_status".
- "link" has to be a valid FileData object,
- "new_status" has to be a valid status name
- (i.e. "queued" for this Plugin)
- It creates a temporary PyFile object using
- "link" data, changes its status, and tells
- the core.files-manager to save its data.
- """
- pyfile = PyFile(self.core.files,
- link.fid,
- link.url,
- link.name,
- link.size,
- link.status,
- link.error,
- link.plugin,
- link.packageID,
- link.order)
- pyfile.setStatus(new_status)
- self.core.files.save()
- pyfile.release()
diff --git a/pyload/plugin/addon/UpdateManager.py b/pyload/plugin/addon/UpdateManager.py
deleted file mode 100644
index e31612c23..000000000
--- a/pyload/plugin/addon/UpdateManager.py
+++ /dev/null
@@ -1,305 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import with_statement
-
-import re
-import sys
-
-from operator import itemgetter
-from os import path, remove, stat
-
-from pyload.network.RequestFactory import getURL
-from pyload.plugin.Addon import Expose, Addon, threaded
-from pyload.utils import safe_join
-
-
-class UpdateManager(Addon):
- __name__ = "UpdateManager"
- __type__ = "addon"
- __version__ = "0.42"
-
- __config__ = [("activated" , "bool" , "Activated" , True ),
- ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
- ("interval" , "int" , "Check interval in hours" , 8 ),
- ("autorestart" , "bool" , "Automatically restart pyLoad when required" , True ),
- ("reloadplugins", "bool" , "Monitor plugins for code changes in debug mode", True ),
- ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , True )]
-
- __description__ = """Check for updates"""
- __license__ = "GPLv3"
- __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # event_list = ["pluginConfigChanged"]
-
- SERVER_URL = "http://updatemanager.pyload.org"
- VERSION = re.compile(r'__version__.*=.*("|\')([\d.]+)')
- MIN_INTERVAL = 3 * 60 * 60 #: 3h minimum check interval (value is in seconds)
-
-
- def pluginConfigChanged(self, plugin, name, value):
- if name == "interval":
- interval = value * 60 * 60
- if self.MIN_INTERVAL <= interval != self.interval:
- self.core.scheduler.removeJob(self.cb)
- self.interval = interval
- self.initPeriodical()
- else:
- self.logDebug("Invalid interval value, kept current")
-
- elif name == "reloadplugins":
- if self.cb2:
- self.core.scheduler.removeJob(self.cb2)
- if value is True and self.core.debug:
- self.periodical2()
-
-
- def activate(self):
- self.pluginConfigChanged(self.__name__, "interval", self.getConfig("interval"))
- x = lambda: self.pluginConfigChanged(self.__name__, "reloadplugins", self.getConfig("reloadplugins"))
- self.core.scheduler.addJob(10, x, threaded=False)
-
-
- def deactivate(self):
- self.pluginConfigChanged(self.__name__, "reloadplugins", False)
-
-
- def setup(self):
- self.cb2 = None
- self.interval = 0
- self.updating = False
- self.info = {'pyload': False, 'version': None, 'plugins': False}
- self.mtimes = {} #: store modification time for each plugin
-
-
- def periodical2(self):
- if not self.updating:
- self.autoreloadPlugins()
-
- self.cb2 = self.core.scheduler.addJob(4, self.periodical2, threaded=False)
-
-
- @Expose
- def autoreloadPlugins(self):
- """ reload and reindex all modified plugins """
- modules = filter(
- lambda m: m and (m.__name__.startswith("pyload.plugin.") or
- m.__name__.startswith("userplugins.")) and
- m.__name__.count(".") >= 2, sys.modules.itervalues()
- )
-
- reloads = []
-
- for m in modules:
- root, type, name = m.__name__.rsplit(".", 2)
- id = (type, name)
- if type in self.core.pluginManager.plugins:
- f = m.__file__.replace(".pyc", ".py")
- if not path.isfile(f):
- continue
-
- mtime = stat(f).st_mtime
-
- if id not in self.mtimes:
- self.mtimes[id] = mtime
- elif self.mtimes[id] < mtime:
- reloads.append(id)
- self.mtimes[id] = mtime
-
- return True if self.core.pluginManager.reloadPlugins(reloads) else False
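The reload check above is plain mtime polling over the loaded plugin modules. A standalone sketch of the same idea, decoupled from the plugin manager (the watched paths and the cache are placeholders):

    # Standalone sketch of the mtime polling autoreloadPlugins() performs.
    import os

    mtimes = {}

    def modified_files(paths):
        out = []
        for p in paths:
            if not os.path.isfile(p):
                continue
            mtime = os.stat(p).st_mtime
            if p not in mtimes:
                mtimes[p] = mtime        # first sighting: remember, don't reload
            elif mtimes[p] < mtime:
                out.append(p)            # file changed since the last check
                mtimes[p] = mtime
        return out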
-
-
- def periodical(self):
- if self.info['pyload'] or (self.getConfig("nodebugupdate") and self.core.debug):
- return
-
- self.updateThread()
-
-
- def server_request(self):
- try:
- return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()
- except Exception:
- self.logWarning(_("Unable to contact server to get updates"))
-
-
- @threaded
- def updateThread(self):
- self.updating = True
-
- status = self.update(onlyplugin=self.getConfig("mode") == "plugins only")
-
- if status == 2 and self.getConfig("autorestart"):
- self.core.api.restart()
- else:
- self.updating = False
-
-
- @Expose
- def updatePlugins(self):
- """ simple wrapper for calling plugin update quickly """
- return self.update(onlyplugin=True)
-
-
- @Expose
- def update(self, onlyplugin=False):
- """ check for updates """
- data = self.server_request()
-
- if not data:
- exitcode = 0
-
- elif data[0] == "None":
- self.logInfo(_("No new pyLoad version available"))
- updates = data[1:]
- exitcode = self._updatePlugins(updates)
-
- elif onlyplugin:
- exitcode = 0
-
- else:
- newversion = data[0]
- self.logInfo(_("*** New pyLoad Version %s available ***") % newversion)
- self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***"))
- exitcode = 3
- self.info['pyload'] = True
- self.info['version'] = newversion
-
- return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required; 3 = No plugins updated, new pyLoad version available
-
-
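Because update() is exposed, other components can call it and branch on the exit codes documented above. A small illustrative mapping of those codes (this helper is hypothetical and not part of the plugin; the real consequences, such as auto-restart, are handled in updateThread()):

    EXIT_ACTIONS = {0: "nothing to do",
                    1: "plugins updated and reloaded",
                    2: "plugins updated, pyLoad restart required",
                    3: "new pyLoad release available"}

    def describe_update(exitcode):
        return EXIT_ACTIONS.get(exitcode, "unknown exit code %s" % exitcode)

    print(describe_update(2))  # -> plugins updated, pyLoad restart required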
- def _updatePlugins(self, updates):
- """ check for plugin updates """
-
- if self.info['plugins']:
- return False #: plugins were already updated
-
- exitcode = 0
- updated = []
-
- url = updates[0]
- schema = updates[1].split('|')
-
- if "BLACKLIST" in updates:
- blacklist = updates[updates.index('BLACKLIST') + 1:]
- updates = updates[2:updates.index('BLACKLIST')]
- else:
- blacklist = None
- updates = updates[2:]
-
- upgradable = [dict(zip(schema, x.split('|'))) for x in updates]
- blacklisted = [(x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]) for x in blacklist] if blacklist else []
-
- if blacklist:
- # Protect internal plugins against removal
- blacklisted = [(t, n) for t, n in blacklisted if t != "internal"]
-
- # Don't offer upgrades for blacklisted plugins
- for t, n in blacklisted:
- for idx, plugin in enumerate(upgradable):
- if n == plugin['name'] and t == plugin['type']:
- upgradable.pop(idx)
- break
-
- for t, n in self.removePlugins(sorted(blacklisted)):
- self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
- 'type': t,
- 'name': n,
- })
-
- for plugin in sorted(upgradable, key=itemgetter("type", "name")):
- filename = plugin['name']
- type = plugin['type']
- version = plugin['version']
-
- if filename.endswith(".pyc"):
- name = filename[:filename.find("_")]
- else:
- name = filename.replace(".py", "")
-
- plugins = getattr(self.core.pluginManager, "%sPlugins" % type)
-
- oldver = float(plugins[name]['version']) if name in plugins else None
- newver = float(version)
-
- if not oldver:
- msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
- elif newver > oldver:
- msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
- else:
- continue
-
- self.logInfo(_(msg) % {'type' : type,
- 'name' : name,
- 'oldver': oldver,
- 'newver': newver})
- try:
- content = getURL(url % plugin)
- m = self.VERSION.search(content)
-
- if m and m.group(2) == version:
- with open(safe_join("userplugins", type, filename), "wb") as f:
- f.write(content)
-
- updated.append((type, name))
- else:
- raise Exception, _("Version mismatch")
-
- except Exception, e:
- self.logError(_("Error updating plugin: %s") % filename, str(e))
-
- if updated:
- reloaded = self.core.pluginManager.reloadPlugins(updated)
- if reloaded:
- self.logInfo(_("Plugins updated and reloaded"))
- exitcode = 1
- else:
- self.logInfo(_("*** Plugins have been updated, but need a pyLoad restart to be reloaded ***"))
- self.info['plugins'] = True
- exitcode = 2
- else:
- self.logInfo(_("No plugin updates available"))
-
- return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required
-
-
- @Expose
- def removePlugins(self, type_plugins):
- """ delete plugins from disk """
-
- if not type_plugins:
- return
-
- self.logDebug("Requested deletion of plugins: %s" % type_plugins)
-
- removed = []
-
- for type, name in type_plugins:
- err = False
- file = name + ".py"
-
- for root in ("userplugins", path.join(pypath, "pyload", "plugins")):
-
- filename = safe_join(root, type, file)
- try:
- remove(filename)
- except Exception, e:
- self.logDebug("Error deleting: %s" % path.basename(filename), e)
- err = True
-
- filename += "c"
- if path.isfile(filename):
- try:
- if type == "addon":
- self.manager.deactivateAddon(name)
- remove(filename)
- except Exception, e:
- self.logDebug("Error deleting: %s" % path.basename(filename), e)
- err = True
-
- if not err:
- id = (type, name)
- removed.append(id)
-
- return removed #: return a list of the plugins successfully removed
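The server response format that _updatePlugins() parses is only implied by the code: the first line is "None" or a new pyLoad version, followed by a URL template, a '|'-separated schema row, one row per plugin, and optionally a BLACKLIST marker with rows naming plugins to delete. Here is a sketch with made-up data; the field names and values are inferred from the parsing above, not taken from the real update server:

    # Hypothetical server response, reconstructed from the parsing logic above.
    data = ["None",                                          # no new core version
            "http://example.com/plugins/%(type)s/%(name)s",  # url template, filled per plugin dict
            "type|name|version",                             # schema row
            "hoster|ExampleCom.py|0.05",                     # one row per upgradable plugin
            "BLACKLIST",
            "hoster|DeadHost.py"]                            # rows after BLACKLIST: plugins to delete

    url = data[1]
    schema = data[2].split('|')
    rows = data[3:data.index("BLACKLIST")]
    upgradable = [dict(zip(schema, row.split('|'))) for row in rows]
    print(upgradable)  # -> one dict per plugin row: {'type': ..., 'name': ..., 'version': ...}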
diff --git a/pyload/plugin/addon/WindowsPhoneToastNotify.py b/pyload/plugin/addon/WindowsPhoneToastNotify.py
deleted file mode 100644
index 0ac6719e1..000000000
--- a/pyload/plugin/addon/WindowsPhoneToastNotify.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import httplib
-import time
-
-from pyload.plugin.Addon import Addon
-
-
-class WindowsPhoneToastNotify(Addon):
- __name__ = "WindowsPhoneToastNotify"
- __type__ = "addon"
- __version__ = "0.03"
-
- __config__ = [("force" , "bool", "Force even if client is connected" , False),
- ("pushId" , "str" , "pushId" , "" ),
- ("pushUrl" , "str" , "pushUrl" , "" ),
- ("pushTimeout", "int" , "Timeout between notifications in seconds", 0 )]
-
- __description__ = """Send push notifications to Windows Phone"""
- __license__ = "GPLv3"
- __authors__ = [("Andy Voigt", "phone-support@hotmail.de")]
-
-
- def getXmlData(self):
- myxml = ("<?xml version='1.0' encoding='utf-8'?> <wp:Notification xmlns:wp='WPNotification'> "
- "<wp:Toast> <wp:Text1>Pyload Mobile</wp:Text1> <wp:Text2>Captcha waiting!</wp:Text2> "
- "</wp:Toast> </wp:Notification>")
- return myxml
-
-
- def doRequest(self):
- URL = self.getConfig("pushUrl")
- request = self.getXmlData()
- webservice = httplib.HTTP(URL)
- webservice.putrequest("POST", self.getConfig("pushId"))
- webservice.putheader("Host", URL)
- webservice.putheader("Content-type", "text/xml")
- webservice.putheader("X-NotificationClass", "2")
- webservice.putheader("X-WindowsPhone-Target", "toast")
- webservice.putheader("Content-length", "%d" % len(request))
- webservice.endheaders()
- webservice.send(request)
- webservice.close()
- self.setStorage("LAST_NOTIFY", time.time())
-
-
- def captchaTask(self, task):
- if not self.getConfig("pushId") or not self.getConfig("pushUrl"):
- return False
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return False
-
- if (time.time() - float(self.getStorage("LAST_NOTIFY", 0))) < self.getConfig("pushTimeout"):
- return False
-
- self.doRequest()
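doRequest() relies on the legacy httplib.HTTP class; the same toast push can be expressed with the more common httplib.HTTPConnection. A hedged sketch, where push_host and push_path stand in for the configured pushUrl and pushId values:

    # Sketch: the same toast POST written with httplib.HTTPConnection (Python 2).
    import httplib

    def send_toast(push_host, push_path, text1="pyLoad Mobile", text2="Captcha waiting!"):
        body = ("<?xml version='1.0' encoding='utf-8'?>"
                "<wp:Notification xmlns:wp='WPNotification'><wp:Toast>"
                "<wp:Text1>%s</wp:Text1><wp:Text2>%s</wp:Text2>"
                "</wp:Toast></wp:Notification>") % (text1, text2)
        headers = {"Content-type": "text/xml",
                   "X-NotificationClass": "2",
                   "X-WindowsPhone-Target": "toast"}
        conn = httplib.HTTPConnection(push_host)
        conn.request("POST", push_path, body, headers)  # Content-Length is set automatically
        status = conn.getresponse().status
        conn.close()
        return status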
diff --git a/pyload/plugin/addon/XMPPInterface.py b/pyload/plugin/addon/XMPPInterface.py
deleted file mode 100644
index 77a49af6f..000000000
--- a/pyload/plugin/addon/XMPPInterface.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from pyxmpp import streamtls
-from pyxmpp.all import JID, Message
-from pyxmpp.interface import implements
-from pyxmpp.interfaces import *
-from pyxmpp.jabber.client import JabberClient
-
-from pyload.plugin.addon.IRCInterface import IRCInterface
-
-
-class XMPPInterface(IRCInterface, JabberClient):
- __name__ = "XMPPInterface"
- __type__ = "addon"
- __version__ = "0.11"
-
- __config__ = [("jid" , "str" , "Jabber ID" , "user@exmaple-jabber-server.org" ),
- ("pw" , "str" , "Password" , "" ),
- ("tls" , "bool", "Use TLS" , False ),
- ("owners" , "str" , "List of JIDs accepting commands from", "me@icq-gateway.org;some@msn-gateway.org"),
- ("info_file", "bool", "Inform about every file finished" , False ),
- ("info_pack", "bool", "Inform about every package finished" , True ),
- ("captcha" , "bool", "Send captcha requests" , True )]
-
- __description__ = """Connect to jabber and let owner perform different tasks"""
- __license__ = "GPLv3"
- __authors__ = [("RaNaN", "RaNaN@pyload.org")]
-
-
- implements(IMessageHandlersProvider)
-
-
- def __init__(self, core, manager):
- IRCInterface.__init__(self, core, manager)
-
- self.jid = JID(self.getConfig("jid"))
- password = self.getConfig("pw")
-
- # if bare JID is provided add a resource -- it is required
- if not self.jid.resource:
- self.jid = JID(self.jid.node, self.jid.domain, "pyLoad")
-
- if self.getConfig("tls"):
- tls_settings = streamtls.TLSSettings(require=True, verify_peer=False)
- auth = ("sasl:PLAIN", "sasl:DIGEST-MD5")
- else:
- tls_settings = None
- auth = ("sasl:DIGEST-MD5", "digest")
-
- # setup client with provided connection information
- # and identity data
- JabberClient.__init__(self, self.jid, password,
- disco_name="pyLoad XMPP Client", disco_type="bot",
- tls_settings=tls_settings, auth_methods=auth)
-
- self.interface_providers = [
- VersionHandler(self),
- self,
- ]
-
-
- def activate(self):
- self.new_package = {}
-
- self.start()
-
-
- def packageFinished(self, pypack):
- try:
- if self.getConfig("info_pack"):
- self.announce(_("Package finished: %s") % pypack.name)
- except Exception:
- pass
-
-
- def downloadFinished(self, pyfile):
- try:
- if self.getConfig("info_file"):
- self.announce(
- _("Download finished: %(name)s @ %(plugin)s") % {"name": pyfile.name, "plugin": pyfile.pluginname})
- except Exception:
- pass
-
-
- def run(self):
- # connect to the XMPP server
- self.connect()
- try:
- self.loop()
- except Exception, ex:
- self.logError(ex)
-
-
- def stream_state_changed(self, state, arg):
- """This one is called when the state of stream connecting the component
- to a server changes. This will usually be used to let the user
- know what is going on."""
- self.logDebug("*** State changed: %s %r ***" % (state, arg))
-
-
- def disconnected(self):
- self.logDebug("Client was disconnected")
-
-
- def stream_closed(self, stream):
- self.logDebug("Stream was closed", stream)
-
-
- def stream_error(self, err):
- self.logDebug("Stream Error", err)
-
-
- def get_message_handlers(self):
- """Return list of (message_type, message_handler) tuples.
-
- The handlers returned will be called when matching message is received
- in a client session."""
- return [("normal", self.message)]
-
-
- def message(self, stanza):
- """Message handler for the component."""
- subject = stanza.get_subject()
- body = stanza.get_body()
- t = stanza.get_type()
- self.logDebug("Message from %s received." % unicode(stanza.get_from()))
- self.logDebug("Body: %s Subject: %s Type: %s" % (body, subject, t))
-
- if t == "headline":
- # 'headline' messages should never be replied to
- return True
- if subject:
- subject = u"Re: " + subject
-
- to_jid = stanza.get_from()
- from_jid = stanza.get_to()
-
- #j = JID()
- to_name = to_jid.as_utf8()
- from_name = from_jid.as_utf8()
-
- names = self.getConfig("owners").split(";")
-
- if to_name in names or to_jid.node + "@" + to_jid.domain in names:
- messages = []
-
- trigger = "pass"
- args = None
-
- try:
- temp = body.split()
- trigger = temp[0]
- if len(temp) > 1:
- args = temp[1:]
- except Exception:
- pass
-
- handler = getattr(self, "event_%s" % trigger, self.event_pass)
- try:
- res = handler(args)
- for line in res:
- m = Message(
- to_jid=to_jid,
- from_jid=from_jid,
- stanza_type=stanza.get_type(),
- subject=subject,
- body=line)
-
- messages.append(m)
- except Exception, e:
- self.logError(e)
-
- return messages
-
- else:
- return True
-
-
- def response(self, msg, origin=""):
- return self.announce(msg)
-
-
- def announce(self, message):
- """ send message to all owners"""
- for user in self.getConfig("owners").split(";"):
- self.logDebug("Send message to", user)
-
- to_jid = JID(user)
-
- m = Message(from_jid=self.jid,
- to_jid=to_jid,
- stanza_type="chat",
- body=message)
-
- stream = self.get_stream()
- if not stream:
- self.connect()
- stream = self.get_stream()
-
- stream.send(m)
-
-
- def beforeReconnecting(self, ip):
- self.disconnect()
-
-
- def afterReconnecting(self, ip):
- self.connect()
-
-
-class VersionHandler(object):
- """Provides handler for a version query.
-
- This class will answer version queries and announce the 'jabber:iq:version' namespace
- in the client's disco#info results."""
-
- implements(IIqHandlersProvider, IFeaturesProvider)
-
-
- def __init__(self, client):
- """Just remember who created this."""
- self.client = client
-
-
- def get_features(self):
- """Return namespace which should the client include in its reply to a
- disco#info query."""
- return ["jabber:iq:version"]
-
-
- def get_iq_get_handlers(self):
- """Return list of tuples (element_name, namespace, handler) describing
- handlers of <iq type='get'/> stanzas"""
- return [("query", "jabber:iq:version", self.get_version)]
-
-
- def get_iq_set_handlers(self):
- """Return empty list, as this class provides no <iq type='set'/> stanza handler."""
- return []
-
-
- def get_version(self, iq):
- """Handler for jabber:iq:version queries.
-
- jabber:iq:version queries are not supported directly by PyXMPP, so the
- XML node is accessed directly through the libxml2 API. This should be
- used very carefully!"""
- iq = iq.make_result_response()
- q = iq.new_query("jabber:iq:version")
- q.newTextChild(q.ns(), "name", "Echo component")
- q.newTextChild(q.ns(), "version", "1.0")
- return iq
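The message() handler above resolves commands by name: the first word of the body selects an event_<trigger> method, falling back to event_pass. A minimal standalone sketch of that dispatch pattern; the Bot class and its commands are illustrative, not the handlers IRCInterface actually provides:

    # Sketch of the getattr-based command dispatch used in message().
    class Bot(object):
        def event_status(self, args):
            return ["downloads running: 2"]

        def event_pass(self, args):
            return []  # unknown trigger -> no reply

        def handle(self, body):
            parts = body.split()
            trigger = parts[0] if parts else "pass"
            args = parts[1:] or None
            handler = getattr(self, "event_%s" % trigger, self.event_pass)
            return handler(args)

    print(Bot().handle("status"))   # -> ['downloads running: 2']
    print(Bot().handle("unknown"))  # -> []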
diff --git a/pyload/plugin/addon/__init__.py b/pyload/plugin/addon/__init__.py
deleted file mode 100644
index 40a96afc6..000000000
--- a/pyload/plugin/addon/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-