Diffstat (limited to 'module/plugins/hooks')
31 files changed, 58 insertions, 2690 deletions
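Most of the surviving captcha hooks in the hunks below follow one pattern: the `Hook` base class is replaced by `Addon`, the legacy `initPeriodical()`/`setup()` stubs (the "Remove in 0.4.10" workaround) are dropped, the `newCaptchaTask()` callback is renamed to `captchaTask()`, and several imports move from the `module.*` to the `pyload.*` namespace. The remaining files (Checksum, DeleteFinished, DownloadScheduler, ExternalScripts, ExtractArchive, HotFolder, IRCInterface, MergeFiles, MultiHome, RestartFailed, RestartSlow, SkipRev) are deleted outright. As a rough sketch of the converted hook shape — assuming the new `Addon` base class and the `threaded` decorator keep the call signatures the old `Hook` API used, and with placeholder names and URL throughout:

# -*- coding: utf-8 -*-
# Minimal sketch of a converted captcha hook. "ExampleCaptchaHook" and the
# API_URL are placeholders; the helper calls (getConfig, isClientConnected,
# task.setWaiting, task.setResult, ...) mirror the ones used in the hunks below.

from module.network.RequestFactory import getURL
from module.plugins.Addon import Addon, threaded


class ExampleCaptchaHook(Addon):
    __name__    = "ExampleCaptchaHook"
    __type__    = "hook"
    __version__ = "0.01"

    __config__ = [("force"  , "bool"    , "Force captcha solving even if client is connected", False),
                  ("passkey", "password", "API key"                                           , ""   )]

    __description__ = """Illustrative captcha-solver hook"""
    __license__     = "GPLv3"

    API_URL = "http://example.com/api"  #: placeholder endpoint

    # No initPeriodical()/setup() stubs anymore -- the old `self.info = {}`
    # workaround is removed from every converted plugin.

    def getCredits(self):
        res = getURL(self.API_URL, post={"key": self.getConfig("passkey"), "action": "balance"})
        return int(res)

    # Renamed from newCaptchaTask() in the old Hook API.
    def captchaTask(self, task):
        if not task.isTextual():
            return False

        if not self.getConfig("passkey"):
            return False

        if self.core.isClientConnected() and not self.getConfig("force"):
            return False

        task.handler.append(self)
        task.setWaiting(100)
        self.processCaptcha(task)

    @threaded
    def processCaptcha(self, task):
        # Upload task.captchaFile to the solving service and store the answer.
        result = getURL(self.API_URL, post={"key" : self.getConfig("passkey"),
                                            "file": task.captchaFile})
        task.setResult(result)

The pre-patch variants differ only in the base class, the extra stubs, and the `newCaptchaTask()` name, as the hunks for BypassCaptcha.py, DeathByCaptcha.py, ExpertDecoders.py and ImageTyperz.py show; Captcha9Kw.py keeps `Hook` for now but picks up the same callback rename.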
diff --git a/module/plugins/hooks/BypassCaptcha.py b/module/plugins/hooks/BypassCaptcha.py index cf8754dae..bd19fe953 100644 --- a/module/plugins/hooks/BypassCaptcha.py +++ b/module/plugins/hooks/BypassCaptcha.py @@ -4,7 +4,7 @@ from pycurl import FORM_FILE, LOW_SPEED_TIME from module.network.HTTPRequest import BadHeader from module.network.RequestFactory import getURL, getRequest -from module.plugins.Hook import Hook, threaded +from module.plugins.Addon import Addon, threaded class BypassCaptchaException(Exception): @@ -25,13 +25,13 @@ class BypassCaptchaException(Exception): return "<BypassCaptchaException %s>" % self.err -class BypassCaptcha(Hook): +class BypassCaptcha(Addon): __name__ = "BypassCaptcha" __type__ = "hook" __version__ = "0.06" __config__ = [("force", "bool", "Force BC even if client is connected", False), - ("passkey", "password", "Passkey", "")] + ("passkey", "password", "Passkey", "")] __description__ = """Send captchas to BypassCaptcha.com""" __license__ = "GPLv3" @@ -47,15 +47,6 @@ class BypassCaptcha(Hook): GETCREDITS_URL = "http://bypasscaptcha.com/ex_left.php" - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.info = {} #@TODO: Remove in 0.4.10 - - def getCredits(self): res = getURL(self.GETCREDITS_URL, post={"key": self.getConfig("passkey")}) @@ -98,7 +89,7 @@ class BypassCaptcha(Hook): self.logError(_("Could not send response"), e) - def newCaptchaTask(self, task): + def captchaTask(self, task): if "service" in task.data: return False diff --git a/module/plugins/hooks/Captcha9Kw.py b/module/plugins/hooks/Captcha9Kw.py index 544965b0f..04caff345 100644 --- a/module/plugins/hooks/Captcha9Kw.py +++ b/module/plugins/hooks/Captcha9Kw.py @@ -7,28 +7,28 @@ import re from base64 import b64encode from time import sleep -from module.network.HTTPRequest import BadHeader -from module.network.RequestFactory import getURL +from pyload.network.HTTPRequest import BadHeader +from pyload.network.RequestFactory import getURL from module.plugins.Hook import Hook, threaded -class Captcha9Kw(Hook): +class Captcha9kw(Hook): __name__ = "Captcha9Kw" __type__ = "hook" __version__ = "0.28" __config__ = [("ssl" , "bool" , "Use HTTPS" , True ), - ("force" , "bool" , "Force captcha resolving even if client is connected" , True ), - ("confirm" , "bool" , "Confirm Captcha (cost +6 credits)" , False ), - ("captchaperhour", "int" , "Captcha per hour" , "9999" ), - ("captchapermin" , "int" , "Captcha per minute" , "9999" ), - ("prio" , "int" , "Priority (max 10)(cost +0 -> +10 credits)" , "0" ), - ("queue" , "int" , "Max. Queue (max 999)" , "50" ), - ("hoster_options", "string" , "Hoster options (format: pluginname:prio=1:selfsolfe=1:confirm=1:timeout=900|...)", "ShareonlineBiz:prio=0:timeout=999 | UploadedTo:prio=0:timeout=999"), - ("selfsolve" , "bool" , "Selfsolve (manually solve your captcha in your 9kw client if active)" , "0" ), - ("passkey" , "password", "API key" , "" ), - ("timeout" , "int" , "Timeout in seconds (min 60, max 3999)" , "900" )] + ("force" , "bool" , "Force captcha resolving even if client is connected" , True ), + ("confirm" , "bool" , "Confirm Captcha (cost +6 credits)" , False ), + ("captchaperhour", "int" , "Captcha per hour" , "9999" ), + ("captchapermin" , "int" , "Captcha per minute" , "9999" ), + ("prio" , "int" , "Priority (max 10)(cost +0 -> +10 credits)" , "0" ), + ("queue" , "int" , "Max. 
Queue (max 999)" , "50" ), + ("hoster_options", "string" , "Hoster options (format: pluginname:prio=1:selfsolfe=1:confirm=1:timeout=900|...)", "ShareonlineBiz:prio=0:timeout=999 | UploadedTo:prio=0:timeout=999"), + ("selfsolve" , "bool" , "Selfsolve (manually solve your captcha in your 9kw client if active)" , "0" ), + ("passkey" , "password", "API key" , "" ), + ("timeout" , "int" , "Timeout in seconds (min 60, max 3999)" , "900" )] __description__ = """Send captchas to 9kw.eu""" __license__ = "GPLv3" @@ -39,13 +39,7 @@ class Captcha9Kw(Hook): API_URL = "http://www.9kw.eu/index.cgi" - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.info = {} #@TODO: Remove in 0.4.10 + def activate(self): if self.getConfig("ssl"): self.API_URL = self.API_URL.replace("http://", "https://") @@ -168,7 +162,7 @@ class Captcha9Kw(Hook): task.setResult(result) - def newCaptchaTask(self, task): + def captchaTask(self, task): if not task.isTextual() and not task.isPositional(): return diff --git a/module/plugins/hooks/CaptchaBrotherhood.py b/module/plugins/hooks/CaptchaBrotherhood.py index 3c08f5e36..ba9d3eb8e 100644 --- a/module/plugins/hooks/CaptchaBrotherhood.py +++ b/module/plugins/hooks/CaptchaBrotherhood.py @@ -13,7 +13,7 @@ except ImportError: from time import sleep from urllib import urlencode -from module.network.RequestFactory import getURL, getRequest +from pyload.network.RequestFactory import getURL, getRequest from module.plugins.Hook import Hook, threaded @@ -35,14 +35,14 @@ class CaptchaBrotherhoodException(Exception): return "<CaptchaBrotherhoodException %s>" % self.err -class CaptchaBrotherhood(Hook): +class CaptchaBrotherhood(Addon): __name__ = "CaptchaBrotherhood" __type__ = "hook" __version__ = "0.08" __config__ = [("username", "str", "Username", ""), - ("force", "bool", "Force CT even if client is connected", False), - ("passkey", "password", "Password", "")] + ("force", "bool", "Force CT even if client is connected", False), + ("passkey", "password", "Password", "")] __description__ = """Send captchas to CaptchaBrotherhood.com""" __license__ = "GPLv3" @@ -53,15 +53,6 @@ class CaptchaBrotherhood(Hook): API_URL = "http://www.captchabrotherhood.com/" - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.info = {} #@TODO: Remove in 0.4.10 - - def getCredits(self): res = getURL(self.API_URL + "askCredits.aspx", get={"username": self.getConfig("username"), "password": self.getConfig("passkey")}) @@ -136,7 +127,7 @@ class CaptchaBrotherhood(Hook): return res - def newCaptchaTask(self, task): + def captchaTask(self, task): if "service" in task.data: return False diff --git a/module/plugins/hooks/Checksum.py b/module/plugins/hooks/Checksum.py deleted file mode 100644 index 064375a41..000000000 --- a/module/plugins/hooks/Checksum.py +++ /dev/null @@ -1,199 +0,0 @@ -# -*- coding: utf-8 -*- - -from __future__ import with_statement - -import hashlib -import re -import zlib - -from os import remove -from os.path import getsize, isfile, splitext - -from module.plugins.Hook import Hook -from module.utils import save_join, fs_encode - - -def computeChecksum(local_file, algorithm): - if algorithm in getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")): - h = getattr(hashlib, algorithm)() - - with open(local_file, 'rb') as f: - for chunk in iter(lambda: f.read(128 * h.block_size), ''): - h.update(chunk) - - return h.hexdigest() - - elif algorithm in ("adler32", "crc32"): - hf = getattr(zlib, 
algorithm) - last = 0 - - with open(local_file, 'rb') as f: - for chunk in iter(lambda: f.read(8192), ''): - last = hf(chunk, last) - - return "%x" % last - - else: - return None - - -class Checksum(Hook): - __name__ = "Checksum" - __type__ = "hook" - __version__ = "0.16" - - __config__ = [("check_checksum", "bool", "Check checksum? (If False only size will be verified)", True), - ("check_action", "fail;retry;nothing", "What to do if check fails?", "retry"), - ("max_tries", "int", "Number of retries", 2), - ("retry_action", "fail;nothing", "What to do if all retries fail?", "fail"), - ("wait_time", "int", "Time to wait before each retry (seconds)", 1)] - - __description__ = """Verify downloaded file size and checksum""" - __license__ = "GPLv3" - __authors__ = [("zoidberg", "zoidberg@mujmail.cz"), - ("Walter Purcaro", "vuolter@gmail.com"), - ("stickell", "l.stickell@yahoo.it")] - - - methods = {'sfv' : 'crc32', - 'crc' : 'crc32', - 'hash': 'md5'} - regexps = {'sfv' : r'^(?P<NAME>[^;].+)\s+(?P<HASH>[0-9A-Fa-f]{8})$', - 'md5' : r'^(?P<NAME>[0-9A-Fa-f]{32}) (?P<FILE>.+)$', - 'crc' : r'filename=(?P<NAME>.+)\nsize=(?P<SIZE>\d+)\ncrc32=(?P<HASH>[0-9A-Fa-f]{8})$', - 'default': r'^(?P<HASH>[0-9A-Fa-f]+)\s+\*?(?P<NAME>.+)$'} - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def coreReady(self): - if not self.getConfig("check_checksum"): - self.logInfo(_("Checksum validation is disabled in plugin configuration")) - - - def setup(self): - self.algorithms = sorted( - getattr(hashlib, "algorithms", ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")), reverse=True) - self.algorithms.extend(["crc32", "adler32"]) - self.formats = self.algorithms + ["sfv", "crc", "hash"] - - - def downloadFinished(self, pyfile): - """ - Compute checksum for the downloaded file and compare it with the hash provided by the hoster. - pyfile.plugin.check_data should be a dictionary which can contain: - a) if known, the exact filesize in bytes (e.g. "size": 123456789) - b) hexadecimal hash string with algorithm name as key (e.g. 
"md5": "d76505d0869f9f928a17d42d66326307") - """ - if hasattr(pyfile.plugin, "check_data") and isinstance(pyfile.plugin.check_data, dict): - data = pyfile.plugin.check_data.copy() - - elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict): - data = pyfile.plugin.api_data.copy() - - elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict): - data = pyfile.plugin.info.copy() - data.pop('size', None) #@NOTE: Don't check file size until a similary matcher will be implemented - - else: - return - - self.logDebug(data) - - if not pyfile.plugin.lastDownload: - self.checkFailed(pyfile, None, "No file downloaded") - - local_file = fs_encode(pyfile.plugin.lastDownload) - #download_folder = self.config['general']['download_folder'] - #local_file = fs_encode(save_join(download_folder, pyfile.package().folder, pyfile.name)) - - if not isfile(local_file): - self.checkFailed(pyfile, None, "File does not exist") - - # validate file size - if "size" in data: - api_size = int(data['size']) - file_size = getsize(local_file) - - if api_size != file_size: - self.logWarning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size)) - self.checkFailed(pyfile, local_file, "Incorrect file size") - - data.pop('size', None) - - # validate checksum - if data and self.getConfig("check_checksum"): - - if not 'md5' in data: - for type in ("checksum", "hashsum", "hash"): - if type in data: - data['md5'] = data[type] #@NOTE: What happens if it's not an md5 hash? - break - - for key in self.algorithms: - if key in data: - checksum = computeChecksum(local_file, key.replace("-", "").lower()) - if checksum: - if checksum == data[key].lower(): - self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') % - (pyfile.name, key.upper(), checksum)) - break - else: - self.logWarning(_("%s checksum for file %s does not match (%s != %s)") % - (key.upper(), pyfile.name, checksum, data[key])) - self.checkFailed(pyfile, local_file, "Checksums do not match") - else: - self.logWarning(_("Unsupported hashing algorithm"), key.upper()) - else: - self.logWarning(_("Unable to validate checksum for file: ") + pyfile.name) - - - def checkFailed(self, pyfile, local_file, msg): - check_action = self.getConfig("check_action") - if check_action == "retry": - max_tries = self.getConfig("max_tries") - retry_action = self.getConfig("retry_action") - if pyfile.plugin.retries < max_tries: - if local_file: - remove(local_file) - pyfile.plugin.retry(max_tries, self.getConfig("wait_time"), msg) - elif retry_action == "nothing": - return - elif check_action == "nothing": - return - pyfile.plugin.fail(reason=msg) - - - def packageFinished(self, pypack): - download_folder = save_join(self.config['general']['download_folder'], pypack.folder, "") - - for link in pypack.getChildren().itervalues(): - file_type = splitext(link['name'])[1][1:].lower() - - if file_type not in self.formats: - continue - - hash_file = fs_encode(save_join(download_folder, link['name'])) - if not isfile(hash_file): - self.logWarning(_("File not found"), link['name']) - continue - - with open(hash_file) as f: - text = f.read() - - for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text): - data = m.groupdict() - self.logDebug(link['name'], data) - - local_file = fs_encode(save_join(download_folder, data['NAME'])) - algorithm = self.methods.get(file_type, file_type) - checksum = computeChecksum(local_file, algorithm) - if checksum == data['HASH']: - self.logInfo(_('File 
integrity of "%s" verified by %s checksum (%s)') % - (data['NAME'], algorithm, checksum)) - else: - self.logWarning(_("%s checksum for file %s does not match (%s != %s)") % - (algorithm, data['NAME'], checksum, data['HASH'])) diff --git a/module/plugins/hooks/DeathByCaptcha.py b/module/plugins/hooks/DeathByCaptcha.py index d513c446d..8786df12f 100644 --- a/module/plugins/hooks/DeathByCaptcha.py +++ b/module/plugins/hooks/DeathByCaptcha.py @@ -8,9 +8,9 @@ from base64 import b64encode from pycurl import FORM_FILE, HTTPHEADER from time import sleep -from module.common.json_layer import json_loads -from module.network.HTTPRequest import BadHeader -from module.network.RequestFactory import getRequest +from pyload.utils import json_loads +from pyload.network.HTTPRequest import BadHeader +from pyload.network.RequestFactory import getRequest from module.plugins.Hook import Hook, threaded @@ -48,14 +48,14 @@ class DeathByCaptchaException(Exception): return "<DeathByCaptchaException %s>" % self.err -class DeathByCaptcha(Hook): +class DeathByCaptcha(Addon): __name__ = "DeathByCaptcha" __type__ = "hook" __version__ = "0.06" __config__ = [("username", "str", "Username", ""), - ("passkey", "password", "Password", ""), - ("force", "bool", "Force DBC even if client is connected", False)] + ("passkey", "password", "Password", ""), + ("force", "bool", "Force DBC even if client is connected", False)] __description__ = """Send captchas to DeathByCaptcha.com""" __license__ = "GPLv3" @@ -66,15 +66,6 @@ class DeathByCaptcha(Hook): API_URL = "http://api.dbcapi.me/api/" - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.info = {} #@TODO: Remove in 0.4.10 - - def api_response(self, api="captcha", post=False, multipart=False): req = getRequest() req.c.setopt(HTTPHEADER, ["Accept: application/json", "User-Agent: pyLoad %s" % self.core.version]) @@ -165,7 +156,7 @@ class DeathByCaptcha(Hook): return ticket, result - def newCaptchaTask(self, task): + def captchaTask(self, task): if "service" in task.data: return False diff --git a/module/plugins/hooks/DeleteFinished.py b/module/plugins/hooks/DeleteFinished.py deleted file mode 100644 index 5d2b78d50..000000000 --- a/module/plugins/hooks/DeleteFinished.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- - -from module.database import style -from module.plugins.Hook import Hook - - -class DeleteFinished(Hook): - __name__ = "DeleteFinished" - __type__ = "hook" - __version__ = "1.11" - - __config__ = [('activated', 'bool', 'Activated', 'False'), - ('interval', 'int', 'Delete every (hours)', '72'), - ('deloffline', 'bool', 'Delete packages with offline links', 'False')] - - __description__ = """Automatically delete all finished packages from queue""" - __license__ = "GPLv3" - __authors__ = [("Walter Purcaro", "vuolter@gmail.com")] - - - # event_list = ["pluginConfigChanged"] - - - ## overwritten methods ## - def periodical(self): - if not self.info['sleep']: - deloffline = self.getConfig('deloffline') - mode = '0,1,4' if deloffline else '0,4' - msg = _('delete all finished packages in queue list (%s packages with offline links)') - self.logInfo(msg % (_('including') if deloffline else _('excluding'))) - self.deleteFinished(mode) - self.info['sleep'] = True - self.addEvent('packageFinished', self.wakeup) - - - def pluginConfigChanged(self, plugin, name, value): - if name == "interval" and value != self.interval: - self.interval = value * 3600 - self.initPeriodical() - - - def unload(self): - self.removeEvent('packageFinished', 
self.wakeup) - - - def coreReady(self): - self.info = {'sleep': True} - interval = self.getConfig('interval') - self.pluginConfigChanged(self.__name__, 'interval', interval) - self.addEvent('packageFinished', self.wakeup) - - - ## own methods ## - @style.queue - def deleteFinished(self, mode): - self.c.execute('DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE package=packages.id AND status NOT IN (%s))' % mode) - self.c.execute('DELETE FROM links WHERE NOT EXISTS(SELECT 1 FROM packages WHERE id=links.package)') - - - def wakeup(self, pypack): - self.removeEvent('packageFinished', self.wakeup) - self.info['sleep'] = False - - - ## event managing ## - def addEvent(self, event, func): - """Adds an event listener for event name""" - if event in self.m.events: - if func in self.m.events[event]: - self.logDebug("Function already registered", func) - else: - self.m.events[event].append(func) - else: - self.m.events[event] = [func] - - - def setup(self): - self.m = self.manager - self.removeEvent = self.m.removeEvent diff --git a/module/plugins/hooks/DownloadScheduler.py b/module/plugins/hooks/DownloadScheduler.py deleted file mode 100644 index 4996e212d..000000000 --- a/module/plugins/hooks/DownloadScheduler.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- - -import re - -from time import localtime - -from module.plugins.Hook import Hook - - -class DownloadScheduler(Hook): - __name__ = "DownloadScheduler" - __type__ = "hook" - __version__ = "0.22" - - __config__ = [("timetable", "str", "List time periods as hh:mm full or number(kB/s)", - "0:00 full, 7:00 250, 10:00 0, 17:00 150"), - ("abort", "bool", "Abort active downloads when start period with speed 0", False)] - - __description__ = """Download Scheduler""" - __license__ = "GPLv3" - __authors__ = [("zoidberg", "zoidberg@mujmail.cz"), - ("stickell", "l.stickell@yahoo.it")] - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.cb = None # callback to scheduler job; will be by removed hookmanager when hook unloaded - - - def coreReady(self): - self.updateSchedule() - - - def updateSchedule(self, schedule=None): - if schedule is None: - schedule = self.getConfig("timetable") - - schedule = re.findall("(\d{1,2}):(\d{2})[\s]*(-?\d+)", - schedule.lower().replace("full", "-1").replace("none", "0")) - if not schedule: - self.logError(_("Invalid schedule")) - return - - t0 = localtime() - now = (t0.tm_hour, t0.tm_min, t0.tm_sec, "X") - schedule = sorted([(int(x[0]), int(x[1]), 0, int(x[2])) for x in schedule] + [now]) - - self.logDebug("Schedule", schedule) - - for i, v in enumerate(schedule): - if v[3] == "X": - last, next = schedule[i - 1], schedule[(i + 1) % len(schedule)] - self.logDebug("Now/Last/Next", now, last, next) - - self.setDownloadSpeed(last[3]) - - next_time = (((24 + next[0] - now[0]) * 60 + next[1] - now[1]) * 60 + next[2] - now[2]) % 86400 - self.core.scheduler.removeJob(self.cb) - self.cb = self.core.scheduler.addJob(next_time, self.updateSchedule, threaded=False) - - - def setDownloadSpeed(self, speed): - if speed == 0: - abort = self.getConfig("abort") - self.logInfo(_("Stopping download server. 
(Running downloads will %sbe aborted.)") % '' if abort else _('not ')) - self.core.api.pauseServer() - if abort: - self.core.api.stopAllDownloads() - else: - self.core.api.unpauseServer() - - if speed > 0: - self.logInfo(_("Setting download speed to %d kB/s") % speed) - self.core.api.setConfigValue("download", "limit_speed", 1) - self.core.api.setConfigValue("download", "max_speed", speed) - else: - self.logInfo(_("Setting download speed to FULL")) - self.core.api.setConfigValue("download", "limit_speed", 0) - self.core.api.setConfigValue("download", "max_speed", -1) diff --git a/module/plugins/hooks/ExpertDecoders.py b/module/plugins/hooks/ExpertDecoders.py index c9f8204c4..04f6d0795 100644 --- a/module/plugins/hooks/ExpertDecoders.py +++ b/module/plugins/hooks/ExpertDecoders.py @@ -11,13 +11,13 @@ from module.network.RequestFactory import getURL, getRequest from module.plugins.Hook import Hook, threaded -class ExpertDecoders(Hook): +class ExpertDecoders(Addon): __name__ = "ExpertDecoders" __type__ = "hook" __version__ = "0.04" __config__ = [("force", "bool", "Force CT even if client is connected", False), - ("passkey", "password", "Access key", "")] + ("passkey", "password", "Access key", "")] __description__ = """Send captchas to expertdecoders.com""" __license__ = "GPLv3" @@ -28,15 +28,6 @@ class ExpertDecoders(Hook): API_URL = "http://www.fasttypers.org/imagepost.ashx" - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.info = {} #@TODO: Remove in 0.4.10 - - def getCredits(self): res = getURL(self.API_URL, post={"key": self.getConfig("passkey"), "action": "balance"}) @@ -74,7 +65,7 @@ class ExpertDecoders(Hook): task.setResult(result) - def newCaptchaTask(self, task): + def captchaTask(self, task): if not task.isTextual(): return False diff --git a/module/plugins/hooks/ExternalScripts.py b/module/plugins/hooks/ExternalScripts.py deleted file mode 100644 index 8bd803308..000000000 --- a/module/plugins/hooks/ExternalScripts.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- - -import os -import subprocess - -from itertools import chain - -from module.plugins.Hook import Hook -from module.utils import save_join - - -class ExternalScripts(Hook): - __name__ = "ExternalScripts" - __type__ = "hook" - __version__ = "0.29" - - __config__ = [("activated", "bool", "Activated" , True ), - ("wait" , "bool", "Wait script ending", False)] - - __description__ = """Run external scripts""" - __license__ = "GPLv3" - __authors__ = [("mkaay", "mkaay@mkaay.de"), - ("RaNaN", "ranan@pyload.org"), - ("spoob", "spoob@pyload.org"), - ("Walter Purcaro", "vuolter@gmail.com")] - - - event_list = ["archive_extracted", "package_extracted", "all_archives_extracted", "all_archives_processed", - "allDownloadsFinished", "allDownloadsProcessed"] - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.scripts = {} - - folders = ["download_preparing", "download_finished", "all_downloads_finished", "all_downloads_processed", - "before_reconnect", "after_reconnect", - "package_finished", "package_extracted", - "archive_extracted", "all_archives_extracted", "all_archives_processed", - # deprecated folders - "unrar_finished", "all_dls_finished", "all_dls_processed"] - - for folder in folders: - self.scripts[folder] = [] - - self.initPluginType(folder, os.path.join(pypath, 'scripts', folder)) - self.initPluginType(folder, os.path.join('scripts', folder)) - - for script_type, names in self.scripts.iteritems(): - if names: - 
self.logInfo(_("Installed scripts for"), script_type, ", ".join(map(os.path.basename, names))) - - - def initPluginType(self, folder, path): - if not os.path.exists(path): - try: - os.makedirs(path) - - except Exception: - self.logDebug("Script folder %s not created" % folder) - return - - for f in os.listdir(path): - if f.startswith("#") or f.startswith(".") or f.startswith("_") or f.endswith("~") or f.endswith(".swp"): - continue - - if not os.access(os.path.join(path, f), os.X_OK): - self.logWarning(_("Script not executable:") + " %s/%s" % (folder, f)) - - self.scripts[folder].append(os.path.join(path, f)) - - - def callScript(self, script, *args): - try: - cmd = [script] + [str(x) if not isinstance(x, basestring) else x for x in args] - - self.logDebug("Executing", os.path.abspath(script), " ".join(cmd)) - - p = subprocess.Popen(cmd, bufsize=-1) #@NOTE: output goes to pyload - if self.getConfig('wait'): - p.communicate() - - except Exception, e: - self.logError(_("Error in %(script)s: %(error)s") % {"script": os.path.basename(script), "error": e}) - - - def downloadPreparing(self, pyfile): - for script in self.scripts['download_preparing']: - self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.id) - - - def downloadFinished(self, pyfile): - download_folder = self.config['general']['download_folder'] - for script in self.scripts['download_finished']: - filename = save_join(download_folder, pyfile.package().folder, pyfile.name) - self.callScript(script, pyfile.pluginname, pyfile.url, pyfile.name, filename, pyfile.id) - - - def packageFinished(self, pypack): - download_folder = self.config['general']['download_folder'] - for script in self.scripts['package_finished']: - folder = save_join(download_folder, pypack.folder) - self.callScript(script, pypack.name, folder, pypack.password, pypack.id) - - - def beforeReconnecting(self, ip): - for script in self.scripts['before_reconnect']: - self.callScript(script, ip) - - - def afterReconnecting(self, ip): - for script in self.scripts['after_reconnect']: - self.callScript(script, ip) - - - def archive_extracted(self, pyfile, folder, filename, files): - for script in self.scripts['archive_extracted']: - self.callScript(script, folder, filename, files) - for script in self.scripts['unrar_finished']: #: deprecated - self.callScript(script, folder, filename) - - - def package_extracted(self, pypack): - download_folder = self.config['general']['download_folder'] - for script in self.scripts['package_extracted']: - folder = save_join(download_folder, pypack.folder) - self.callScript(script, pypack.name, folder, pypack.password, pypack.id) - - - def all_archives_extracted(self): - for script in self.scripts['all_archives_extracted']: - self.callScript(script) - - - def all_archives_processed(self): - for script in self.scripts['all_archives_processed']: - self.callScript(script) - - - def allDownloadsFinished(self): - for script in chain(self.scripts['all_downloads_finished'], self.scripts['all_dls_finished']): - self.callScript(script) - - - def allDownloadsProcessed(self): - for script in chain(self.scripts['all_downloads_processed'], self.scripts['all_dls_processed']): - self.callScript(script) diff --git a/module/plugins/hooks/ExtractArchive.py b/module/plugins/hooks/ExtractArchive.py deleted file mode 100644 index 3ea8839dc..000000000 --- a/module/plugins/hooks/ExtractArchive.py +++ /dev/null @@ -1,509 +0,0 @@ -# -*- coding: utf-8 -*- - -from __future__ import with_statement - -import os -import sys - -from copy import copy -from 
traceback import print_exc - -# monkey patch bug in python 2.6 and lower -# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717 -if sys.version_info < (2, 7) and os.name != "nt": - import errno - - from subprocess import Popen - - def _eintr_retry_call(func, *args): - while True: - try: - return func(*args) - - except OSError, e: - if e.errno == errno.EINTR: - continue - raise - - - # unsued timeout option for older python version - def wait(self, timeout=0): - """Wait for child process to terminate. Returns returncode - attribute.""" - if self.returncode is None: - try: - pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0) - except OSError, e: - if e.errno != errno.ECHILD: - raise - # This happens if SIGCLD is set to be ignored or waiting - # for child processes has otherwise been disabled for our - # process. This child is dead, we can't get the status. - sts = 0 - self._handle_exitstatus(sts) - return self.returncode - - Popen.wait = wait - -if os.name != "nt": - from grp import getgrnam - from pwd import getpwnam - -from module.plugins.Hook import Hook, threaded, Expose -from module.plugins.internal.Extractor import ArchiveError, CRCError, PasswordError -from module.plugins.internal.SimpleHoster import replace_patterns -from module.utils import fs_encode, save_join, uniqify - - -class ArchiveQueue(object): - - def __init__(self, plugin, storage): - self.plugin = plugin - self.storage = storage - - - def get(self): - try: - return [int(pid) for pid in self.plugin.getStorage("ExtractArchive:%s" % self.storage, "").decode('base64').split()] - except Exception: - return [] - - - def set(self, value): - if isinstance(value, list): - item = str(value)[1:-1].replace(' ', '').replace(',', ' ') - else: - item = str(value).strip() - return self.plugin.setStorage("ExtractArchive:%s" % self.storage, item.encode('base64')[:-1]) - - - def delete(self): - return self.plugin.delStorage("ExtractArchive:%s" % self.storage) - - - def add(self, item): - queue = self.get() - if item not in queue: - return self.set(queue + [item]) - else: - return True - - - def remove(self, item): - queue = self.get() - try: - queue.remove(item) - except ValueError: - pass - if queue == []: - return self.delete() - return self.set(queue) - - - -class ExtractArchive(Hook): - __name__ = "ExtractArchive" - __type__ = "hook" - __version__ = "1.29" - - __config__ = [("activated" , "bool" , "Activated" , True ), - ("fullpath" , "bool" , "Extract with full paths" , True ), - ("overwrite" , "bool" , "Overwrite files" , False ), - ("keepbroken" , "bool" , "Try to extract broken archives" , False ), - ("repair" , "bool" , "Repair broken archives" , True ), - ("usepasswordfile" , "bool" , "Use password file" , True ), - ("passwordfile" , "file" , "Password file" , "archive_password.txt" ), - ("delete" , "bool" , "Delete archive when successfully extracted", False ), - ("subfolder" , "bool" , "Create subfolder for each package" , False ), - ("destination" , "folder", "Extract files to folder" , "" ), - ("extensions" , "str" , "Extract the following extensions" , "7z,bz2,bzip2,gz,gzip,lha,lzh,lzma,rar,tar,taz,tbz,tbz2,tgz,xar,xz,z,zip"), - ("excludefiles" , "str" , "Don't extract the following files" , "*.nfo,*.DS_Store,index.dat,thumb.db" ), - ("recursive" , "bool" , "Extract archives in archives" , True ), - ("waitall" , "bool" , "Wait for all downloads to be finished" , False ), - ("renice" , "int" , "CPU priority" , 0 )] - - __description__ = """Extract different kind of 
archives""" - __license__ = "GPLv3" - __authors__ = [("Walter Purcaro", "vuolter@gmail.com"), - ("Immenz" , "immenz@gmx.net" )] - - - event_list = ["allDownloadsProcessed"] - - NAME_REPLACEMENTS = [(r'\.part\d+\.rar$', ".part.rar")] - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.queue = ArchiveQueue(self, "Queue") - self.failed = ArchiveQueue(self, "Failed") - - self.interval = 60 - self.extracting = False - self.extractors = [] - self.passwords = [] - - - def coreReady(self): - # self.extracting = False - - for p in ("UnRar", "SevenZip", "UnZip"): - try: - module = self.core.pluginManager.loadModule("internal", p) - klass = getattr(module, p) - if klass.isUsable(): - self.extractors.append(klass) - - except OSError, e: - if e.errno == 2: - self.logInfo(_("No %s installed") % p) - else: - self.logWarning(_("Could not activate: %s") % p, e) - if self.core.debug: - print_exc() - - except Exception, e: - self.logWarning(_("Could not activate: %s") % p, e) - if self.core.debug: - print_exc() - - if self.extractors: - self.logInfo(_("Activated") + " " + "|".join("%s %s" % (Extractor.__name__,Extractor.VERSION) for Extractor in self.extractors)) - - if self.getConfig("waitall"): - self.extractPackage(*self.queue.get()) #: Resume unfinished extractions - else: - super(ExtractArchive, self).initPeriodical() - - else: - self.logInfo(_("No Extract plugins activated")) - - - def periodical(self): - if not self.extracting: - self.extractPackage(*self.queue.get()) - - - @Expose - def extractPackage(self, *ids): - """ Extract packages with given id""" - self.manager.startThread(self.extract, ids) - - - def packageFinished(self, pypack): - self.queue.add(pypack.id) - - - @threaded - def allDownloadsProcessed(self, thread): - if self.extract(self.queue.get(), thread): #@NOTE: check only if all gone fine, no failed reporting for now - self.manager.dispatchEvent("all_archives_extracted") - - self.manager.dispatchEvent("all_archives_processed") - - - def extract(self, ids, thread=None): - if not ids: - return False - - self.extracting = True - - processed = [] - extracted = [] - failed = [] - - toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|') - - destination = self.getConfig("destination") - subfolder = self.getConfig("subfolder") - fullpath = self.getConfig("fullpath") - overwrite = self.getConfig("overwrite") - renice = self.getConfig("renice") - recursive = self.getConfig("recursive") - delete = self.getConfig("delete") - keepbroken = self.getConfig("keepbroken") - - extensions = [x.lstrip('.').lower() for x in toList(self.getConfig("extensions"))] - excludefiles = toList(self.getConfig("excludefiles")) - - if extensions: - self.logDebug("Use for extensions: %s" % "|.".join(extensions)) - - # reload from txt file - self.reloadPasswords() - - # dl folder - dl = self.config['general']['download_folder'] - - #iterate packages -> extractors -> targets - for pid in ids: - pypack = self.core.files.getPackage(pid) - - if not pypack: - continue - - self.logInfo(_("Check package: %s") % pypack.name) - - # determine output folder - out = save_join(dl, pypack.folder, destination, "") #: force trailing slash - - if subfolder: - out = save_join(out, pypack.folder) - - if not os.path.exists(out): - os.makedirs(out) - - matched = False - success = True - files_ids = [(save_join(dl, pypack.folder, pylink['name']), pylink['id'], out) for pylink in pypack.getChildren().itervalues()] - - # check as long there are unseen files - 
while files_ids: - new_files_ids = [] - - if extensions: - files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \ - if filter(lambda ext: fname.lower().endswith(ext), extensions)] - - for Extractor in self.extractors: - targets = Extractor.getTargets(files_ids) - if targets: - self.logDebug("Targets for %s: %s" % (Extractor.__name__, targets)) - matched = True - - for fname, fid, fout in targets: - name = os.path.basename(fname) - - if not os.path.exists(fname): - self.logDebug(name, "File not found") - continue - - self.logInfo(name, _("Extract to: %s") % fout) - try: - archive = Extractor(self, - fname, - fout, - fullpath, - overwrite, - excludefiles, - renice, - delete, - keepbroken, - fid) - archive.init() - - new_files = self._extract(archive, fid, pypack.password, thread) - - except Exception, e: - self.logError(name, e) - success = False - continue - - files_ids.remove((fname, fid, fout)) # don't let other extractors spam log - self.logDebug("Extracted files: %s" % new_files) - self.setPermissions(new_files) - - for filename in new_files: - file = fs_encode(save_join(os.path.dirname(archive.filename), filename)) - if not os.path.exists(file): - self.logDebug("New file %s does not exists" % filename) - continue - - if recursive and os.path.isfile(file): - new_files_ids.append((filename, fid, os.path.dirname(filename))) # append as new target - - files_ids = new_files_ids # also check extracted files - - if matched: - if success: - extracted.append(pid) - self.manager.dispatchEvent("package_extracted", pypack) - else: - failed.append(pid) - self.manager.dispatchEvent("package_extract_failed", pypack) - - self.failed.add(pid) - else: - self.logInfo(_("No files found to extract")) - - if not matched or not success and subfolder: - try: - os.rmdir(out) - - except OSError: - pass - - self.queue.remove(pid) - - self.extracting = False - return True if not failed else False - - - def _extract(self, archive, fid, password, thread): - pyfile = self.core.files.getFile(fid) - name = os.path.basename(archive.filename) - - thread.addActive(pyfile) - pyfile.setStatus("processing") - - encrypted = False - try: - try: - archive.check() - - except CRCError, e: - self.logDebug(name, e) - self.logInfo(name, _("Header protected")) - - if self.getConfig("repair"): - self.logWarning(name, _("Repairing...")) - - pyfile.setCustomStatus(_("repairing")) - pyfile.setProgress(0) - - repaired = archive.repair() - - pyfile.setProgress(100) - - if not repaired and not self.getConfig("keepbroken"): - raise CRCError("Archive damaged") - - except PasswordError: - self.logInfo(name, _("Password protected")) - encrypted = True - - except ArchiveError, e: - raise ArchiveError(e) - - self.logDebug("Password: %s" % (password or "No provided")) - - pyfile.setCustomStatus(_("extracting")) - pyfile.setProgress(0) - - if not encrypted or not self.getConfig("usepasswordfile"): - archive.extract(password) - else: - for pw in filter(None, uniqify([password] + self.getPasswords(False))): - try: - self.logDebug("Try password: %s" % pw) - - ispw = archive.isPassword(pw) - if ispw or ispw is None: - archive.extract(pw) - self.addPassword(pw) - break - - except PasswordError: - self.logDebug("Password was wrong") - else: - raise PasswordError - - pyfile.setProgress(100) - pyfile.setCustomStatus(_("finalizing")) - - if self.core.debug: - self.logDebug("Would delete: %s" % ", ".join(archive.getDeleteFiles())) - - if self.getConfig("delete"): - files = archive.getDeleteFiles() - self.logInfo(_("Deleting %s files") % 
len(files)) - for f in files: - file = fs_encode(f) - if os.path.exists(file): - os.remove(file) - else: - self.logDebug("%s does not exists" % f) - - self.logInfo(name, _("Extracting finished")) - - extracted_files = archive.files or archive.list() - self.manager.dispatchEvent("archive_extracted", pyfile, archive.out, archive.filename, extracted_files) - - return extracted_files - - except PasswordError: - self.logError(name, _("Wrong password" if password else "No password found")) - - except CRCError, e: - self.logError(name, _("CRC mismatch"), e) - - except ArchiveError, e: - self.logError(name, _("Archive error"), e) - - except Exception, e: - self.logError(name, _("Unknown error"), e) - if self.core.debug: - print_exc() - - finally: - pyfile.finishIfDone() - - self.manager.dispatchEvent("archive_extract_failed", pyfile) - - raise Exception(_("Extract failed")) - - - @Expose - def getPasswords(self, reload=True): - """ List of saved passwords """ - if reload: - self.reloadPasswords() - - return self.passwords - - - def reloadPasswords(self): - try: - passwords = [] - - file = fs_encode(self.getConfig("passwordfile")) - with open(file) as f: - for pw in f.read().splitlines(): - passwords.append(pw) - - except IOError, e: - self.logError(e) - - else: - self.passwords = passwords - - - @Expose - def addPassword(self, password): - """ Adds a password to saved list""" - try: - self.passwords = uniqify([password] + self.passwords) - - file = fs_encode(self.getConfig("passwordfile")) - with open(file, "wb") as f: - for pw in self.passwords: - f.write(pw + '\n') - - except IOError, e: - self.logError(e) - - - def setPermissions(self, files): - for f in files: - if not os.path.exists(f): - continue - - try: - if self.config['permission']['change_file']: - if os.path.isfile(f): - os.chmod(f, int(self.config['permission']['file'], 8)) - - elif os.path.isdir(f): - os.chmod(f, int(self.config['permission']['folder'], 8)) - - if self.config['permission']['change_dl'] and os.name != "nt": - uid = getpwnam(self.config['permission']['user'])[2] - gid = getgrnam(self.config['permission']['group'])[2] - os.chown(f, uid, gid) - - except Exception, e: - self.logWarning(_("Setting User and Group failed"), e) diff --git a/module/plugins/hooks/FastixRu.py b/module/plugins/hooks/FastixRu.py index 6373da8d9..5f339c4c0 100644 --- a/module/plugins/hooks/FastixRu.py +++ b/module/plugins/hooks/FastixRu.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from module.common.json_layer import json_loads +from pyload.utils import json_loads from module.plugins.internal.MultiHook import MultiHook diff --git a/module/plugins/hooks/HotFolder.py b/module/plugins/hooks/HotFolder.py deleted file mode 100644 index 1ff02c319..000000000 --- a/module/plugins/hooks/HotFolder.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- - -from __future__ import with_statement - -import time - -from os import listdir, makedirs -from os.path import exists, isfile, join -from shutil import move - -from module.plugins.Hook import Hook -from module.utils import fs_encode, save_join - - -class HotFolder(Hook): - __name__ = "HotFolder" - __type__ = "hook" - __version__ = "0.12" - - __config__ = [("folder", "str", "Folder to observe", "container"), - ("watch_file", "bool", "Observe link file", False), - ("keep", "bool", "Keep added containers", True), - ("file", "str", "Link file", "links.txt")] - - __description__ = """Observe folder and file for changes and add container and links""" - __license__ = "GPLv3" - __authors__ = [("RaNaN", 
"RaNaN@pyload.de")] - - - def setup(self): - self.interval = 10 - - - def periodical(self): - folder = fs_encode(self.getConfig("folder")) - - try: - if not exists(join(folder, "finished")): - makedirs(join(folder, "finished")) - - if self.getConfig("watch_file"): - file = fs_encode(self.getConfig("file")) - with open(file, "a+") as f: - content = f.read().strip() - - if content: - name = "%s_%s.txt" % (self.getConfig("file"), time.strftime("%H-%M-%S_%d%b%Y")) - - with open(save_join(folder, "finished", name), "wb") as f: - f.write(content) - - self.core.api.addPackage(f.name, [f.name], 1) - - for f in listdir(folder): - path = join(folder, f) - - if not isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."): - continue - - newpath = join(folder, "finished", f if self.getConfig("keep") else "tmp_" + f) - move(path, newpath) - - self.logInfo(_("Added %s from HotFolder") % f) - self.core.api.addPackage(f, [newpath], 1) - - except IOError, e: - self.logError(e) diff --git a/module/plugins/hooks/IRCInterface.py b/module/plugins/hooks/IRCInterface.py deleted file mode 100644 index 623f2d1bf..000000000 --- a/module/plugins/hooks/IRCInterface.py +++ /dev/null @@ -1,436 +0,0 @@ -# -*- coding: utf-8 -*- - -import re -import socket -import ssl -import time - -from pycurl import FORM_FILE -from select import select -from threading import Thread -from time import sleep -from traceback import print_exc - -from module.Api import PackageDoesNotExists, FileDoesNotExists -from module.network.RequestFactory import getURL -from module.plugins.Hook import Hook -from module.utils import formatSize - - -class IRCInterface(Thread, Hook): - __name__ = "IRCInterface" - __type__ = "hook" - __version__ = "0.13" - - __config__ = [("host", "str", "IRC-Server Address", "Enter your server here!"), - ("port", "int", "IRC-Server Port", 6667), - ("ident", "str", "Clients ident", "pyload-irc"), - ("realname", "str", "Realname", "pyload-irc"), - ("ssl", "bool", "Use SSL", False), - ("nick", "str", "Nickname the Client will take", "pyLoad-IRC"), - ("owner", "str", "Nickname the Client will accept commands from", "Enter your nick here!"), - ("info_file", "bool", "Inform about every file finished", False), - ("info_pack", "bool", "Inform about every package finished", True), - ("captcha", "bool", "Send captcha requests", True)] - - __description__ = """Connect to irc and let owner perform different tasks""" - __license__ = "GPLv3" - __authors__ = [("Jeix", "Jeix@hasnomail.com")] - - - def __init__(self, core, manager): - Thread.__init__(self) - Hook.__init__(self, core, manager) - self.setDaemon(True) - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def coreReady(self): - self.abort = False - self.more = [] - self.new_package = {} - - self.start() - - - def packageFinished(self, pypack): - try: - if self.getConfig("info_pack"): - self.response(_("Package finished: %s") % pypack.name) - except Exception: - pass - - - def downloadFinished(self, pyfile): - try: - if self.getConfig("info_file"): - self.response( - _("Download finished: %(name)s @ %(plugin)s ") % {"name": pyfile.name, "plugin": pyfile.pluginname}) - except Exception: - pass - - - def newCaptchaTask(self, task): - if self.getConfig("captcha") and task.isTextual(): - task.handler.append(self) - task.setWaiting(60) - - html = getURL("http://www.freeimagehosting.net/upload.php", - post={"attached": (FORM_FILE, task.captchaFile)}, multipart=True) - - url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", html).group(1) - 
self.response(_("New Captcha Request: %s") % url) - self.response(_("Answer with 'c %s text on the captcha'") % task.id) - - - def run(self): - # connect to IRC etc. - self.sock = socket.socket() - host = self.getConfig("host") - self.sock.connect((host, self.getConfig("port"))) - - if self.getConfig("ssl"): - self.sock = ssl.wrap_socket(self.sock, cert_reqs=ssl.CERT_NONE) #@TODO: support certificate - - nick = self.getConfig("nick") - self.sock.send("NICK %s\r\n" % nick) - self.sock.send("USER %s %s bla :%s\r\n" % (nick, host, nick)) - for t in self.getConfig("owner").split(): - if t.strip().startswith("#"): - self.sock.send("JOIN %s\r\n" % t.strip()) - self.logInfo(_("Connected to"), host) - self.logInfo(_("Switching to listening mode!")) - try: - self.main_loop() - - except IRCError, ex: - self.sock.send("QUIT :byebye\r\n") - print_exc() - self.sock.close() - - - def main_loop(self): - readbuffer = "" - while True: - sleep(1) - fdset = select([self.sock], [], [], 0) - if self.sock not in fdset[0]: - continue - - if self.abort: - raise IRCError("quit") - - readbuffer += self.sock.recv(1024) - temp = readbuffer.split("\n") - readbuffer = temp.pop() - - for line in temp: - line = line.rstrip() - first = line.split() - - if first[0] == "PING": - self.sock.send("PONG %s\r\n" % first[1]) - - if first[0] == "ERROR": - raise IRCError(line) - - msg = line.split(None, 3) - if len(msg) < 4: - continue - - msg = { - "origin": msg[0][1:], - "action": msg[1], - "target": msg[2], - "text": msg[3][1:] - } - - self.handle_events(msg) - - - def handle_events(self, msg): - if not msg['origin'].split("!", 1)[0] in self.getConfig("owner").split(): - return - - if msg['target'].split("!", 1)[0] != self.getConfig("nick"): - return - - if msg['action'] != "PRIVMSG": - return - - # HANDLE CTCP ANTI FLOOD/BOT PROTECTION - if msg['text'] == "\x01VERSION\x01": - self.logDebug("Sending CTCP VERSION") - self.sock.send("NOTICE %s :%s\r\n" % (msg['origin'], "pyLoad! 
IRC Interface")) - return - elif msg['text'] == "\x01TIME\x01": - self.logDebug("Sending CTCP TIME") - self.sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time())) - return - elif msg['text'] == "\x01LAG\x01": - self.logDebug("Received CTCP LAG") #: don't know how to answer - return - - trigger = "pass" - args = None - - try: - temp = msg['text'].split() - trigger = temp[0] - if len(temp) > 1: - args = temp[1:] - except Exception: - pass - - handler = getattr(self, "event_%s" % trigger, self.event_pass) - try: - res = handler(args) - for line in res: - self.response(line, msg['origin']) - except Exception, e: - self.logError(e) - - - def response(self, msg, origin=""): - if origin == "": - for t in self.getConfig("owner").split(): - self.sock.send("PRIVMSG %s :%s\r\n" % (t.strip(), msg)) - else: - self.sock.send("PRIVMSG %s :%s\r\n" % (origin.split("!", 1)[0], msg)) - - - #### Events - - def event_pass(self, args): - return [] - - - def event_status(self, args): - downloads = self.core.api.statusDownloads() - if not downloads: - return ["INFO: There are no active downloads currently."] - - temp_progress = "" - lines = ["ID - Name - Status - Speed - ETA - Progress"] - for data in downloads: - - if data.status == 5: - temp_progress = data.format_wait - else: - temp_progress = "%d%% (%s)" % (data.percent, data.format_size) - - lines.append("#%d - %s - %s - %s - %s - %s" % - ( - data.fid, - data.name, - data.statusmsg, - "%s/s" % formatSize(data.speed), - "%s" % data.format_eta, - temp_progress - )) - return lines - - - def event_queue(self, args): - ps = self.core.api.getQueueData() - - if not ps: - return ["INFO: There are no packages in queue."] - - lines = [] - for pack in ps: - lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links))) - - return lines - - - def event_collector(self, args): - ps = self.core.api.getCollectorData() - if not ps: - return ["INFO: No packages in collector!"] - - lines = [] - for pack in ps: - lines.append('PACKAGE #%s: "%s" with %d links.' % (pack.pid, pack.name, len(pack.links))) - - return lines - - - def event_info(self, args): - if not args: - return ["ERROR: Use info like this: info <id>"] - - info = None - try: - info = self.core.api.getFileData(int(args[0])) - - except FileDoesNotExists: - return ["ERROR: Link doesn't exists."] - - return ['LINK #%s: %s (%s) [%s][%s]' % (info.fid, info.name, info.format_size, info.statusmsg, info.plugin)] - - - def event_packinfo(self, args): - if not args: - return ["ERROR: Use packinfo like this: packinfo <id>"] - - lines = [] - pack = None - try: - pack = self.core.api.getPackageData(int(args[0])) - - except PackageDoesNotExists: - return ["ERROR: Package doesn't exists."] - - id = args[0] - - self.more = [] - - lines.append('PACKAGE #%s: "%s" with %d links' % (id, pack.name, len(pack.links))) - for pyfile in pack.links: - self.more.append('LINK #%s: %s (%s) [%s][%s]' % (pyfile.fid, pyfile.name, pyfile.format_size, - pyfile.statusmsg, pyfile.plugin)) - - if len(self.more) < 6: - lines.extend(self.more) - self.more = [] - else: - lines.extend(self.more[:6]) - self.more = self.more[6:] - lines.append("%d more links do display." % len(self.more)) - - return lines - - - def event_more(self, args): - if not self.more: - return ["No more information to display."] - - lines = self.more[:6] - self.more = self.more[6:] - lines.append("%d more links do display." 
% len(self.more)) - - return lines - - - def event_start(self, args): - self.core.api.unpauseServer() - return ["INFO: Starting downloads."] - - - def event_stop(self, args): - self.core.api.pauseServer() - return ["INFO: No new downloads will be started."] - - - def event_add(self, args): - if len(args) < 2: - return ['ERROR: Add links like this: "add <packagename|id> links". ', - "This will add the link <link> to to the package <package> / the package with id <id>!"] - - pack = args[0].strip() - links = [x.strip() for x in args[1:]] - - count_added = 0 - count_failed = 0 - try: - id = int(pack) - pack = self.core.api.getPackageData(id) - if not pack: - return ["ERROR: Package doesn't exists."] - - #TODO add links - - return ["INFO: Added %d links to Package %s [#%d]" % (len(links), pack['name'], id)] - - except Exception: - # create new package - id = self.core.api.addPackage(pack, links, 1) - return ["INFO: Created new Package %s [#%d] with %d links." % (pack, id, len(links))] - - - def event_del(self, args): - if len(args) < 2: - return ["ERROR: Use del command like this: del -p|-l <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"] - - if args[0] == "-p": - ret = self.core.api.deletePackages(map(int, args[1:])) - return ["INFO: Deleted %d packages!" % len(args[1:])] - - elif args[0] == "-l": - ret = self.core.api.delLinks(map(int, args[1:])) - return ["INFO: Deleted %d links!" % len(args[1:])] - - else: - return ["ERROR: Use del command like this: del <-p|-l> <id> [...] (-p indicates that the ids are from packages, -l indicates that the ids are from links)"] - - - def event_push(self, args): - if not args: - return ["ERROR: Push package to queue like this: push <package id>"] - - id = int(args[0]) - try: - info = self.core.api.getPackageInfo(id) - except PackageDoesNotExists: - return ["ERROR: Package #%d does not exist." % id] - - self.core.api.pushToQueue(id) - return ["INFO: Pushed package #%d to queue." % id] - - - def event_pull(self, args): - if not args: - return ["ERROR: Pull package from queue like this: pull <package id>."] - - id = int(args[0]) - if not self.core.api.getPackageData(id): - return ["ERROR: Package #%d does not exist." % id] - - self.core.api.pullFromQueue(id) - return ["INFO: Pulled package #%d from queue to collector." % id] - - - def event_c(self, args): - """ captcha answer """ - if not args: - return ["ERROR: Captcha ID missing."] - - task = self.core.captchaManager.getTaskByID(args[0]) - if not task: - return ["ERROR: Captcha Task with ID %s does not exists." % args[0]] - - task.setResult(" ".join(args[1:])) - return ["INFO: Result %s saved." % " ".join(args[1:])] - - - def event_help(self, args): - lines = ["The following commands are available:", - "add <package|packid> <links> [...] Adds link to package. (creates new package if it does not exist)", - "queue Shows all packages in the queue", - "collector Shows all packages in collector", - "del -p|-l <id> [...] 
Deletes all packages|links with the ids specified", - "info <id> Shows info of the link with id <id>", - "packinfo <id> Shows info of the package with id <id>", - "more Shows more info when the result was truncated", - "start Starts all downloads", - "stop Stops the download (but not abort active downloads)", - "push <id> Push package to queue", - "pull <id> Pull package from queue", - "status Show general download status", - "help Shows this help message"] - return lines - - -class IRCError(Exception): - - def __init__(self, value): - self.value = value - - - def __str__(self): - return repr(self.value) diff --git a/module/plugins/hooks/ImageTyperz.py b/module/plugins/hooks/ImageTyperz.py index d62fed385..8bf326f4d 100644 --- a/module/plugins/hooks/ImageTyperz.py +++ b/module/plugins/hooks/ImageTyperz.py @@ -29,14 +29,14 @@ class ImageTyperzException(Exception): return "<ImageTyperzException %s>" % self.err -class ImageTyperz(Hook): +class ImageTyperz(Addon): __name__ = "ImageTyperz" __type__ = "hook" __version__ = "0.06" __config__ = [("username", "str", "Username", ""), - ("passkey", "password", "Password", ""), - ("force", "bool", "Force IT even if client is connected", False)] + ("passkey", "password", "Password", ""), + ("force", "bool", "Force IT even if client is connected", False)] __description__ = """Send captchas to ImageTyperz.com""" __license__ = "GPLv3" @@ -49,15 +49,6 @@ class ImageTyperz(Hook): GETCREDITS_URL = "http://captchatypers.com/Forms/RequestBalance.ashx" - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.info = {} #@TODO: Remove in 0.4.10 - - def getCredits(self): res = getURL(self.GETCREDITS_URL, post={'action': "REQUESTBALANCE", @@ -112,7 +103,7 @@ class ImageTyperz(Hook): return ticket, result - def newCaptchaTask(self, task): + def captchaTask(self, task): if "service" in task.data: return False diff --git a/module/plugins/hooks/LinksnappyCom.py b/module/plugins/hooks/LinksnappyCom.py index 5eb0c7f6d..7e607a3d3 100644 --- a/module/plugins/hooks/LinksnappyCom.py +++ b/module/plugins/hooks/LinksnappyCom.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from module.common.json_layer import json_loads +from pyload.utils import json_loads from module.plugins.internal.MultiHook import MultiHook diff --git a/module/plugins/hooks/MegaDebridEu.py b/module/plugins/hooks/MegaDebridEu.py index f67fa7ac0..0711b3546 100644 --- a/module/plugins/hooks/MegaDebridEu.py +++ b/module/plugins/hooks/MegaDebridEu.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from module.common.json_layer import json_loads +from pyload.utils import json_loads from module.plugins.internal.MultiHook import MultiHook diff --git a/module/plugins/hooks/MergeFiles.py b/module/plugins/hooks/MergeFiles.py deleted file mode 100644 index 9f1348485..000000000 --- a/module/plugins/hooks/MergeFiles.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- - -from __future__ import with_statement - -import os -import re - -from traceback import print_exc - -from module.plugins.Hook import Hook, threaded -from module.utils import save_join - - -class MergeFiles(Hook): - __name__ = "MergeFiles" - __type__ = "hook" - __version__ = "0.14" - - __config__ = [("activated", "bool", "Activated", True)] - - __description__ = """Merges parts splitted with hjsplit""" - __license__ = "GPLv3" - __authors__ = [("and9000", "me@has-no-mail.com")] - - - BUFFER_SIZE = 4096 - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - # nothing to do - pass - - - 
@threaded - def packageFinished(self, pack): - files = {} - fid_dict = {} - for fid, data in pack.getChildren().iteritems(): - if re.search("\.\d{3}$", data['name']): - if data['name'][:-4] not in files: - files[data['name'][:-4]] = [] - files[data['name'][:-4]].append(data['name']) - files[data['name'][:-4]].sort() - fid_dict[data['name']] = fid - - download_folder = self.config['general']['download_folder'] - - if self.config['general']['folder_per_package']: - download_folder = save_join(download_folder, pack.folder) - - for name, file_list in files.iteritems(): - self.logInfo(_("Starting merging of"), name) - - with open(save_join(download_folder, name), "wb") as final_file: - for splitted_file in file_list: - self.logDebug("Merging part", splitted_file) - - pyfile = self.core.files.getFile(fid_dict[splitted_file]) - - pyfile.setStatus("processing") - - try: - with open(save_join(download_folder, splitted_file), "rb") as s_file: - size_written = 0 - s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file))) - while True: - f_buffer = s_file.read(self.BUFFER_SIZE) - if f_buffer: - final_file.write(f_buffer) - size_written += self.BUFFER_SIZE - pyfile.setProgress((size_written * 100) / s_file_size) - else: - break - self.logDebug("Finished merging part", splitted_file) - - except Exception, e: - print_exc() - - finally: - pyfile.setProgress(100) - pyfile.setStatus("finished") - pyfile.release() - - self.logInfo(_("Finished merging of"), name) diff --git a/module/plugins/hooks/MultiHome.py b/module/plugins/hooks/MultiHome.py deleted file mode 100644 index c9f6fc30c..000000000 --- a/module/plugins/hooks/MultiHome.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- - -from time import time - -from module.plugins.Hook import Hook - - -class MultiHome(Hook): - __name__ = "MultiHome" - __type__ = "hook" - __version__ = "0.12" - - __config__ = [("interfaces", "str", "Interfaces", "None")] - - __description__ = """Ip address changer""" - __license__ = "GPLv3" - __authors__ = [("mkaay", "mkaay@mkaay.de")] - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def setup(self): - self.register = {} - self.interfaces = [] - self.parseInterfaces(self.getConfig("interfaces").split(";")) - if not self.interfaces: - self.parseInterfaces([self.config['download']['interface']]) - self.setConfig("interfaces", self.toConfig()) - - - def toConfig(self): - return ";".join(i.adress for i in self.interfaces) - - - def parseInterfaces(self, interfaces): - for interface in interfaces: - if not interface or str(interface).lower() == "none": - continue - self.interfaces.append(Interface(interface)) - - - def coreReady(self): - requestFactory = self.core.requestFactory - oldGetRequest = requestFactory.getRequest - - - def getRequest(pluginName, account=None): - iface = self.bestInterface(pluginName, account) - if iface: - iface.useFor(pluginName, account) - requestFactory.iface = lambda: iface.adress - self.logDebug("Using address", iface.adress) - return oldGetRequest(pluginName, account) - - requestFactory.getRequest = getRequest - - - def bestInterface(self, pluginName, account): - best = None - for interface in self.interfaces: - if not best or interface.lastPluginAccess(pluginName, account) < best.lastPluginAccess(pluginName, account): - best = interface - return best - - -class Interface(object): - - def __init__(self, adress): - self.adress = adress - self.history = {} - - - def lastPluginAccess(self, pluginName, account): - if (pluginName, account) in 
self.history: - return self.history[(pluginName, account)] - return 0 - - - def useFor(self, pluginName, account): - self.history[(pluginName, account)] = time() - - - def __repr__(self): - return "<Interface - %s>" % self.adress diff --git a/module/plugins/hooks/MyfastfileCom.py b/module/plugins/hooks/MyfastfileCom.py index 86408cb6d..a1b5aaba0 100644 --- a/module/plugins/hooks/MyfastfileCom.py +++ b/module/plugins/hooks/MyfastfileCom.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -from module.common.json_layer import json_loads from module.plugins.internal.MultiHook import MultiHook +from pyload.utils import json_loads class MyfastfileCom(MultiHook): diff --git a/module/plugins/hooks/PremiumizeMe.py b/module/plugins/hooks/PremiumizeMe.py index 293fcf339..18b6187c4 100644 --- a/module/plugins/hooks/PremiumizeMe.py +++ b/module/plugins/hooks/PremiumizeMe.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from module.common.json_layer import json_loads +from pyload.utils import json_loads from module.plugins.internal.MultiHook import MultiHook diff --git a/module/plugins/hooks/RPNetBiz.py b/module/plugins/hooks/RPNetBiz.py index 0768bd6cd..2277b10ab 100644 --- a/module/plugins/hooks/RPNetBiz.py +++ b/module/plugins/hooks/RPNetBiz.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from module.common.json_layer import json_loads +from pyload.utils import json_loads from module.plugins.internal.MultiHook import MultiHook diff --git a/module/plugins/hooks/RestartFailed.py b/module/plugins/hooks/RestartFailed.py deleted file mode 100644 index 07fb80967..000000000 --- a/module/plugins/hooks/RestartFailed.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- - -from module.plugins.Hook import Hook - - -class RestartFailed(Hook): - __name__ = "RestartFailed" - __type__ = "hook" - __version__ = "1.57" - - __config__ = [("interval", "int", "Check interval in minutes", 90)] - - __description__ = """Periodically restart all failed downloads in queue""" - __license__ = "GPLv3" - __authors__ = [("Walter Purcaro", "vuolter@gmail.com")] - - - # event_list = ["pluginConfigChanged"] - - MIN_INTERVAL = 15 * 60 #: 15m minimum check interval (value is in seconds) - - - def pluginConfigChanged(self, plugin, name, value): - if name == "interval": - interval = value * 60 - if self.MIN_INTERVAL <= interval != self.interval: - self.core.scheduler.removeJob(self.cb) - self.interval = interval - self.initPeriodical() - else: - self.logDebug("Invalid interval value, kept current") - - - def periodical(self): - self.logDebug(_("Restart failed downloads")) - self.core.api.restartFailed() - - - def setup(self): - self.interval = self.MIN_INTERVAL - - - def coreReady(self): - self.pluginConfigChanged(self.__name__, "interval", self.getConfig("interval")) diff --git a/module/plugins/hooks/RestartSlow.py b/module/plugins/hooks/RestartSlow.py deleted file mode 100644 index c3e1e5468..000000000 --- a/module/plugins/hooks/RestartSlow.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- - -import pycurl - -from module.plugins.Hook import Hook - - -class RestartSlow(Hook): - __name__ = "RestartSlow" - __type__ = "hook" - __version__ = "0.04" - - __config__ = [("free_limit" , "int" , "Transfer speed threshold in kilobytes" , 100 ), - ("free_time" , "int" , "Sample interval in minutes" , 5 ), - ("premium_limit", "int" , "Transfer speed threshold for premium download in kilobytes", 300 ), - ("premium_time" , "int" , "Sample interval for premium download in minutes" , 2 ), - ("safe_mode" , "bool", "Don't restart if download is not 
resumable" , True)] - - __description__ = """Restart slow downloads""" - __license__ = "GPLv3" - __authors__ = [("Walter Purcaro", "vuolter@gmail.com")] - - - event_list = ["downloadStarts"] - - - def setup(self): - self.info = {'chunk': {}} - - - def initPeriodical(self): - pass - - - def periodical(self): - if not self.pyfile.plugin.req.dl: - return - - if self.getConfig("safe_mode") and not self.pyfile.plugin.resumeDownload: - time = 30 - limit = 5 - else: - type = "premium" if self.pyfile.plugin.premium else "free" - time = max(30, self.getConfig("%s_time" % type) * 60) - limit = max(5, self.getConfig("%s_limit" % type) * 1024) - - chunks = [chunk for chunk in self.pyfile.plugin.req.dl.chunks \ - if chunk.id not in self.info['chunk'] or self.info['chunk'][chunk.id] is not (time, limit)] - - for chunk in chunks: - chunk.c.setopt(pycurl.LOW_SPEED_TIME , time) - chunk.c.setopt(pycurl.LOW_SPEED_LIMIT, limit) - - self.info['chunk'][chunk.id] = (time, limit) - - - def downloadStarts(self, pyfile, url, filename): - if self.cb or (self.getConfig("safe_mode") and not pyfile.plugin.resumeDownload): - return - self.pyfile = pyfile - super(RestartSlow, self).initPeriodical() diff --git a/module/plugins/hooks/SimplyPremiumCom.py b/module/plugins/hooks/SimplyPremiumCom.py index 843a3aa82..e51cbd1bc 100644 --- a/module/plugins/hooks/SimplyPremiumCom.py +++ b/module/plugins/hooks/SimplyPremiumCom.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from module.common.json_layer import json_loads +from pyload.utils import json_loads from module.plugins.internal.MultiHook import MultiHook diff --git a/module/plugins/hooks/SkipRev.py b/module/plugins/hooks/SkipRev.py deleted file mode 100644 index 0bbdec3b2..000000000 --- a/module/plugins/hooks/SkipRev.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- - -from types import MethodType -from urllib import unquote -from urlparse import urlparse - -from module.PyFile import PyFile -from module.plugins.Hook import Hook -from module.plugins.Plugin import SkipDownload - - -def _setup(self): - self.pyfile.plugin._setup() - if self.pyfile.hasStatus("skipped"): - raise SkipDownload(self.pyfile.statusname or self.pyfile.pluginname) - - -class SkipRev(Hook): - __name__ = "SkipRev" - __type__ = "hook" - __version__ = "0.25" - - __config__ = [("tokeep", "int", "Number of rev files to keep for package (-1 to auto)", -1)] - - __description__ = """Skip files ending with extension rev""" - __license__ = "GPLv3" - __authors__ = [("Walter Purcaro", "vuolter@gmail.com")] - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def _pyname(self, pyfile): - if hasattr(pyfile.pluginmodule, "getInfo"): - return getattr(pyfile.pluginmodule, "getInfo")([pyfile.url]).next()[0] - else: - self.logWarning("Unable to grab file name") - return urlparse(unquote(pyfile.url)).path.split('/')[-1] - - - def _pyfile(self, link): - return PyFile(self.core.files, - link.fid, - link.url, - link.name, - link.size, - link.status, - link.error, - link.plugin, - link.packageID, - link.order) - - - def downloadPreparing(self, pyfile): - if pyfile.statusname is "unskipped" or not self._pyname(pyfile).endswith(".rev"): - return - - tokeep = self.getConfig("tokeep") - - if tokeep: - status_list = (1, 4, 8, 9, 14) if tokeep < 0 else (1, 3, 4, 8, 9, 14) - - queued = [True for link in self.core.api.getPackageData(pyfile.package().id).links \ - if link.name.endswith(".rev") and link.status not in status_list].count(True) - - if not queued or queued < tokeep: #: keep one rev at least in auto 
mode - return - - pyfile.setCustomStatus("SkipRev", "skipped") - pyfile.plugin._setup = pyfile.plugin.setup - pyfile.plugin.setup = MethodType(_setup, pyfile.plugin) #: work-around: inject status checker inside the preprocessing routine of the plugin - - - def downloadFailed(self, pyfile): - #: Check if pyfile is still "failed", - # maybe might has been restarted in meantime - if pyfile.status != 8: - return - - tokeep = self.getConfig("tokeep") - - if not tokeep: - return - - for link in self.core.api.getPackageData(pyfile.package().id).links: - if link.status is 4 and link.name.endswith(".rev"): - pylink = self._pyfile(link) - - if tokeep > -1 or pyfile.name.endswith(".rev"): - pylink.setStatus("queued") - else: - pylink.setCustomStatus("unskipped", "queued") - - self.core.files.save() - pylink.release() - return diff --git a/module/plugins/hooks/UnSkipOnFail.py b/module/plugins/hooks/UnSkipOnFail.py deleted file mode 100644 index 1becb937a..000000000 --- a/module/plugins/hooks/UnSkipOnFail.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- - -from module.PyFile import PyFile -from module.plugins.Hook import Hook - - -class UnSkipOnFail(Hook): - __name__ = "UnSkipOnFail" - __type__ = "hook" - __version__ = "0.05" - - __config__ = [("activated", "bool", "Activated", True)] - - __description__ = """Queue skipped duplicates when download fails""" - __license__ = "GPLv3" - __authors__ = [("Walter Purcaro", "vuolter@gmail.com")] - - - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def downloadFailed(self, pyfile): - #: Check if pyfile is still "failed", - # maybe might has been restarted in meantime - if pyfile.status != 8: - return - - msg = _("Looking for skipped duplicates of: %s (pid:%s)") - self.logInfo(msg % (pyfile.name, pyfile.package().id)) - - dup = self.findDuplicate(pyfile) - if dup: - self.logInfo(_("Queue found duplicate: %s (pid:%s)") % (dup.name, dup.packageID)) - - #: Change status of "link" to "new_status". - # "link" has to be a valid FileData object, - # "new_status" has to be a valid status name - # (i.e. "queued" for this Plugin) - # It creates a temporary PyFile object using - # "link" data, changes its status, and tells - # the core.files-manager to save its data. - pylink = _pyfile(link) - - pylink.setCustomStatus("UnSkipOnFail", "queued") - - self.core.files.save() - pylink.release() - - else: - self.logInfo(_("No duplicates found")) - - - def findDuplicate(self, pyfile): - """ Search all packages for duplicate links to "pyfile". - Duplicates are links that would overwrite "pyfile". - To test on duplicity the package-folder and link-name - of twolinks are compared (link.name). - So this method returns a list of all links with equal - package-folders and filenames as "pyfile", but except - the data for "pyfile" iotselöf. - It does MOT check the link's status. 
- """ - queue = self.core.api.getQueue() #: get packages (w/o files, as most file data is useless here) - - for package in queue: - #: check if package-folder equals pyfile's package folder - if package.folder != pyfile.package().folder: - continue - - #: now get packaged data w/ files/links - pdata = self.core.api.getPackageData(package.pid) - for link in pdata.links: - #: check if link is "skipped" - if link.status != 4: - continue - - #: check if link name collides with pdata's name - #: AND at last check if it is not pyfile itself - if link.name == pyfile.name and link.fid != pyfile.id: - return link - - - def _pyfile(self, link): - return PyFile(self.core.files, - link.fid, - link.url, - link.name, - link.size, - link.status, - link.error, - link.plugin, - link.packageID, - link.order) diff --git a/module/plugins/hooks/UnrestrictLi.py b/module/plugins/hooks/UnrestrictLi.py index cb5abb26e..b4c547883 100644 --- a/module/plugins/hooks/UnrestrictLi.py +++ b/module/plugins/hooks/UnrestrictLi.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from module.common.json_layer import json_loads +from pyload.utils import json_loads from module.plugins.internal.MultiHook import MultiHook diff --git a/module/plugins/hooks/UpdateManager.py b/module/plugins/hooks/UpdateManager.py deleted file mode 100644 index b6a8bac7c..000000000 --- a/module/plugins/hooks/UpdateManager.py +++ /dev/null @@ -1,310 +0,0 @@ -# -*- coding: utf-8 -*- - -from __future__ import with_statement - -import re -import sys - -from operator import itemgetter -from os import path, remove, stat - -from module.network.RequestFactory import getURL -from module.plugins.Hook import Expose, Hook, threaded -from module.utils import save_join - - -class UpdateManager(Hook): - __name__ = "UpdateManager" - __type__ = "hook" - __version__ = "0.43" - - __config__ = [("activated" , "bool" , "Activated" , True ), - ("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"), - ("interval" , "int" , "Check interval in hours" , 8 ), - ("autorestart" , "bool" , "Automatically restart pyLoad when required" , True ), - ("reloadplugins", "bool" , "Monitor plugins for code changes in debug mode", True ), - ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , False )] - - __description__ = """ Check for updates """ - __license__ = "GPLv3" - __authors__ = [("Walter Purcaro", "vuolter@gmail.com")] - - - # event_list = ["pluginConfigChanged"] - - SERVER_URL = "http://updatemanager.pyload.org" - VERSION = re.compile(r'__version__.*=.*("|\')([\d.]+)') - MIN_INTERVAL = 3 * 60 * 60 #: 3h minimum check interval (value is in seconds) - - - def pluginConfigChanged(self, plugin, name, value): - if name == "interval": - interval = value * 60 * 60 - if self.MIN_INTERVAL <= interval != self.interval: - self.core.scheduler.removeJob(self.cb) - self.interval = interval - self.initPeriodical() - else: - self.logDebug("Invalid interval value, kept current") - - elif name == "reloadplugins": - if self.cb2: - self.core.scheduler.removeJob(self.cb2) - if value is True and self.core.debug: - self.periodical2() - - - def coreReady(self): - self.pluginConfigChanged(self.__name__, "interval", self.getConfig("interval")) - x = lambda: self.pluginConfigChanged(self.__name__, "reloadplugins", self.getConfig("reloadplugins")) - self.core.scheduler.addJob(10, x, threaded=False) - - - def unload(self): - self.pluginConfigChanged(self.__name__, "reloadplugins", False) - - - def setup(self): - self.cb2 = None - self.interval = self.MIN_INTERVAL - 
self.updating = False - self.info = {'pyload': False, 'version': None, 'plugins': False} - self.mtimes = {} #: store modification time for each plugin - - - def periodical2(self): - if not self.updating: - self.autoreloadPlugins() - - self.cb2 = self.core.scheduler.addJob(4, self.periodical2, threaded=False) - - - @Expose - def autoreloadPlugins(self): - """ reload and reindex all modified plugins """ - modules = filter( - lambda m: m and (m.__name__.startswith("module.plugins.") or - m.__name__.startswith("userplugins.")) and - m.__name__.count(".") >= 2, sys.modules.itervalues() - ) - - reloads = [] - - for m in modules: - root, type, name = m.__name__.rsplit(".", 2) - id = (type, name) - if type in self.core.pluginManager.plugins: - f = m.__file__.replace(".pyc", ".py") - if not path.isfile(f): - continue - - mtime = stat(f).st_mtime - - if id not in self.mtimes: - self.mtimes[id] = mtime - elif self.mtimes[id] < mtime: - reloads.append(id) - self.mtimes[id] = mtime - - return True if self.core.pluginManager.reloadPlugins(reloads) else False - - - def periodical(self): - if not self.info['pyload'] and not (self.getConfig("nodebugupdate") and self.core.debug): - self.updateThread() - - - def server_request(self): - try: - return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines() - except Exception: - self.logWarning(_("Unable to contact server to get updates")) - - - @threaded - def updateThread(self): - self.updating = True - - status = self.update(onlyplugin=self.getConfig("mode") == "plugins only") - - if status is 2 and self.getConfig("autorestart"): - self.core.api.restart() - else: - self.updating = False - - - @Expose - def updatePlugins(self): - """ simple wrapper for calling plugin update quickly """ - return self.update(onlyplugin=True) - - - @Expose - def update(self, onlyplugin=False): - """ check for updates """ - data = self.server_request() - - if not data: - exitcode = 0 - - elif data[0] == "None": - self.logInfo(_("No new pyLoad version available")) - updates = data[1:] - exitcode = self._updatePlugins(updates) - - elif onlyplugin: - exitcode = 0 - - else: - newversion = data[0] - self.logInfo(_("*** New pyLoad Version %s available ***") % newversion) - self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***")) - exitcode = 3 - self.info['pyload'] = True - self.info['version'] = newversion - - return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required; 3 = No plugins updated, new pyLoad version available - - - def _updatePlugins(self, updates): - """ check for plugin updates """ - - if self.info['plugins']: - return False #: plugins were already updated - - exitcode = 0 - updated = [] - - url = updates[0] - schema = updates[1].split('|') - - if "BLACKLIST" in updates: - blacklist = updates[updates.index('BLACKLIST') + 1:] - updates = updates[2:updates.index('BLACKLIST')] - else: - blacklist = None - updates = updates[2:] - - upgradable = [dict(zip(schema, x.split('|'))) for x in updates] - blacklisted = [(x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]) for x in blacklist] if blacklist else [] - - if blacklist: - # Protect UpdateManager from self-removing - try: - blacklisted.remove(("hook", "UpdateManager")) - except Exception: - pass - - for t, n in blacklisted: - for idx, plugin in enumerate(upgradable): - if n == plugin['name'] and t == plugin['type']: - upgradable.pop(idx) - break - - for t, n in self.removePlugins(sorted(blacklisted)): - self.logInfo(_("Removed 
blacklisted plugin [%(type)s] %(name)s") % { - 'type': t, - 'name': n, - }) - - for plugin in sorted(upgradable, key=itemgetter("type", "name")): - filename = plugin['name'] - prefix = plugin['type'] - version = plugin['version'] - - if filename.endswith(".pyc"): - name = filename[:filename.find("_")] - else: - name = filename.replace(".py", "") - - #@TODO: obsolete after 0.4.10 - if prefix.endswith("s"): - type = prefix[:-1] - else: - type = prefix - - plugins = getattr(self.core.pluginManager, "%sPlugins" % type) - - oldver = float(plugins[name]['v']) if name in plugins else None - newver = float(version) - - if not oldver: - msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)" - elif newver > oldver: - msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)" - else: - continue - - self.logInfo(_(msg) % {'type' : type, - 'name' : name, - 'oldver': oldver, - 'newver': newver}) - try: - content = getURL(url % plugin) - m = self.VERSION.search(content) - - if m and m.group(2) == version: - with open(save_join("userplugins", prefix, filename), "wb") as f: - f.write(content) - - updated.append((prefix, name)) - else: - raise Exception, _("Version mismatch") - - except Exception, e: - self.logError(_("Error updating plugin: %s") % filename, str(e)) - - if updated: - reloaded = self.core.pluginManager.reloadPlugins(updated) - if reloaded: - self.logInfo(_("Plugins updated and reloaded")) - exitcode = 1 - else: - self.logInfo(_("*** Plugins have been updated, but need a pyLoad restart to be reloaded ***")) - self.info['plugins'] = True - exitcode = 2 - else: - self.logInfo(_("No plugin updates available")) - - return exitcode #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required - - - @Expose - def removePlugins(self, type_plugins): - """ delete plugins from disk """ - - if not type_plugins: - return - - self.logDebug("Requested deletion of plugins: %s" % type_plugins) - - removed = [] - - for type, name in type_plugins: - err = False - file = name + ".py" - - for root in ("userplugins", path.join(pypath, "module", "plugins")): - - filename = save_join(root, type, file) - try: - remove(filename) - except Exception, e: - self.logDebug("Error removing: %s" % path.basename(filename), str(e)) - err = True - - filename += "c" - if path.isfile(filename): - try: - if type == "hook": - self.manager.deactivateHook(name) - remove(filename) - except Exception, e: - self.logDebug("Error removing: %s" % path.basename(filename), str(e)) - err = True - - if not err: - id = (type, name) - removed.append(id) - - return removed #: return a list of the plugins successfully removed diff --git a/module/plugins/hooks/XFileSharingPro.py b/module/plugins/hooks/XFileSharingPro.py index a1ee7f5ef..78e14c179 100644 --- a/module/plugins/hooks/XFileSharingPro.py +++ b/module/plugins/hooks/XFileSharingPro.py @@ -2,10 +2,10 @@ import re -from module.plugins.Hook import Hook +from pyload.plugin.Addon import Addon -class XFileSharingPro(Hook): +class XFileSharingPro(Addon): __name__ = "XFileSharingPro" __type__ = "hook" __version__ = "0.31" @@ -45,20 +45,14 @@ class XFileSharingPro(Hook): # self.loadPattern() - #@TODO: Remove in 0.4.10 - def initPeriodical(self): - pass - - - def coreReady(self): + def activate(self): self.loadPattern() def loadPattern(self): use_builtin_list = self.getConfig('use_builtin_list') - for type, plugin in (("hoster", "XFileSharingPro"), - ("crypter", "XFileSharingProFolder")): + for type in ("hoster", "crypter"): every_plugin = not 
self.getConfig("use_%s_list" % type) if every_plugin: @@ -75,7 +69,7 @@ class XFileSharingPro(Hook): if not plugin_set: self.logInfo(_("No %s to handle") % type) - self._unload(type, plugin) + self._unload(type) return match_list = '|'.join(sorted(plugin_set)) @@ -88,39 +82,23 @@ class XFileSharingPro(Hook): pattern = self.regexp[type][1] % match_list.replace('.', '\.') - dict = self.core.pluginManager.plugins[type][plugin] + dict = self.core.pluginManager.plugins[type]["XFileSharingPro"] dict['pattern'] = pattern dict['re'] = re.compile(pattern) self.logDebug("Loaded %s pattern: %s" % (type, pattern)) - def _unload(self, type, plugin): - dict = self.core.pluginManager.plugins[type][plugin] + def _unload(self, type): + dict = self.core.pluginManager.plugins[type]["XFileSharingPro"] dict['pattern'] = r'^unmatchable$' dict['re'] = re.compile(dict['pattern']) - def unload(self): + def deactivate(self): # self.unloadHoster("BasePlugin") - for type, plugin in (("hoster", "XFileSharingPro"), - ("crypter", "XFileSharingProFolder")): - self._unload(type, plugin) - - - def unloadHoster(self, hoster): - hdict = self.core.pluginManager.hosterPlugins[hoster] - if "new_name" in hdict and hdict['new_name'] == "XFileSharingPro": - if "module" in hdict: - hdict.pop('module', None) - - if "new_module" in hdict: - hdict.pop('new_module', None) - hdict.pop('new_name', None) - - return True - else: - return False + for type in ("hoster", "crypter"): + self._unload(type, "XFileSharingPro") # def downloadFailed(self, pyfile): diff --git a/module/plugins/hooks/XMPPInterface.py b/module/plugins/hooks/XMPPInterface.py deleted file mode 100644 index b8e9fc1ad..000000000 --- a/module/plugins/hooks/XMPPInterface.py +++ /dev/null @@ -1,252 +0,0 @@ -# -*- coding: utf-8 -*- - -from pyxmpp import streamtls -from pyxmpp.all import JID, Message -from pyxmpp.interface import implements -from pyxmpp.interfaces import * -from pyxmpp.jabber.client import JabberClient - -from module.plugins.hooks.IRCInterface import IRCInterface - - -class XMPPInterface(IRCInterface, JabberClient): - __name__ = "XMPPInterface" - __type__ = "hook" - __version__ = "0.11" - - __config__ = [("jid", "str", "Jabber ID", "user@exmaple-jabber-server.org"), - ("pw", "str", "Password", ""), - ("tls", "bool", "Use TLS", False), - ("owners", "str", "List of JIDs accepting commands from", "me@icq-gateway.org;some@msn-gateway.org"), - ("info_file", "bool", "Inform about every file finished", False), - ("info_pack", "bool", "Inform about every package finished", True), - ("captcha", "bool", "Send captcha requests", True)] - - __description__ = """Connect to jabber and let owner perform different tasks""" - __license__ = "GPLv3" - __authors__ = [("RaNaN", "RaNaN@pyload.org")] - - - implements(IMessageHandlersProvider) - - - def __init__(self, core, manager): - IRCInterface.__init__(self, core, manager) - - self.jid = JID(self.getConfig("jid")) - password = self.getConfig("pw") - - # if bare JID is provided add a resource -- it is required - if not self.jid.resource: - self.jid = JID(self.jid.node, self.jid.domain, "pyLoad") - - if self.getConfig("tls"): - tls_settings = streamtls.TLSSettings(require=True, verify_peer=False) - auth = ("sasl:PLAIN", "sasl:DIGEST-MD5") - else: - tls_settings = None - auth = ("sasl:DIGEST-MD5", "digest") - - # setup client with provided connection information - # and identity data - JabberClient.__init__(self, self.jid, password, - disco_name="pyLoad XMPP Client", disco_type="bot", - tls_settings=tls_settings, auth_methods=auth) - 
- self.interface_providers = [ - VersionHandler(self), - self, - ] - - - def coreReady(self): - self.new_package = {} - - self.start() - - - def packageFinished(self, pypack): - try: - if self.getConfig("info_pack"): - self.announce(_("Package finished: %s") % pypack.name) - except Exception: - pass - - - def downloadFinished(self, pyfile): - try: - if self.getConfig("info_file"): - self.announce( - _("Download finished: %(name)s @ %(plugin)s") % {"name": pyfile.name, "plugin": pyfile.pluginname}) - except Exception: - pass - - - def run(self): - # connect to IRC etc. - self.connect() - try: - self.loop() - except Exception, ex: - self.logError(ex) - - - def stream_state_changed(self, state, arg): - """This one is called when the state of stream connecting the component - to a server changes. This will usually be used to let the user - know what is going on.""" - self.logDebug("*** State changed: %s %r ***" % (state, arg)) - - - def disconnected(self): - self.logDebug("Client was disconnected") - - - def stream_closed(self, stream): - self.logDebug("Stream was closed", stream) - - - def stream_error(self, err): - self.logDebug("Stream Error", err) - - - def get_message_handlers(self): - """Return list of (message_type, message_handler) tuples. - - The handlers returned will be called when matching message is received - in a client session.""" - return [("normal", self.message)] - - - def message(self, stanza): - """Message handler for the component.""" - subject = stanza.get_subject() - body = stanza.get_body() - t = stanza.get_type() - self.logDebug("Message from %s received." % unicode(stanza.get_from())) - self.logDebug("Body: %s Subject: %s Type: %s" % (body, subject, t)) - - if t == "headline": - # 'headline' messages should never be replied to - return True - if subject: - subject = u"Re: " + subject - - to_jid = stanza.get_from() - from_jid = stanza.get_to() - - #j = JID() - to_name = to_jid.as_utf8() - from_name = from_jid.as_utf8() - - names = self.getConfig("owners").split(";") - - if to_name in names or to_jid.node + "@" + to_jid.domain in names: - messages = [] - - trigger = "pass" - args = None - - try: - temp = body.split() - trigger = temp[0] - if len(temp) > 1: - args = temp[1:] - except Exception: - pass - - handler = getattr(self, "event_%s" % trigger, self.event_pass) - try: - res = handler(args) - for line in res: - m = Message( - to_jid=to_jid, - from_jid=from_jid, - stanza_type=stanza.get_type(), - subject=subject, - body=line) - - messages.append(m) - except Exception, e: - self.logError(e) - - return messages - - else: - return True - - - def response(self, msg, origin=""): - return self.announce(msg) - - - def announce(self, message): - """ send message to all owners""" - for user in self.getConfig("owners").split(";"): - self.logDebug("Send message to", user) - - to_jid = JID(user) - - m = Message(from_jid=self.jid, - to_jid=to_jid, - stanza_type="chat", - body=message) - - stream = self.get_stream() - if not stream: - self.connect() - stream = self.get_stream() - - stream.send(m) - - - def beforeReconnecting(self, ip): - self.disconnect() - - - def afterReconnecting(self, ip): - self.connect() - - -class VersionHandler(object): - """Provides handler for a version query. 
- - This class will answer version query and announce 'jabber:iq:version' namespace - in the client's disco#info results.""" - - implements(IIqHandlersProvider, IFeaturesProvider) - - - def __init__(self, client): - """Just remember who created this.""" - self.client = client - - - def get_features(self): - """Return namespace which should the client include in its reply to a - disco#info query.""" - return ["jabber:iq:version"] - - - def get_iq_get_handlers(self): - """Return list of tuples (element_name, namespace, handler) describing - handlers of <iq type='get'/> stanzas""" - return [("query", "jabber:iq:version", self.get_version)] - - - def get_iq_set_handlers(self): - """Return empty list, as this class provides no <iq type='set'/> stanza handler.""" - return [] - - - def get_version(self, iq): - """Handler for jabber:iq:version queries. - - jabber:iq:version queries are not supported directly by PyXMPP, so the - XML node is accessed directly through the libxml2 API. This should be - used very carefully!""" - iq = iq.make_result_response() - q = iq.new_query("jabber:iq:version") - q.newTextChild(q.ns(), "name", "Echo component") - q.newTextChild(q.ns(), "version", "1.0") - return iq diff --git a/module/plugins/hooks/__init__.py b/module/plugins/hooks/__init__.py index e69de29bb..40a96afc6 100644 --- a/module/plugins/hooks/__init__.py +++ b/module/plugins/hooks/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- |