summaryrefslogtreecommitdiffstats
path: root/module/plugins/addon
diff options
context:
space:
mode:
Diffstat (limited to 'module/plugins/addon')
-rw-r--r--module/plugins/addon/Checksum.py43
-rw-r--r--module/plugins/addon/ExternalScripts.py37
-rw-r--r--module/plugins/addon/ExtractArchive.py454
-rw-r--r--module/plugins/addon/HotFolder.py3
-rw-r--r--module/plugins/addon/IRCInterface.py4
-rw-r--r--module/plugins/addon/MergeFiles.py6
-rw-r--r--module/plugins/addon/MultiHome.py2
-rw-r--r--module/plugins/addon/RestartSlow.py10
-rw-r--r--module/plugins/addon/SkipRev.py87
-rw-r--r--module/plugins/addon/UnSkipOnFail.py140
-rw-r--r--module/plugins/addon/UpdateManager.py15
-rw-r--r--module/plugins/addon/WindowsPhoneToastNotify.py57
12 files changed, 496 insertions, 362 deletions
diff --git a/module/plugins/addon/Checksum.py b/module/plugins/addon/Checksum.py
index 0589bd55a..35be60773 100644
--- a/module/plugins/addon/Checksum.py
+++ b/module/plugins/addon/Checksum.py
@@ -40,7 +40,7 @@ def computeChecksum(local_file, algorithm):
class Checksum(Addon):
__name__ = "Checksum"
__type__ = "addon"
- __version__ = "0.15"
+ __version__ = "0.16"
__config__ = [("activated" , "bool" , "Activated" , True ),
("check_checksum", "bool" , "Check checksum? (If False only size will be verified)", True ),
@@ -56,11 +56,13 @@ class Checksum(Addon):
("stickell", "l.stickell@yahoo.it")]
- methods = {'sfv': 'crc32', 'crc': 'crc32', 'hash': 'md5'}
- regexps = {'sfv': r'^(?P<name>[^;].+)\s+(?P<hash>[0-9A-Fa-f]{8})$',
- 'md5': r'^(?P<name>[0-9A-Fa-f]{32}) (?P<file>.+)$',
- 'crc': r'filename=(?P<name>.+)\nsize=(?P<size>\d+)\ncrc32=(?P<hash>[0-9A-Fa-f]{8})$',
- 'default': r'^(?P<hash>[0-9A-Fa-f]+)\s+\*?(?P<name>.+)$'}
+ methods = {'sfv' : 'crc32',
+ 'crc' : 'crc32',
+ 'hash': 'md5'}
+ regexps = {'sfv' : r'^(?P<NAME>[^;].+)\s+(?P<HASH>[0-9A-Fa-f]{8})$',
+ 'md5' : r'^(?P<NAME>[0-9A-Fa-f]{32}) (?P<FILE>.+)$',
+ 'crc' : r'filename=(?P<NAME>.+)\nsize=(?P<SIZE>\d+)\ncrc32=(?P<HASH>[0-9A-Fa-f]{8})$',
+ 'default': r'^(?P<HASH>[0-9A-Fa-f]+)\s+\*?(?P<NAME>.+)$'}
def activate(self):
@@ -88,8 +90,9 @@ class Checksum(Addon):
elif hasattr(pyfile.plugin, "api_data") and isinstance(pyfile.plugin.api_data, dict):
data = pyfile.plugin.api_data.copy()
- # elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
- # data = pyfile.plugin.info.copy()
+ elif hasattr(pyfile.plugin, "info") and isinstance(pyfile.plugin.info, dict):
+ data = pyfile.plugin.info.copy()
+ data.pop('size', None) #@NOTE: Don't check file size until a similary matcher will be implemented
else:
return
@@ -106,19 +109,25 @@ class Checksum(Addon):
if not isfile(local_file):
self.checkFailed(pyfile, None, "File does not exist")
- # validate file size
+ # validate file size
if "size" in data:
- api_size = int(data['size'])
+ api_size = int(data['size'])
file_size = getsize(local_file)
+
if api_size != file_size:
self.logWarning(_("File %s has incorrect size: %d B (%d expected)") % (pyfile.name, file_size, api_size))
self.checkFailed(pyfile, local_file, "Incorrect file size")
- del data['size']
+
+ data.pop('size', None)
# validate checksum
if data and self.getConfig("check_checksum"):
- if "checksum" in data:
- data['md5'] = data['checksum']
+
+ if not 'md5' in data:
+ for type in ("checksum", "hashsum", "hash"):
+ if type in data:
+ data['md5'] = data[type] #@NOTE: What happens if it's not an md5 hash?
+ break
for key in self.algorithms:
if key in data:
@@ -175,12 +184,12 @@ class Checksum(Addon):
data = m.groupdict()
self.logDebug(link['name'], data)
- local_file = fs_encode(safe_join(download_folder, data['name']))
+ local_file = fs_encode(safe_join(download_folder, data['NAME']))
algorithm = self.methods.get(file_type, file_type)
checksum = computeChecksum(local_file, algorithm)
- if checksum == data['hash']:
+ if checksum == data['HASH']:
self.logInfo(_('File integrity of "%s" verified by %s checksum (%s)') %
- (data['name'], algorithm, checksum))
+ (data['NAME'], algorithm, checksum))
else:
self.logWarning(_("%s checksum for file %s does not match (%s != %s)") %
- (algorithm, data['name'], checksum, data['hash']))
+ (algorithm, data['NAME'], checksum, data['HASH']))
diff --git a/module/plugins/addon/ExternalScripts.py b/module/plugins/addon/ExternalScripts.py
index 31283afc2..5aebf2338 100644
--- a/module/plugins/addon/ExternalScripts.py
+++ b/module/plugins/addon/ExternalScripts.py
@@ -1,10 +1,9 @@
# -*- coding: utf-8 -*-
+import os
import subprocess
from itertools import chain
-from os import listdir, access, X_OK, makedirs
-from os.path import join, exists, basename, abspath
from pyload.plugin.Addon import Addon
from pyload.utils import safe_join
@@ -13,9 +12,10 @@ from pyload.utils import safe_join
class ExternalScripts(Addon):
__name__ = "ExternalScripts"
__type__ = "addon"
- __version__ = "0.25"
+ __version__ = "0.29"
- __config__ = [("activated", "bool", "Activated", True)]
+ __config__ = [("activated", "bool", "Activated" , True ),
+ ("wait" , "bool", "Wait script ending", False)]
__description__ = """Run external scripts"""
__license__ = "GPLv3"
@@ -46,40 +46,45 @@ class ExternalScripts(Addon):
for folder in folders:
self.scripts[folder] = []
- self.initPluginType(folder, join(pypath, 'scripts', folder))
- self.initPluginType(folder, join('scripts', folder))
+ self.initPluginType(folder, os.path.join(pypath, 'scripts', folder))
+ self.initPluginType(folder, os.path.join('scripts', folder))
for script_type, names in self.scripts.iteritems():
if names:
- self.logInfo(_("Installed scripts for"), script_type, ", ".join([basename(x) for x in names]))
+ self.logInfo(_("Installed scripts for"), script_type, ", ".join(map(os.path.basename, names)))
def initPluginType(self, folder, path):
- if not exists(path):
+ if not os.path.exists(path):
try:
- makedirs(path)
+ os.makedirs(path)
+
except Exception:
self.logDebug("Script folder %s not created" % folder)
return
- for f in listdir(path):
+ for f in os.listdir(path):
if f.startswith("#") or f.startswith(".") or f.startswith("_") or f.endswith("~") or f.endswith(".swp"):
continue
- if not access(join(path, f), X_OK):
+ if not os.access(os.path.join(path, f), os.X_OK):
self.logWarning(_("Script not executable:") + " %s/%s" % (folder, f))
- self.scripts[folder].append(join(path, f))
+ self.scripts[folder].append(os.path.join(path, f))
def callScript(self, script, *args):
try:
cmd = [script] + [str(x) if not isinstance(x, basestring) else x for x in args]
- self.logDebug("Executing", abspath(script), " ".join(cmd))
- #output goes to pyload
- subprocess.Popen(cmd, bufsize=-1)
+
+ self.logDebug("Executing", os.path.abspath(script), " ".join(cmd))
+
+ p = subprocess.Popen(cmd, bufsize=-1) #@NOTE: output goes to pyload
+ if self.getConfig('wait'):
+ p.communicate()
+
except Exception, e:
- self.logError(_("Error in %(script)s: %(error)s") % {"script": basename(script), "error": e})
+ self.logError(_("Error in %(script)s: %(error)s") % {"script": os.path.basename(script), "error": e})
def downloadPreparing(self, pyfile):
diff --git a/module/plugins/addon/ExtractArchive.py b/module/plugins/addon/ExtractArchive.py
index b24bb37a2..3ea8839dc 100644
--- a/module/plugins/addon/ExtractArchive.py
+++ b/module/plugins/addon/ExtractArchive.py
@@ -6,21 +6,20 @@ import os
import sys
from copy import copy
-from os import remove, chmod, makedirs
-from os.path import exists, basename, isfile, isdir
from traceback import print_exc
# monkey patch bug in python 2.6 and lower
# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
if sys.version_info < (2, 7) and os.name != "nt":
import errno
- from subprocess import Popen
+ from subprocess import Popen
def _eintr_retry_call(func, *args):
while True:
try:
return func(*args)
+
except OSError, e:
if e.errno == errno.EINTR:
continue
@@ -48,112 +47,191 @@ if sys.version_info < (2, 7) and os.name != "nt":
if os.name != "nt":
from grp import getgrnam
- from os import chown
from pwd import getpwnam
-from pyload.plugin.Addon import Addon, threaded, Expose
-from pyload.plugin.internal.AbstractExtractor import ArchiveError, CRCError, WrongPassword
-from pyload.utils import safe_join, fs_encode
+from module.plugins.Hook import Hook, threaded, Expose
+from module.plugins.internal.Extractor import ArchiveError, CRCError, PasswordError
+from module.plugins.internal.SimpleHoster import replace_patterns
+from module.utils import fs_encode, save_join, uniqify
+
+
+class ArchiveQueue(object):
+
+ def __init__(self, plugin, storage):
+ self.plugin = plugin
+ self.storage = storage
+
+
+ def get(self):
+ try:
+ return [int(pid) for pid in self.plugin.getStorage("ExtractArchive:%s" % self.storage, "").decode('base64').split()]
+ except Exception:
+ return []
+
+
+ def set(self, value):
+ if isinstance(value, list):
+ item = str(value)[1:-1].replace(' ', '').replace(',', ' ')
+ else:
+ item = str(value).strip()
+ return self.plugin.setStorage("ExtractArchive:%s" % self.storage, item.encode('base64')[:-1])
+
+
+ def delete(self):
+ return self.plugin.delStorage("ExtractArchive:%s" % self.storage)
+
+
+ def add(self, item):
+ queue = self.get()
+ if item not in queue:
+ return self.set(queue + [item])
+ else:
+ return True
+
+
+ def remove(self, item):
+ queue = self.get()
+ try:
+ queue.remove(item)
+ except ValueError:
+ pass
+ if queue == []:
+ return self.delete()
+ return self.set(queue)
+
-class ExtractArchive(Addon):
+class ExtractArchive(Hook):
__name__ = "ExtractArchive"
- __type__ = "addon"
- __version__ = "0.19"
-
- __config__ = [("activated" , "bool" , "Activated" , True ),
- ("fullpath" , "bool" , "Extract full path" , True ),
- ("overwrite" , "bool" , "Overwrite files" , True ),
- ("passwordfile" , "file" , "password file" , "archive_password.txt"),
- ("deletearchive", "bool" , "Delete archives when done" , False ),
- ("subfolder" , "bool" , "Create subfolder for each package" , False ),
- ("destination" , "folder", "Extract files to" , "" ),
- ("excludefiles" , "str" , "Exclude files from unpacking (seperated by ;)", "" ),
- ("recursive" , "bool" , "Extract archives in archvies" , True ),
- ("queue" , "bool" , "Wait for all downloads to be finished" , True ),
- ("renice" , "int" , "CPU Priority" , 0 )]
+ __type__ = "hook"
+ __version__ = "1.29"
+
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("fullpath" , "bool" , "Extract with full paths" , True ),
+ ("overwrite" , "bool" , "Overwrite files" , False ),
+ ("keepbroken" , "bool" , "Try to extract broken archives" , False ),
+ ("repair" , "bool" , "Repair broken archives" , True ),
+ ("usepasswordfile" , "bool" , "Use password file" , True ),
+ ("passwordfile" , "file" , "Password file" , "archive_password.txt" ),
+ ("delete" , "bool" , "Delete archive when successfully extracted", False ),
+ ("subfolder" , "bool" , "Create subfolder for each package" , False ),
+ ("destination" , "folder", "Extract files to folder" , "" ),
+ ("extensions" , "str" , "Extract the following extensions" , "7z,bz2,bzip2,gz,gzip,lha,lzh,lzma,rar,tar,taz,tbz,tbz2,tgz,xar,xz,z,zip"),
+ ("excludefiles" , "str" , "Don't extract the following files" , "*.nfo,*.DS_Store,index.dat,thumb.db" ),
+ ("recursive" , "bool" , "Extract archives in archives" , True ),
+ ("waitall" , "bool" , "Wait for all downloads to be finished" , False ),
+ ("renice" , "int" , "CPU priority" , 0 )]
__description__ = """Extract different kind of archives"""
__license__ = "GPLv3"
- __authors__ = [("RaNaN", "ranan@pyload.org"),
- ("AndroKev", ""),
- ("Walter Purcaro", "vuolter@gmail.com")]
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com"),
+ ("Immenz" , "immenz@gmx.net" )]
+
+
+ event_list = ["allDownloadsProcessed"]
+ NAME_REPLACEMENTS = [(r'\.part\d+\.rar$', ".part.rar")]
- event_map = {'all_downloads-processed': "allDownloadsProcessed"}
+
+ #@TODO: Remove in 0.4.10
+ def initPeriodical(self):
+ pass
def setup(self):
- self.plugins = []
- self.passwords = []
- names = []
+ self.queue = ArchiveQueue(self, "Queue")
+ self.failed = ArchiveQueue(self, "Failed")
+
+ self.interval = 60
+ self.extracting = False
+ self.extractors = []
+ self.passwords = []
- for p in ("UnRar", "UnZip"):
+
+ def coreReady(self):
+ # self.extracting = False
+
+ for p in ("UnRar", "SevenZip", "UnZip"):
try:
module = self.core.pluginManager.loadModule("internal", p)
- klass = getattr(module, p)
- if klass.checkDeps():
- names.append(p)
- self.plugins.append(klass)
+ klass = getattr(module, p)
+ if klass.isUsable():
+ self.extractors.append(klass)
except OSError, e:
if e.errno == 2:
self.logInfo(_("No %s installed") % p)
else:
- self.logWarning(_("Could not activate %s") % p, e)
+ self.logWarning(_("Could not activate: %s") % p, e)
if self.core.debug:
print_exc()
except Exception, e:
- self.logWarning(_("Could not activate %s") % p, e)
+ self.logWarning(_("Could not activate: %s") % p, e)
if self.core.debug:
print_exc()
- if names:
- self.logInfo(_("Activated") + " " + " ".join(names))
+ if self.extractors:
+ self.logInfo(_("Activated") + " " + "|".join("%s %s" % (Extractor.__name__,Extractor.VERSION) for Extractor in self.extractors))
+
+ if self.getConfig("waitall"):
+ self.extractPackage(*self.queue.get()) #: Resume unfinished extractions
+ else:
+ super(ExtractArchive, self).initPeriodical()
+
else:
self.logInfo(_("No Extract plugins activated"))
- # queue with package ids
- self.queue = []
+
+ def periodical(self):
+ if not self.extracting:
+ self.extractPackage(*self.queue.get())
@Expose
- def extractPackage(self, id):
- """ Extract package with given id"""
- self.manager.startThread(self.extract, [id])
+ def extractPackage(self, *ids):
+ """ Extract packages with given id"""
+ self.manager.startThread(self.extract, ids)
def packageFinished(self, pypack):
- pid = pypack.id
- if self.getConfig("queue"):
- self.logInfo(_("Package %s queued for later extracting") % pypack.name)
- self.queue.append(pid)
- else:
- self.manager.startThread(self.extract, [pid])
+ self.queue.add(pypack.id)
@threaded
def allDownloadsProcessed(self, thread):
- local = copy(self.queue)
- del self.queue[:]
- if self.extract(local, thread): #: check only if all gone fine, no failed reporting for now
- self.manager.dispatchEvent("all_archives-extracted")
- self.manager.dispatchEvent("all_archives-processed")
+ if self.extract(self.queue.get(), thread): #@NOTE: check only if all gone fine, no failed reporting for now
+ self.manager.dispatchEvent("all_archives_extracted")
+
+ self.manager.dispatchEvent("all_archives_processed")
def extract(self, ids, thread=None):
+ if not ids:
+ return False
+
+ self.extracting = True
+
processed = []
extracted = []
- failed = []
+ failed = []
- destination = self.getConfig("destination")
- subfolder = self.getConfig("subfolder")
- fullpath = self.getConfig("fullpath")
- overwrite = self.getConfig("overwrite")
- excludefiles = self.getConfig("excludefiles")
- renice = self.getConfig("renice")
- recursive = self.getConfig("recursive")
+ toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')
+
+ destination = self.getConfig("destination")
+ subfolder = self.getConfig("subfolder")
+ fullpath = self.getConfig("fullpath")
+ overwrite = self.getConfig("overwrite")
+ renice = self.getConfig("renice")
+ recursive = self.getConfig("recursive")
+ delete = self.getConfig("delete")
+ keepbroken = self.getConfig("keepbroken")
+
+ extensions = [x.lstrip('.').lower() for x in toList(self.getConfig("extensions"))]
+ excludefiles = toList(self.getConfig("excludefiles"))
+
+ if extensions:
+ self.logDebug("Use for extensions: %s" % "|.".join(extensions))
# reload from txt file
self.reloadPasswords()
@@ -161,161 +239,229 @@ class ExtractArchive(Addon):
# dl folder
dl = self.config['general']['download_folder']
- #iterate packages -> plugins -> targets
+ #iterate packages -> extractors -> targets
for pid in ids:
- p = self.core.files.getPackage(pid)
- self.logInfo(_("Check package %s") % p.name)
- if not p:
+ pypack = self.core.files.getPackage(pid)
+
+ if not pypack:
continue
+ self.logInfo(_("Check package: %s") % pypack.name)
+
# determine output folder
- out = safe_join(dl, p.folder, "")
+ out = save_join(dl, pypack.folder, destination, "") #: force trailing slash
- out = safe_join(dl, p.folder, self.getConfig("destination"), "")
if subfolder:
- out = safe_join(out, fs_encode(p.folder))
+ out = save_join(out, pypack.folder)
- if not exists(out):
- makedirs(out)
+ if not os.path.exists(out):
+ os.makedirs(out)
- files_ids = [(safe_join(dl, p.folder, x['name']), x['id']) for x in p.getChildren().itervalues()]
- matched = False
- success = True
+ matched = False
+ success = True
+ files_ids = [(save_join(dl, pypack.folder, pylink['name']), pylink['id'], out) for pylink in pypack.getChildren().itervalues()]
# check as long there are unseen files
while files_ids:
new_files_ids = []
- for plugin in self.plugins:
- targets = plugin.getTargets(files_ids)
+ if extensions:
+ files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
+ if filter(lambda ext: fname.lower().endswith(ext), extensions)]
+
+ for Extractor in self.extractors:
+ targets = Extractor.getTargets(files_ids)
if targets:
- self.logDebug("Targets for %s: %s" % (plugin.__name__, targets))
+ self.logDebug("Targets for %s: %s" % (Extractor.__name__, targets))
matched = True
- for target, fid in targets:
- if target in processed:
- self.logDebug(basename(target), "skipped")
- continue
- processed.append(target) # prevent extracting same file twice
+ for fname, fid, fout in targets:
+ name = os.path.basename(fname)
- self.logInfo(basename(target), _("Extract to %s") % out)
+ if not os.path.exists(fname):
+ self.logDebug(name, "File not found")
+ continue
+
+ self.logInfo(name, _("Extract to: %s") % fout)
try:
- klass = plugin(self, target, out, fullpath, overwrite, excludefiles, renice)
- klass.init()
- password = p.password.strip().splitlines()
- new_files = self._extract(klass, fid, password, thread)
+ archive = Extractor(self,
+ fname,
+ fout,
+ fullpath,
+ overwrite,
+ excludefiles,
+ renice,
+ delete,
+ keepbroken,
+ fid)
+ archive.init()
+
+ new_files = self._extract(archive, fid, pypack.password, thread)
+
except Exception, e:
- self.logError(basename(target), e)
+ self.logError(name, e)
success = False
continue
- self.logDebug("Extracted", new_files)
+ files_ids.remove((fname, fid, fout)) # don't let other extractors spam log
+ self.logDebug("Extracted files: %s" % new_files)
self.setPermissions(new_files)
- for file in new_files:
- if not exists(file):
- self.logDebug("New file %s does not exists" % file)
+ for filename in new_files:
+ file = fs_encode(save_join(os.path.dirname(archive.filename), filename))
+ if not os.path.exists(file):
+ self.logDebug("New file %s does not exists" % filename)
continue
- if recursive and isfile(file):
- new_files_ids.append((file, fid)) # append as new target
+
+ if recursive and os.path.isfile(file):
+ new_files_ids.append((filename, fid, os.path.dirname(filename))) # append as new target
files_ids = new_files_ids # also check extracted files
if matched:
if success:
extracted.append(pid)
- self.manager.dispatchEvent("package-extracted", p)
+ self.manager.dispatchEvent("package_extracted", pypack)
else:
failed.append(pid)
- self.manager.dispatchEvent("package-extract_failed", p)
+ self.manager.dispatchEvent("package_extract_failed", pypack)
+
+ self.failed.add(pid)
else:
self.logInfo(_("No files found to extract"))
+ if not matched or not success and subfolder:
+ try:
+ os.rmdir(out)
+
+ except OSError:
+ pass
+
+ self.queue.remove(pid)
+
+ self.extracting = False
return True if not failed else False
- def _extract(self, plugin, fid, passwords, thread):
+ def _extract(self, archive, fid, password, thread):
pyfile = self.core.files.getFile(fid)
- deletearchive = self.getConfig("deletearchive")
+ name = os.path.basename(archive.filename)
- pyfile.setCustomStatus(_("extracting"))
- thread.addActive(pyfile) # keep this file until everything is done
+ thread.addActive(pyfile)
+ pyfile.setStatus("processing")
+ encrypted = False
try:
- progress = lambda x: pyfile.setProgress(x)
- success = False
+ try:
+ archive.check()
- if not plugin.checkArchive():
- plugin.extract(progress)
- success = True
- else:
- self.logInfo(basename(plugin.file), _("Password protected"))
- self.logDebug("Passwords", passwords)
+ except CRCError, e:
+ self.logDebug(name, e)
+ self.logInfo(name, _("Header protected"))
+
+ if self.getConfig("repair"):
+ self.logWarning(name, _("Repairing..."))
+
+ pyfile.setCustomStatus(_("repairing"))
+ pyfile.setProgress(0)
+
+ repaired = archive.repair()
+
+ pyfile.setProgress(100)
+
+ if not repaired and not self.getConfig("keepbroken"):
+ raise CRCError("Archive damaged")
+
+ except PasswordError:
+ self.logInfo(name, _("Password protected"))
+ encrypted = True
+
+ except ArchiveError, e:
+ raise ArchiveError(e)
+
+ self.logDebug("Password: %s" % (password or "No provided"))
- pwlist = copy(self.getPasswords())
- # remove already supplied pws from list (only local)
- for pw in passwords:
- if pw in pwlist:
- pwlist.remove(pw)
+ pyfile.setCustomStatus(_("extracting"))
+ pyfile.setProgress(0)
- for pw in passwords + pwlist:
+ if not encrypted or not self.getConfig("usepasswordfile"):
+ archive.extract(password)
+ else:
+ for pw in filter(None, uniqify([password] + self.getPasswords(False))):
try:
- self.logDebug("Try password", pw)
- if plugin.checkPassword(pw):
- plugin.extract(progress, pw)
+ self.logDebug("Try password: %s" % pw)
+
+ ispw = archive.isPassword(pw)
+ if ispw or ispw is None:
+ archive.extract(pw)
self.addPassword(pw)
- success = True
break
- except WrongPassword:
+
+ except PasswordError:
self.logDebug("Password was wrong")
+ else:
+ raise PasswordError
- if not success:
- raise Exception(_("Wrong password"))
+ pyfile.setProgress(100)
+ pyfile.setCustomStatus(_("finalizing"))
if self.core.debug:
- self.logDebug("Would delete", ", ".join(plugin.getDeleteFiles()))
+ self.logDebug("Would delete: %s" % ", ".join(archive.getDeleteFiles()))
- if deletearchive:
- files = plugin.getDeleteFiles()
+ if self.getConfig("delete"):
+ files = archive.getDeleteFiles()
self.logInfo(_("Deleting %s files") % len(files))
for f in files:
- if exists(f):
- remove(f)
+ file = fs_encode(f)
+ if os.path.exists(file):
+ os.remove(file)
else:
self.logDebug("%s does not exists" % f)
- self.logInfo(basename(plugin.file), _("Extracting finished"))
+ self.logInfo(name, _("Extracting finished"))
- extracted_files = plugin.getExtractedFiles()
- self.manager.dispatchEvent("archive-extracted", pyfile, plugin.out, plugin.file, extracted_files)
+ extracted_files = archive.files or archive.list()
+ self.manager.dispatchEvent("archive_extracted", pyfile, archive.out, archive.filename, extracted_files)
return extracted_files
+ except PasswordError:
+ self.logError(name, _("Wrong password" if password else "No password found"))
+
+ except CRCError, e:
+ self.logError(name, _("CRC mismatch"), e)
+
except ArchiveError, e:
- self.logError(basename(plugin.file), _("Archive Error"), e)
- except CRCError:
- self.logError(basename(plugin.file), _("CRC Mismatch"))
+ self.logError(name, _("Archive error"), e)
+
except Exception, e:
+ self.logError(name, _("Unknown error"), e)
if self.core.debug:
print_exc()
- self.logError(basename(plugin.file), _("Unknown Error"), e)
- self.manager.dispatchEvent("archive-extract_failed", pyfile)
+ finally:
+ pyfile.finishIfDone()
+
+ self.manager.dispatchEvent("archive_extract_failed", pyfile)
+
raise Exception(_("Extract failed"))
@Expose
- def getPasswords(self):
+ def getPasswords(self, reload=True):
""" List of saved passwords """
+ if reload:
+ self.reloadPasswords()
+
return self.passwords
def reloadPasswords(self):
- passwordfile = self.getConfig("passwordfile")
-
try:
passwords = []
- with open(passwordfile, "a+") as f:
+
+ file = fs_encode(self.getConfig("passwordfile"))
+ with open(file) as f:
for pw in f.read().splitlines():
passwords.append(pw)
@@ -327,37 +473,37 @@ class ExtractArchive(Addon):
@Expose
- def addPassword(self, pw):
+ def addPassword(self, password):
""" Adds a password to saved list"""
- passwordfile = self.getConfig("passwordfile")
-
- if pw in self.passwords:
- self.passwords.remove(pw)
-
- self.passwords.insert(0, pw)
-
try:
- with open(passwordfile, "wb") as f:
+ self.passwords = uniqify([password] + self.passwords)
+
+ file = fs_encode(self.getConfig("passwordfile"))
+ with open(file, "wb") as f:
for pw in self.passwords:
- f.write(pw + "\n")
+ f.write(pw + '\n')
+
except IOError, e:
self.logError(e)
def setPermissions(self, files):
for f in files:
- if not exists(f):
+ if not os.path.exists(f):
continue
+
try:
if self.config['permission']['change_file']:
- if isfile(f):
- chmod(f, int(self.config['permission']['file'], 8))
- elif isdir(f):
- chmod(f, int(self.config['permission']['folder'], 8))
+ if os.path.isfile(f):
+ os.chmod(f, int(self.config['permission']['file'], 8))
+
+ elif os.path.isdir(f):
+ os.chmod(f, int(self.config['permission']['folder'], 8))
if self.config['permission']['change_dl'] and os.name != "nt":
uid = getpwnam(self.config['permission']['user'])[2]
gid = getgrnam(self.config['permission']['group'])[2]
- chown(f, uid, gid)
+ os.chown(f, uid, gid)
+
except Exception, e:
self.logWarning(_("Setting User and Group failed"), e)
diff --git a/module/plugins/addon/HotFolder.py b/module/plugins/addon/HotFolder.py
index 3bbafd5ed..eb607ac7e 100644
--- a/module/plugins/addon/HotFolder.py
+++ b/module/plugins/addon/HotFolder.py
@@ -43,7 +43,8 @@ class HotFolder(Addon):
makedirs(join(folder, "finished"))
if self.getConfig("watch_file"):
- with open(fs_encode(self.getConfig("file")), "a+") as f:
+ file = fs_encode(self.getConfig("file"))
+ with open(file, "a+") as f:
content = f.read().strip()
if content:
diff --git a/module/plugins/addon/IRCInterface.py b/module/plugins/addon/IRCInterface.py
index a4d466319..86d9ea688 100644
--- a/module/plugins/addon/IRCInterface.py
+++ b/module/plugins/addon/IRCInterface.py
@@ -74,10 +74,10 @@ class IRCInterface(Thread, Addon):
task.handler.append(self)
task.setWaiting(60)
- page = getURL("http://www.freeimagehosting.net/upload.php",
+ html = getURL("http://www.freeimagehosting.net/upload.php",
post={"attached": (FORM_FILE, task.captchaFile)}, multipart=True)
- url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", page).group(1)
+ url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", html).group(1)
self.response(_("New Captcha Request: %s") % url)
self.response(_("Answer with 'c %s text on the captcha'") % task.id)
diff --git a/module/plugins/addon/MergeFiles.py b/module/plugins/addon/MergeFiles.py
index 18836f6ac..42ac3ff4d 100644
--- a/module/plugins/addon/MergeFiles.py
+++ b/module/plugins/addon/MergeFiles.py
@@ -8,13 +8,13 @@ import re
from traceback import print_exc
from pyload.plugin.Addon import Addon, threaded
-from pyload.utils import safe_join, fs_encode
+from module.utils import safe_join
class MergeFiles(Addon):
__name__ = "MergeFiles"
__type__ = "addon"
- __version__ = "0.13"
+ __version__ = "0.14"
__config__ = [("activated", "bool", "Activated", True)]
@@ -59,7 +59,7 @@ class MergeFiles(Addon):
pyfile.setStatus("processing")
try:
- with open(os.path.join(download_folder, splitted_file), "rb") as s_file:
+ with open(save_join(download_folder, splitted_file), "rb") as s_file:
size_written = 0
s_file_size = int(os.path.getsize(os.path.join(download_folder, splitted_file)))
diff --git a/module/plugins/addon/MultiHome.py b/module/plugins/addon/MultiHome.py
index 84b1e6ab7..521749fc8 100644
--- a/module/plugins/addon/MultiHome.py
+++ b/module/plugins/addon/MultiHome.py
@@ -27,7 +27,7 @@ class MultiHome(Addon):
def toConfig(self):
- return ";".join([i.adress for i in self.interfaces])
+ return ";".join(i.adress for i in self.interfaces)
def parseInterfaces(self, interfaces):
diff --git a/module/plugins/addon/RestartSlow.py b/module/plugins/addon/RestartSlow.py
index 8621ed80d..332047da7 100644
--- a/module/plugins/addon/RestartSlow.py
+++ b/module/plugins/addon/RestartSlow.py
@@ -8,7 +8,7 @@ from pyload.plugin.Addon import Addon
class RestartSlow(Addon):
__name__ = "RestartSlow"
__type__ = "addon"
- __version__ = "0.02"
+ __version__ = "0.04"
__config__ = [("free_limit" , "int" , "Transfer speed threshold in kilobytes" , 100 ),
("free_time" , "int" , "Sample interval in minutes" , 5 ),
@@ -29,7 +29,7 @@ class RestartSlow(Addon):
def periodical(self):
- if not self.pyfile.req.dl:
+ if not self.pyfile.plugin.req.dl:
return
if self.getConfig("safe_mode") and not self.pyfile.plugin.resumeDownload:
@@ -40,8 +40,8 @@ class RestartSlow(Addon):
time = max(30, self.getConfig("%s_time" % type) * 60)
limit = max(5, self.getConfig("%s_limit" % type) * 1024)
- chunks = [chunk for chunk in self.pyfile.req.dl.chunks \
- if chunk.id not in self.info['chunk'] or self.info['chunk'][chunk.id] not is (time, limit)]
+ chunks = [chunk for chunk in self.pyfile.plugin.req.dl.chunks \
+ if chunk.id not in self.info['chunk'] or self.info['chunk'][chunk.id] is not (time, limit)]
for chunk in chunks:
chunk.c.setopt(pycurl.LOW_SPEED_TIME , time)
@@ -53,5 +53,5 @@ class RestartSlow(Addon):
def downloadStarts(self, pyfile, url, filename):
if self.cb or (self.getConfig("safe_mode") and not pyfile.plugin.resumeDownload):
return
-
+ self.pyfile = pyfile
self.initPeriodical()
diff --git a/module/plugins/addon/SkipRev.py b/module/plugins/addon/SkipRev.py
index d6908614d..0bbdec3b2 100644
--- a/module/plugins/addon/SkipRev.py
+++ b/module/plugins/addon/SkipRev.py
@@ -1,16 +1,24 @@
# -*- coding: utf-8 -*-
+from types import MethodType
from urllib import unquote
from urlparse import urlparse
-from pyload.plugin.Addon import Addon
-from pyload.plugin.Plugin import SkipDownload
+from module.PyFile import PyFile
+from module.plugins.Hook import Hook
+from module.plugins.Plugin import SkipDownload
-class SkipRev(Adoon):
+def _setup(self):
+ self.pyfile.plugin._setup()
+ if self.pyfile.hasStatus("skipped"):
+ raise SkipDownload(self.pyfile.statusname or self.pyfile.pluginname)
+
+
+class SkipRev(Hook):
__name__ = "SkipRev"
- __type__ = "addon"
- __version__ = "0.15"
+ __type__ = "hook"
+ __version__ = "0.25"
__config__ = [("tokeep", "int", "Number of rev files to keep for package (-1 to auto)", -1)]
@@ -19,59 +27,72 @@ class SkipRev(Adoon):
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- def _setup(self):
- super(self.pyfile.plugin, self).setup()
- if self.pyfile.hasStatus("skipped"):
- raise SkipDownload(self.pyfile.getStatusName() or self.pyfile.pluginname)
-
-
- def pyname(self, pyfile):
- url = pyfile.url
- plugin = pyfile.plugin
-
- if hasattr(plugin, "info") and 'name' in plugin.info and plugin.info['name']:
- name = plugin.info['name']
-
- elif hasattr(plugin, "parseInfos"):
- name = next(plugin.parseInfos([url]))['name']
+ #@TODO: Remove in 0.4.10
+ def initPeriodical(self):
+ pass
- elif hasattr(plugin, "getInfo"): #@NOTE: if parseInfos was not found, getInfo should be missing too
- name = plugin.getInfo(url)['name']
+ def _pyname(self, pyfile):
+ if hasattr(pyfile.pluginmodule, "getInfo"):
+ return getattr(pyfile.pluginmodule, "getInfo")([pyfile.url]).next()[0]
else:
self.logWarning("Unable to grab file name")
- name = urlparse(unquote(url)).path.split('/')[-1])
+ return urlparse(unquote(pyfile.url)).path.split('/')[-1]
- return name
+
+ def _pyfile(self, link):
+ return PyFile(self.core.files,
+ link.fid,
+ link.url,
+ link.name,
+ link.size,
+ link.status,
+ link.error,
+ link.plugin,
+ link.packageID,
+ link.order)
def downloadPreparing(self, pyfile):
- if pyfile.getStatusName() is "unskipped" or not pyname(pyfile).endswith(".rev"):
+ if pyfile.statusname is "unskipped" or not self._pyname(pyfile).endswith(".rev"):
return
tokeep = self.getConfig("tokeep")
if tokeep:
- saved = [True for link in pyfile.package().getChildren() \
- if link.name.endswith(".rev") and (link.hasStatus("finished") or link.hasStatus("downloading"))].count(True)
+ status_list = (1, 4, 8, 9, 14) if tokeep < 0 else (1, 3, 4, 8, 9, 14)
+
+ queued = [True for link in self.core.api.getPackageData(pyfile.package().id).links \
+ if link.name.endswith(".rev") and link.status not in status_list].count(True)
- if not saved or saved < tokeep: #: keep one rev at least in auto mode
+    if not queued or queued < tokeep: #: keep at least one rev file in auto mode
return
pyfile.setCustomStatus("SkipRev", "skipped")
- pyfile.plugin.setup = _setup #: work-around: inject status checker inside the preprocessing routine of the plugin
+ pyfile.plugin._setup = pyfile.plugin.setup
+ pyfile.plugin.setup = MethodType(_setup, pyfile.plugin) #: work-around: inject status checker inside the preprocessing routine of the plugin
def downloadFailed(self, pyfile):
+    #: Check if pyfile is still "failed";
+    #  it might have been restarted in the meantime
+ if pyfile.status != 8:
+ return
+
tokeep = self.getConfig("tokeep")
if not tokeep:
return
- for link in pyfile.package().getChildren():
- if link.hasStatus("skipped") and link.name.endswith(".rev"):
+ for link in self.core.api.getPackageData(pyfile.package().id).links:
+ if link.status is 4 and link.name.endswith(".rev"):
+ pylink = self._pyfile(link)
+
if tokeep > -1 or pyfile.name.endswith(".rev"):
- link.setStatus("queued")
+ pylink.setStatus("queued")
else:
- link.setCustomStatus("unskipped", "queued")
+ pylink.setCustomStatus("unskipped", "queued")
+
+ self.core.files.save()
+ pylink.release()
return
diff --git a/module/plugins/addon/UnSkipOnFail.py b/module/plugins/addon/UnSkipOnFail.py
index bdbd4a5df..1becb937a 100644
--- a/module/plugins/addon/UnSkipOnFail.py
+++ b/module/plugins/addon/UnSkipOnFail.py
@@ -1,87 +1,95 @@
# -*- coding: utf-8 -*-
-from os.path import basename
+from module.PyFile import PyFile
+from module.plugins.Hook import Hook
-from pyload.datatype.File import PyFile
-from pyload.plugin.Addon import Addon
-from pyload.utils import fs_encode
-
-class UnSkipOnFail(Addon):
+class UnSkipOnFail(Hook):
__name__ = "UnSkipOnFail"
- __type__ = "addon"
- __version__ = "0.02"
+ __type__ = "hook"
+ __version__ = "0.05"
__config__ = [("activated", "bool", "Activated", True)]
- __description__ = """When a download fails, restart skipped duplicates"""
+ __description__ = """Queue skipped duplicates when download fails"""
__license__ = "GPLv3"
- __authors__ = [("hagg", "")]
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ #@TODO: Remove in 0.4.10
+ def initPeriodical(self):
+ pass
def downloadFailed(self, pyfile):
- pyfile_name = basename(pyfile.name)
- pid = pyfile.package().id
- msg = _('look for skipped duplicates for %s (pid:%s)')
- self.logInfo(msg % (pyfile_name, pid))
- dups = self.findDuplicates(pyfile)
- for link in dups:
- # check if link is "skipped"(=4)
- if link.status == 4:
- lpid = link.packageID
- self.logInfo(_('restart "%s" (pid:%s)') % (pyfile_name, lpid))
- self.setLinkStatus(link, "queued")
-
-
- def findDuplicates(self, pyfile):
+    #: Check if pyfile is still "failed";
+    #  it might have been restarted in the meantime
+ if pyfile.status != 8:
+ return
+
+ msg = _("Looking for skipped duplicates of: %s (pid:%s)")
+ self.logInfo(msg % (pyfile.name, pyfile.package().id))
+
+ dup = self.findDuplicate(pyfile)
+ if dup:
+ self.logInfo(_("Queue found duplicate: %s (pid:%s)") % (dup.name, dup.packageID))
+
+    #: Change the duplicate's status to "queued".
+    #  "dup" is a FileData object returned by the API,
+    #  so a temporary PyFile object is created from its
+    #  data, its status is changed, and the core
+    #  files-manager is told to save the change.
+    pylink = self._pyfile(dup)
+
+ pylink.setCustomStatus("UnSkipOnFail", "queued")
+
+ self.core.files.save()
+ pylink.release()
+
+ else:
+ self.logInfo(_("No duplicates found"))
+
+
+ def findDuplicate(self, pyfile):
""" Search all packages for duplicate links to "pyfile".
Duplicates are links that would overwrite "pyfile".
To test on duplicity the package-folder and link-name
- of twolinks are compared (basename(link.name)).
+    of two links are compared (link.name).
So this method returns a list of all links with equal
package-folders and filenames as "pyfile", but except
 the data for "pyfile" itself.
 It does NOT check the link's status.
"""
- dups = []
- pyfile_name = fs_encode(basename(pyfile.name))
- # get packages (w/o files, as most file data is useless here)
- queue = self.core.api.getQueue()
+ queue = self.core.api.getQueue() #: get packages (w/o files, as most file data is useless here)
+
for package in queue:
- # check if package-folder equals pyfile's package folder
- if fs_encode(package.folder) == fs_encode(pyfile.package().folder):
- # now get packaged data w/ files/links
- pdata = self.core.api.getPackageData(package.pid)
- if pdata.links:
- for link in pdata.links:
- link_name = fs_encode(basename(link.name))
- # check if link name collides with pdata's name
- if link_name == pyfile_name:
- # at last check if it is not pyfile itself
- if link.fid != pyfile.id:
- dups.append(link)
- return dups
-
-
- def setLinkStatus(self, link, new_status):
- """ Change status of "link" to "new_status".
- "link" has to be a valid FileData object,
- "new_status" has to be a valid status name
- (i.e. "queued" for this Plugin)
- It creates a temporary PyFile object using
- "link" data, changes its status, and tells
- the core.files-manager to save its data.
- """
- pyfile = PyFile(self.core.files,
- link.fid,
- link.url,
- link.name,
- link.size,
- link.status,
- link.error,
- link.plugin,
- link.packageID,
- link.order)
- pyfile.setStatus(new_status)
- self.core.files.save()
- pyfile.release()
+ #: check if package-folder equals pyfile's package folder
+ if package.folder != pyfile.package().folder:
+ continue
+
+ #: now get packaged data w/ files/links
+ pdata = self.core.api.getPackageData(package.pid)
+ for link in pdata.links:
+ #: check if link is "skipped"
+ if link.status != 4:
+ continue
+
+ #: check if link name collides with pdata's name
+ #: AND at last check if it is not pyfile itself
+ if link.name == pyfile.name and link.fid != pyfile.id:
+ return link
+
+
+ def _pyfile(self, link):
+ return PyFile(self.core.files,
+ link.fid,
+ link.url,
+ link.name,
+ link.size,
+ link.status,
+ link.error,
+ link.plugin,
+ link.packageID,
+ link.order)
diff --git a/module/plugins/addon/UpdateManager.py b/module/plugins/addon/UpdateManager.py
index e31612c23..efccc73fb 100644
--- a/module/plugins/addon/UpdateManager.py
+++ b/module/plugins/addon/UpdateManager.py
@@ -16,14 +16,14 @@ from pyload.utils import safe_join
class UpdateManager(Addon):
__name__ = "UpdateManager"
__type__ = "addon"
- __version__ = "0.42"
+ __version__ = "0.43"
__config__ = [("activated" , "bool" , "Activated" , True ),
("mode" , "pyLoad + plugins;plugins only", "Check updates for" , "pyLoad + plugins"),
("interval" , "int" , "Check interval in hours" , 8 ),
("autorestart" , "bool" , "Automatically restart pyLoad when required" , True ),
("reloadplugins", "bool" , "Monitor plugins for code changes in debug mode", True ),
- ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , True )]
+ ("nodebugupdate", "bool" , "Don't check for updates in debug mode" , False )]
__description__ = """Check for updates"""
__license__ = "GPLv3"
@@ -191,12 +191,13 @@ class UpdateManager(Addon):
blacklisted = [(x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]) for x in blacklist] if blacklist else []
if blacklist:
- # Protect internal plugins against removing
- for i, t, n in enumerate(blacklisted):
- if t == "internal":
- blacklisted.pop(i)
- continue
+ # Protect UpdateManager from self-removing
+ try:
+ blacklisted.remove(("hook", "UpdateManager"))
+ except Exception:
+ pass
+ for t, n in blacklisted:
for idx, plugin in enumerate(upgradable):
if n == plugin['name'] and t == plugin['type']:
upgradable.pop(idx)
diff --git a/module/plugins/addon/WindowsPhoneToastNotify.py b/module/plugins/addon/WindowsPhoneToastNotify.py
deleted file mode 100644
index 0ac6719e1..000000000
--- a/module/plugins/addon/WindowsPhoneToastNotify.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import httplib
-import time
-
-from pyload.plugin.Addon import Addon
-
-
-class WindowsPhoneToastNotify(Addon):
- __name__ = "WindowsPhoneToastNotify"
- __type__ = "addon"
- __version__ = "0.03"
-
- __config__ = [("force" , "bool", "Force even if client is connected" , False),
- ("pushId" , "str" , "pushId" , "" ),
- ("pushUrl" , "str" , "pushUrl" , "" ),
- ("pushTimeout", "int" , "Timeout between notifications in seconds", 0 )]
-
- __description__ = """Send push notifications to Windows Phone"""
- __license__ = "GPLv3"
- __authors__ = [("Andy Voigt", "phone-support@hotmail.de")]
-
-
- def getXmlData(self):
- myxml = ("<?xml version='1.0' encoding='utf-8'?> <wp:Notification xmlns:wp='WPNotification'> "
- "<wp:Toast> <wp:Text1>Pyload Mobile</wp:Text1> <wp:Text2>Captcha waiting!</wp:Text2> "
- "</wp:Toast> </wp:Notification>")
- return myxml
-
-
- def doRequest(self):
- URL = self.getConfig("pushUrl")
- request = self.getXmlData()
- webservice = httplib.HTTP(URL)
- webservice.putrequest("POST", self.getConfig("pushId"))
- webservice.putheader("Host", URL)
- webservice.putheader("Content-type", "text/xml")
- webservice.putheader("X-NotificationClass", "2")
- webservice.putheader("X-WindowsPhone-Target", "toast")
- webservice.putheader("Content-length", "%d" % len(request))
- webservice.endheaders()
- webservice.send(request)
- webservice.close()
- self.setStorage("LAST_NOTIFY", time.time())
-
-
- def captchaTask(self, task):
- if not self.getConfig("pushId") or not self.getConfig("pushUrl"):
- return False
-
- if self.core.isClientConnected() and not self.getConfig("force"):
- return False
-
- if (time.time() - float(self.getStorage("LAST_NOTIFY", 0))) < self.getConf("pushTimeout"):
- return False
-
- self.doRequest()