path: root/pyload/plugin/addon/UnSkipOnFail.py
author Walter Purcaro <vuolter@gmail.com> 2015-02-16 21:59:10 +0100
committer Walter Purcaro <vuolter@gmail.com> 2015-02-16 21:59:10 +0100
commit 8e7d14bae4d3c836f029a1235eb227380acc3f75 (patch)
tree ebd0679642cccb994e70a89a106b394189cb28bc /pyload/plugin/addon/UnSkipOnFail.py
parent Merge branch 'stable' into 0.4.10 (diff)
download pyload-8e7d14bae4d3c836f029a1235eb227380acc3f75.tar.xz
Fix plugins to work on 0.4.10
Diffstat (limited to 'pyload/plugin/addon/UnSkipOnFail.py')
-rw-r--r--  pyload/plugin/addon/UnSkipOnFail.py  90
1 file changed, 90 insertions, 0 deletions
diff --git a/pyload/plugin/addon/UnSkipOnFail.py b/pyload/plugin/addon/UnSkipOnFail.py
new file mode 100644
index 000000000..7d787d1ed
--- /dev/null
+++ b/pyload/plugin/addon/UnSkipOnFail.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+from pyload.datatype.File import PyFile
+from pyload.plugin.Addon import Addon
+
+
+class UnSkipOnFail(Addon):
+    __name__ = "UnSkipOnFail"
+    __type__ = "addon"
+    __version__ = "0.05"
+
+    __config__ = [("activated", "bool", "Activated", True)]
+
+    __description__ = """Queue skipped duplicates when download fails"""
+    __license__ = "GPLv3"
+    __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+    def downloadFailed(self, pyfile):
+        #: Check if pyfile is still "failed" (status 8),
+        #  it might have been restarted in the meantime
+        if pyfile.status != 8:
+            return
+
+        msg = _("Looking for skipped duplicates of: %s (pid:%s)")
+        self.logInfo(msg % (pyfile.name, pyfile.package().id))
+
+        dup = self.findDuplicate(pyfile)
+        if dup:
+            self.logInfo(_("Queuing skipped duplicate: %s (pid:%s)") % (dup.name, dup.packageID))
+
+            #: Change the status of the duplicate to "queued".
+            #  "dup" is the FileData of the duplicate link,
+            #  "queued" is a valid status name for this plugin.
+            #  A temporary PyFile object is created from the
+            #  "dup" data, its status is changed, and the
+            #  core files-manager is told to save its data
+            #  before the object is released again.
+            pylink = self._pyfile(dup)
+
+            pylink.setCustomStatus("UnSkipOnFail", "queued")
+
+            self.core.files.save()
+            pylink.release()
+
+        else:
+            self.logInfo(_("No duplicates found"))
+
+
+    def findDuplicate(self, pyfile):
+        """ Search all packages for a duplicate link to "pyfile".
+            Duplicates are links that would overwrite "pyfile".
+            To test for duplicates, the package folder and the
+            link name (link.name) of two links are compared.
+            This method returns the first link with the same
+            package folder and filename as "pyfile", excluding
+            the data of "pyfile" itself.
+            It does NOT check the link's status.
+        """
+        queue = self.core.api.getQueue()  #: get the queued packages (w/o files, as most file data is useless here)
+
+        for package in queue:
+            #: check if the package folder equals pyfile's package folder
+            if package.folder != pyfile.package().folder:
+                continue
+
+            #: now get the package data incl. its files/links
+            pdata = self.core.api.getPackageData(package.pid)
+            for link in pdata.links:
+                #: check if the link is "skipped" (status 4)
+                if link.status != 4:
+                    continue
+
+                #: check if the link name matches pyfile's name
+                #  AND, at last, make sure it is not pyfile itself
+                if link.name == pyfile.name and link.fid != pyfile.id:
+                    return link
+
+
+    def _pyfile(self, link):
+        return PyFile(self.core.files,
+                      link.fid,
+                      link.url,
+                      link.name,
+                      link.size,
+                      link.status,
+                      link.error,
+                      link.plugin,
+                      link.packageID,
+                      link.order)
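
For reference, the duplicate-matching rule implemented by findDuplicate() above can be reduced to a small standalone sketch. The Link tuple, the find_duplicate() helper and the sample data below are illustrative stand-ins rather than pyload classes; only the comparison logic and the numeric status codes (4 == skipped, 8 == failed) mirror the plugin.

from collections import namedtuple

Link = namedtuple("Link", "fid name status folder")

SKIPPED = 4  # numeric status code the plugin checks for

def find_duplicate(pyfile, links):
    # Return the first skipped link sharing package folder and
    # filename with "pyfile", excluding "pyfile" itself.
    for link in links:
        if link.folder != pyfile.folder:
            continue
        if link.status != SKIPPED:
            continue
        if link.name == pyfile.name and link.fid != pyfile.fid:
            return link
    return None

failed = Link(1, "movie.mkv", 8, "pack01")
links = [failed,
         Link(2, "movie.mkv", 4, "pack01"),  # skipped duplicate -> match
         Link(3, "movie.mkv", 4, "pack02")]  # different folder  -> no match
print(find_duplicate(failed, links))  # Link(fid=2, name='movie.mkv', status=4, folder='pack01')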
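
The re-queue step itself just flips a numeric status code on a temporary object and lets the files-manager persist it. A minimal conceptual sketch, assuming the pyload 0.4.x status conventions (3 == queued, 4 == skipped, 8 == failed); TempLink and its methods are hypothetical stand-ins for the temporary PyFile and core.files.save(), not the real API:

# Numeric status codes relevant to this plugin (pyload 0.4.x conventions)
statusMap = {"queued": 3, "skipped": 4, "failed": 8}

class TempLink(object):
    # Hypothetical stand-in for the temporary PyFile built by _pyfile()
    def __init__(self, data):
        self.data = data

    def set_status(self, name):
        self.data["status"] = statusMap[name]  # store the numeric code

    def release(self):
        self.data = None  # drop the reference once the change is saved

record = {"fid": 2, "name": "movie.mkv", "status": statusMap["skipped"]}

tmp = TempLink(record)
tmp.set_status("queued")  # skipped (4) -> queued (3)
print(record["status"])   # 3; a real files-manager would persist this
tmp.release()

Wrapping the stored FileData in a short-lived object like this is presumably why the plugin builds a temporary PyFile instead of editing the stored record directly: the change goes through the normal save/release path.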