author     Walter Purcaro <vuolter@gmail.com>  2014-09-28 21:19:03 +0200
committer  Walter Purcaro <vuolter@gmail.com>  2014-09-28 21:19:03 +0200
commit     46c6fc74a4e423927554f024b78dbbbf33e982cd (patch)
tree       e7ff9580bb5e80b91e2bd9609409116a19c5941e /pyload/plugins/hooks/UnSkipOnFail.py
parent     Api: Add brackets and pipe to urlmatcher (diff)
parent     [XFileSharingPro] Fixed typo (diff)
download   pyload-46c6fc74a4e423927554f024b78dbbbf33e982cd.tar.xz
Merge branch 'stable' into 0.4.10
Conflicts:
	pyload/plugins/crypter/MultiuploadCom.py
	pyload/plugins/crypter/SerienjunkiesOrg.py
	pyload/plugins/hooks/ExternalScripts.py
	pyload/plugins/hooks/ExtractArchive.py
	pyload/plugins/hooks/MergeFiles.py
	pyload/plugins/hoster/CatShareNet.py
	pyload/plugins/hoster/FilezyNet.py
	pyload/plugins/hoster/IFileWs.py
	pyload/plugins/hoster/PremiumTo.py
	pyload/plugins/hoster/SpeedyshareCom.py
	pyload/plugins/hoster/UptoboxCom.py
	pyload/plugins/hoster/XFileSharingPro.py
	pyload/plugins/hoster/ZippyshareCom.py
Diffstat (limited to 'pyload/plugins/hooks/UnSkipOnFail.py')
-rw-r--r--  pyload/plugins/hooks/UnSkipOnFail.py  |  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyload/plugins/hooks/UnSkipOnFail.py b/pyload/plugins/hooks/UnSkipOnFail.py
index 941ce4fc7..40b0233f5 100644
--- a/pyload/plugins/hooks/UnSkipOnFail.py
+++ b/pyload/plugins/hooks/UnSkipOnFail.py
@@ -22,14 +22,14 @@ class UnSkipOnFail(Hook):
     def downloadFailed(self, pyfile):
         pyfile_name = basename(pyfile.name)
         pid = pyfile.package().id
-        msg = 'look for skipped duplicates for %s (pid:%s)...'
+        msg = _('look for skipped duplicates for %s (pid:%s)')
         self.logInfo(msg % (pyfile_name, pid))
         dups = self.findDuplicates(pyfile)
         for link in dups:
             # check if link is "skipped"(=4)
             if link.status == 4:
                 lpid = link.packageID
-                self.logInfo('restart "%s" (pid:%s)...' % (pyfile_name, lpid))
+                self.logInfo(_('restart "%s" (pid:%s)') % (pyfile_name, lpid))
                 self.setLinkStatus(link, "queued")
 
     def findDuplicates(self, pyfile):
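Note on the change: the hunk wraps the two log format strings in gettext's _() so they can be picked up for translation, and drops the trailing ellipsis. The snippet below is a minimal, standalone sketch of that pattern; it assumes a plain gettext.install() setup rather than pyLoad's own i18n bootstrap, and log_restart is a hypothetical helper used only for illustration.

    # Minimal sketch of the gettext pattern shown in the hunk above.
    # Assumption: pyLoad binds _() through its own i18n setup; here
    # gettext.install() is used so the snippet runs on its own.
    import gettext

    gettext.install("pyload")  # binds _() as a builtin; falls back to an
                               # identity translation if no catalog is found

    def log_restart(name, pid):  # hypothetical helper, for illustration only
        # Wrapping the bare format string in _() keeps the msgid stable;
        # values are interpolated only after the string has been translated.
        return _('restart "%s" (pid:%s)') % (name, pid)

    print(log_restart("example.rar", 42))

Interpolating with % after _() (rather than inside it) is what lets translators see the placeholder-bearing template instead of a fully expanded, untranslatable string.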