summaryrefslogtreecommitdiffstats
path: root/module/plugins/internal
diff options
context:
space:
mode:
Diffstat (limited to 'module/plugins/internal')
-rw-r--r--module/plugins/internal/Account.py76
-rw-r--r--module/plugins/internal/Addon.py197
-rw-r--r--module/plugins/internal/Base.py80
-rw-r--r--module/plugins/internal/Captcha.py41
-rw-r--r--module/plugins/internal/CaptchaService.py2
-rw-r--r--module/plugins/internal/Container.py56
-rw-r--r--module/plugins/internal/Crypter.py67
-rw-r--r--module/plugins/internal/DeadCrypter.py7
-rw-r--r--module/plugins/internal/DeadHoster.py7
-rw-r--r--module/plugins/internal/Extractor.py54
-rw-r--r--module/plugins/internal/Hoster.py350
-rw-r--r--module/plugins/internal/MultiAccount.py251
-rw-r--r--module/plugins/internal/MultiCrypter.py9
-rw-r--r--module/plugins/internal/MultiHoster.py8
-rw-r--r--module/plugins/internal/Notifier.py114
-rw-r--r--module/plugins/internal/OCR.py123
-rw-r--r--module/plugins/internal/Plugin.py189
-rw-r--r--module/plugins/internal/SevenZip.py53
-rw-r--r--module/plugins/internal/SimpleCrypter.py67
-rw-r--r--module/plugins/internal/SimpleHoster.py98
-rw-r--r--module/plugins/internal/UnRar.py72
-rw-r--r--module/plugins/internal/UnTar.py69
-rw-r--r--module/plugins/internal/UnZip.py62
-rw-r--r--module/plugins/internal/XFSAccount.py10
-rw-r--r--module/plugins/internal/XFSCrypter.py15
-rw-r--r--module/plugins/internal/XFSHoster.py15
-rw-r--r--module/plugins/internal/misc.py892
-rw-r--r--module/plugins/internal/utils.py482
28 files changed, 1858 insertions, 1608 deletions
diff --git a/module/plugins/internal/Account.py b/module/plugins/internal/Account.py
index ba8db0a6d..c63d182d9 100644
--- a/module/plugins/internal/Account.py
+++ b/module/plugins/internal/Account.py
@@ -6,7 +6,7 @@ import threading
import time
from module.plugins.internal.Plugin import Plugin, Skip
-from module.plugins.internal.utils import compare_time, isiterable, lock, parse_size, safe_format
+from module.plugins.internal.misc import Periodical, compare_time, decode, isiterable, lock, parse_size
class Account(Plugin):
@@ -23,8 +23,6 @@ class Account(Plugin):
LOGIN_TIMEOUT = 30 * 60 #: Relogin account every 30 minutes
TUNE_TIMEOUT = True #: Automatically tune relogin interval
- PERIODICAL_INTERVAL = None
-
def __init__(self, manager, accounts):
self._init(manager.core)
@@ -37,9 +35,9 @@ class Account(Plugin):
self.timeout = self.LOGIN_TIMEOUT
- #: Callback of periodical job task
- self.cb = None
- self.interval = None
+ #: Callback of periodical job task, used by HookManager
+ self.periodical = Periodical(self, self.periodical_task)
+ self.cb = self.periodical.cb #@TODO: Recheck in 0.4.10
self.init()
@@ -66,56 +64,30 @@ class Account(Plugin):
return bool(self.get_data('premium'))
- def setup(self):
- """
- Setup for enviroment and other things, called before logging (possibly more than one time)
- """
- pass
-
-
- def set_interval(self, value):
- newinterval = max(0, self.PERIODICAL_INTERVAL, value)
+ def _log(self, level, plugintype, pluginname, messages):
+ log = getattr(self.pyload.log, level)
+ msg = u" | ".join(decode(a).strip() for a in messages if a)
- if newinterval != value:
- return False
-
- if newinterval != self.interval:
- self.interval = newinterval
-
- return True
-
-
- def start_periodical(self, interval=None, threaded=False, delay=None):
- if interval is not None and self.set_interval(interval) is False:
- return False
- else:
- self.cb = self.pyload.scheduler.addJob(max(1, delay), self._periodical, [threaded], threaded=threaded)
- return True
-
-
- def restart_periodical(self, *args, **kwargs):
- self.stop_periodical()
- return self.start_periodical(*args, **kwargs)
-
-
- def stop_periodical(self):
+ #: Hide any password
try:
- return self.pyload.scheduler.removeJob(self.cb)
- finally:
- self.cb = None
-
+ msg = msg.replace(self.info['login']['password'], "**********")
+ except Exception:
+ pass
- def _periodical(self, threaded):
- try:
- self.periodical()
+ log("%(plugintype)s %(pluginname)s: %(msg)s" %
+ {'plugintype': plugintype.upper(),
+ 'pluginname': pluginname,
+ 'msg' : msg})
- except Exception, e:
- self.log_error(_("Error performing periodical task"), e)
- self.restart_periodical(threaded=threaded, delay=self.interval)
+ def setup(self):
+ """
+        Setup for environment and other things, called before logging (possibly more than one time)
+ """
+ pass
- def periodical(self):
+ def periodical_task(self):
raise NotImplementedError
@@ -209,7 +181,7 @@ class Account(Plugin):
self.sync()
clear = lambda x: {} if isinstance(x, dict) else [] if isiterable(x) else None
- self.info['data'] = dict((k, clear(v)) for k, v in self.info['data'].iteritems())
+ self.info['data'] = dict((k, clear(v)) for k, v in self.info['data'].items())
self.info['data']['options'] = {'limitdl': ['0']}
self.syncback()
@@ -237,7 +209,7 @@ class Account(Plugin):
self.syncback()
- self.log_debug("Account info for user `%s`: %s" % (self.user, safe_format(self.info, self.info['login']['password'])))
+ self.log_debug("Account info for user `%s`: %s" % (self.user, self.info))
return self.info
@@ -452,7 +424,7 @@ class Account(Plugin):
###########################################################################
- def parse_traffic(self, size, unit=None): #@NOTE: Returns kilobytes in 0.4.9
+ def parse_traffic(self, size, unit=None): #@NOTE: Returns kilobytes only in 0.4.9
self.log_debug("Size: %s" % size,
"Unit: %s" % (unit or "N/D"))
return parse_size(size, unit or "byte") / 1024 #@TODO: Remove `/ 1024` in 0.4.10
diff --git a/module/plugins/internal/Addon.py b/module/plugins/internal/Addon.py
index ee0febffc..183186fd2 100644
--- a/module/plugins/internal/Addon.py
+++ b/module/plugins/internal/Addon.py
@@ -1,15 +1,9 @@
# -*- coding: utf-8 -*-
-from module.plugins.internal.Plugin import Plugin
-
+import threading
-class Expose(object):
- """
- Used for decoration to declare rpc services
- """
- def __new__(cls, f, *args, **kwargs):
- hookManager.addRPC(f.__module__, f.func_name, f.func_doc)
- return f
+from module.plugins.internal.Plugin import Plugin
+from module.plugins.internal.misc import Periodical, isiterable
def threaded(fn):
@@ -19,10 +13,19 @@ def threaded(fn):
return run
+class Expose(object):
+ """
+ Used for decoration to declare rpc services
+ """
+ def __new__(cls, fn, *args, **kwargs):
+ hookManager.addRPC(fn.__module__, fn.func_name, fn.func_doc)
+ return fn
+
+
class Addon(Plugin):
__name__ = "Addon"
__type__ = "hook" #@TODO: Change to `addon` in 0.4.10
- __version__ = "0.14"
+ __version__ = "0.50"
__status__ = "stable"
__threaded__ = [] #@TODO: Remove in 0.4.10
@@ -32,29 +35,24 @@ class Addon(Plugin):
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- PERIODICAL_INTERVAL = None
-
-
def __init__(self, core, manager):
self._init(core)
#: `HookManager`
self.manager = manager
+ self.lock = threading.Lock()
#: Automatically register event listeners for functions, attribute will be deleted dont use it yourself
self.event_map = {}
- #: Deprecated alternative to event_map
- #: List of events the plugin can handle, name the functions exactly like eventname.
- self.event_list = [] #@NOTE: dont make duplicate entries in event_map
-
self.info['ip'] = None #@TODO: Remove in 0.4.10
#: Callback of periodical job task, used by HookManager
- self.cb = None
- self.interval = None
+ self.periodical = Periodical(self, self.periodical_task)
+ self.cb = self.periodical.cb #@TODO: Recheck in 0.4.10
self.init()
+ self._init_events() #@TODO: Remove in 0.4.10
self.init_events()
@@ -63,7 +61,7 @@ class Addon(Plugin):
"""
Checks if addon is activated
"""
- return self.get_config("activated")
+ return self.config.get("activated")
#@TODO: Remove in 0.4.10
@@ -72,10 +70,25 @@ class Addon(Plugin):
return super(Addon, self)._log(level, plugintype, pluginname, messages)
+ #@TODO: Remove in 0.4.10
+ def _init_events(self):
+ event_map = {'allDownloadsFinished' : "all_downloads_finished" ,
+ 'allDownloadsProcessed': "all_downloads_processed",
+ 'configChanged' : "config_changed" ,
+ 'download_processed' : "download_processed" ,
+ 'download_start' : "download_start" ,
+ 'linksAdded' : "links_added" ,
+ 'packageDeleted' : "package_deleted" ,
+ 'package_failed' : "package_failed" ,
+ 'package_processed' : "package_processed" }
+ for event, funcs in event_map.items():
+ self.manager.addEvent(event, getattr(self, funcs))
+
+
def init_events(self):
if self.event_map:
for event, funcs in self.event_map.items():
- if type(funcs) in (list, tuple):
+ if isiterable(funcs):
for f in funcs:
self.manager.addEvent(event, getattr(self, f))
else:
@@ -84,63 +97,13 @@ class Addon(Plugin):
#: Delete for various reasons
self.event_map = None
- if self.event_list:
- self.log_debug("Deprecated method `event_list`, use `event_map` instead")
-
- for f in self.event_list:
- self.manager.addEvent(f, getattr(self, f))
-
- self.event_list = None
-
-
- def set_interval(self, value):
- newinterval = max(0, self.PERIODICAL_INTERVAL, value)
-
- if newinterval != value:
- return False
-
- if newinterval != self.interval:
- self.interval = newinterval
-
- return True
-
-
- def start_periodical(self, interval=None, threaded=False, delay=None):
- if interval is not None and self.set_interval(interval) is False:
- return False
- else:
- self.cb = self.pyload.scheduler.addJob(max(1, delay), self._periodical, [threaded], threaded=threaded)
- return True
-
-
- def restart_periodical(self, *args, **kwargs):
- self.stop_periodical()
- return self.start_periodical(*args, **kwargs)
-
-
- def stop_periodical(self):
- try:
- return self.pyload.scheduler.removeJob(self.cb)
- finally:
- self.cb = None
-
- def _periodical(self, threaded):
- try:
- self.periodical()
-
- except Exception, e:
- self.log_error(_("Error performing periodical task"), e)
-
- self.restart_periodical(threaded=threaded, delay=self.interval)
-
-
- def periodical(self):
+ def periodical_task(self):
raise NotImplementedError
#: Deprecated method, use `activated` property instead (Remove in 0.4.10)
- def isActivated(self, *args, **kwargs):
+ def isActivated(self):
return self.activated
@@ -152,9 +115,9 @@ class Addon(Plugin):
#: Deprecated method, use `deactivate` instead (Remove in 0.4.10)
- def unload(self, *args, **kwargs):
- self.store("info", self.info)
- return self.deactivate(*args, **kwargs)
+ def unload(self):
+ self.db.store("info", self.info)
+ return self.deactivate()
def activate(self):
@@ -165,13 +128,9 @@ class Addon(Plugin):
#: Deprecated method, use `activate` instead (Remove in 0.4.10)
- def coreReady(self, *args, **kwargs):
- self.retrieve("info", self.info)
-
- if self.PERIODICAL_INTERVAL:
- self.start_periodical(self.PERIODICAL_INTERVAL, delay=5)
-
- return self.activate(*args, **kwargs)
+ def coreReady(self):
+ self.db.retrieve("info", self.info)
+ return self.activate()
def exit(self):
@@ -182,9 +141,25 @@ class Addon(Plugin):
#: Deprecated method, use `exit` instead (Remove in 0.4.10)
- def coreExiting(self, *args, **kwargs):
- self.unload(*args, **kwargs) #@TODO: Fix in 0.4.10
- return self.exit(*args, **kwargs)
+ def coreExiting(self):
+ self.unload() #@TODO: Fix in 0.4.10
+ return self.exit()
+
+
+ def config_changed(self, category, option, value, section):
+ pass
+
+
+ def all_downloads_finished(self):
+ pass
+
+
+ def all_downloads_processed(self):
+ pass
+
+
+ def links_added(self, urls, pypack):
+ pass
def download_preparing(self, pyfile):
@@ -197,13 +172,22 @@ class Addon(Plugin):
return self.download_preparing(pyfile)
+ def download_start(self, pyfile, url, filename):
+ pass
+
+
+ def download_processed(self, pyfile):
+ pass
+
+
def download_finished(self, pyfile):
pass
#: Deprecated method, use `download_finished` instead (Remove in 0.4.10)
- def downloadFinished(self, *args, **kwargs):
- return self.download_finished(*args, **kwargs)
+ def downloadFinished(self, pyfile):
+ if pyfile.hasStatus("finished"): #: Check if still "finished" (Fix in 0.4.10)
+ return self.download_finished(pyfile)
def download_failed(self, pyfile):
@@ -211,8 +195,21 @@ class Addon(Plugin):
#: Deprecated method, use `download_failed` instead (Remove in 0.4.10)
- def downloadFailed(self, *args, **kwargs):
- return self.download_failed(*args, **kwargs)
+ def downloadFailed(self, pyfile):
+ if pyfile.hasStatus("failed"): #: Check if still "failed" (Fix in 0.4.10)
+ return self.download_failed(pyfile)
+
+
+ def package_processed(self, pypack):
+ pass
+
+
+ def package_deleted(self, pid):
+ pass
+
+
+ def package_failed(self, pypack):
+ pass
def package_finished(self, pypack):
@@ -220,8 +217,8 @@ class Addon(Plugin):
#: Deprecated method, use `package_finished` instead (Remove in 0.4.10)
- def packageFinished(self, *args, **kwargs):
- return self.package_finished(*args, **kwargs)
+ def packageFinished(self, pypack):
+ return self.package_finished(pypack)
def before_reconnect(self, ip):
@@ -229,8 +226,8 @@ class Addon(Plugin):
#: Deprecated method, use `before_reconnect` instead (Remove in 0.4.10)
- def beforeReconnecting(self, *args, **kwargs):
- return self.before_reconnect(*args, **kwargs)
+ def beforeReconnecting(self, ip):
+ return self.before_reconnect(ip)
def after_reconnect(self, ip, oldip):
@@ -251,8 +248,8 @@ class Addon(Plugin):
#: Deprecated method, use `captcha_task` instead (Remove in 0.4.10)
- def newCaptchaTask(self, *args, **kwargs):
- return self.captcha_task(*args, **kwargs)
+ def newCaptchaTask(self, task):
+ return self.captcha_task(task)
def captcha_correct(self, task):
@@ -260,8 +257,8 @@ class Addon(Plugin):
#: Deprecated method, use `captcha_correct` instead (Remove in 0.4.10)
- def captchaCorrect(self, *args, **kwargs):
- return self.captcha_correct(*args, **kwargs)
+ def captchaCorrect(self, task):
+ return self.captcha_correct(task)
def captcha_invalid(self, task):
@@ -269,5 +266,5 @@ class Addon(Plugin):
#: Deprecated method, use `captcha_invalid` instead (Remove in 0.4.10)
- def captchaInvalid(self, *args, **kwargs):
- return self.captcha_invalid(*args, **kwargs)
+ def captchaInvalid(self, task):
+ return self.captcha_invalid(task)
diff --git a/module/plugins/internal/Base.py b/module/plugins/internal/Base.py
index e28121ce2..47111ca8e 100644
--- a/module/plugins/internal/Base.py
+++ b/module/plugins/internal/Base.py
@@ -8,11 +8,11 @@ import urlparse
from module.plugins.internal.Captcha import Captcha
from module.plugins.internal.Plugin import Plugin, Abort, Fail, Reconnect, Retry, Skip
-from module.plugins.internal.utils import (decode, encode, fixurl, format_size, format_time,
- parse_html_form, parse_name, replace_patterns)
+from module.plugins.internal.misc import (decode, encode, fixurl, format_size, format_time,
+ parse_html_form, parse_name, replace_patterns)
-#@TODO: Remove in 0.4.10
+#@TODO: Recheck in 0.4.10
def getInfo(urls):
#: result = [ .. (name, size, status, url) .. ]
pass
@@ -24,19 +24,10 @@ def parse_fileInfo(klass, url="", html=""):
return encode(info['name']), info['size'], info['status'], info['url']
-#@TODO: Remove in 0.4.10
-def create_getInfo(klass):
- def get_info(urls):
- for url in urls:
- yield parse_fileInfo(klass, url)
-
- return get_info
-
-
class Base(Plugin):
__name__ = "Base"
__type__ = "base"
- __version__ = "0.19"
+ __version__ = "0.20"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
@@ -55,6 +46,7 @@ class Base(Plugin):
def get_info(cls, url="", html=""):
url = fixurl(url, unquote=True)
info = {'name' : parse_name(url),
+ 'hash' : {},
'pattern': {},
'size' : 0,
'status' : 3 if url else 8,
@@ -72,9 +64,6 @@ class Base(Plugin):
def __init__(self, pyfile):
self._init(pyfile.m.core)
- #:
- self.premium = None
-
#: Engage wan reconnection
self.wantReconnect = False #@TODO: Change to `want_reconnect` in 0.4.10
@@ -82,22 +71,25 @@ class Base(Plugin):
self.multiDL = True #@TODO: Change to `multi_dl` in 0.4.10
#: time.time() + wait in seconds
- self.waiting = False
+ self.waiting = False
#: Account handler instance, see :py:class:`Account`
self.account = None
self.user = None #@TODO: Remove in 0.4.10
+ self.premium = None
#: Associated pyfile instance, see `PyFile`
self.pyfile = pyfile
- self.thread = None #: Holds thread in future
+ #: Holds thread in future
+ self.thread = None
#: Js engine, see `JsEngine`
self.js = self.pyload.js
#: Captcha stuff
- self.captcha = Captcha(self)
+ _Captcha = self.pyload.pluginManager.loadClass("captcha", self.classname) or Captcha
+ self.captcha = _Captcha(pyfile)
#: Some plugins store html code here
self.data = ""
@@ -112,6 +104,13 @@ class Base(Plugin):
def _log(self, level, plugintype, pluginname, messages):
log = getattr(self.pyload.log, level)
msg = u" | ".join(decode(a).strip() for a in messages if a)
+
+ #: Hide any password
+ try:
+ msg = msg.replace(self.account.info['login']['password'], "**********")
+ except Exception:
+ pass
+
log("%(plugintype)s %(pluginname)s[%(id)s]: %(msg)s" %
{'plugintype': plugintype.upper(),
'pluginname': pluginname,
@@ -136,11 +135,12 @@ class Base(Plugin):
def _setup(self):
#@TODO: Remove in 0.4.10
- self.data = ""
self.pyfile.error = ""
- self.last_html = None
+ self.data = ""
+ self.last_html = ""
+ self.last_header = {}
- if self.get_config('use_premium', True):
+ if self.config.get('use_premium', True):
self.load_account() #@TODO: Move to PluginThread in 0.4.10
else:
self.account = False
@@ -193,7 +193,7 @@ class Base(Plugin):
size = self.info.get('size')
if size > 0:
- self.pyfile.size = int(self.info['size']) #@TODO: Fix int conversion in 0.4.10
+ self.pyfile.size = int(self.info.get('size')) #@TODO: Fix int conversion in 0.4.10
else:
size = self.pyfile.size
@@ -249,24 +249,29 @@ class Base(Plugin):
self.abort()
- def _process(self, thread):
- """
- Handles important things to do before starting
- """
+ def _initialize(self):
self.log_debug("Plugin version: " + self.__version__)
self.log_debug("Plugin status: " + self.__status__)
if self.__status__ is "broken":
- self.fail(_("Plugin is temporarily unavailable"))
+ self.abort(_("Plugin is temporarily unavailable"))
elif self.__status__ is "testing":
self.log_warning(_("Plugin may be unstable"))
+
+ def _process(self, thread):
+ """
+ Handles important things to do before starting
+ """
self.thread = thread
+
+ self._initialize()
self._setup()
- # self.pyload.hookManager.downloadPreparing(self.pyfile) #@TODO: Recheck in 0.4.10
- self.check_status()
+ #@TODO: Enable in 0.4.10
+ # self.pyload.hookManager.downloadPreparing(self.pyfile)
+ # self.check_status()
self.pyfile.setStatus("starting")
@@ -277,6 +282,7 @@ class Base(Plugin):
#: Deprecated method, use `_process` instead (Remove in 0.4.10)
def preprocessing(self, *args, **kwargs):
+ time.sleep(1) #@NOTE: Recheck info thread synchronization in 0.4.10
return self._process(*args, **kwargs)
@@ -320,15 +326,13 @@ class Base(Plugin):
"""
Waits the time previously set
"""
- pyfile = self.pyfile
-
if seconds is not None:
self.set_wait(seconds)
if reconnect is not None:
self.set_reconnect(reconnect)
- wait_time = pyfile.waitUntil - time.time()
+ wait_time = self.pyfile.waitUntil - time.time()
if wait_time < 1:
self.log_warning(_("Invalid wait time interval"))
@@ -336,8 +340,8 @@ class Base(Plugin):
self.waiting = True
- status = pyfile.status #@NOTE: Recheck in 0.4.10
- pyfile.setStatus("waiting")
+ status = self.pyfile.status #@NOTE: Recheck in 0.4.10
+ self.pyfile.setStatus("waiting")
self.log_info(_("Waiting %s...") % format_time(wait_time))
@@ -347,12 +351,12 @@ class Base(Plugin):
self.log_warning(_("Reconnection ignored due logged account"))
if not self.wantReconnect or self.account:
- while pyfile.waitUntil > time.time():
+ while self.pyfile.waitUntil > time.time():
self.check_status()
time.sleep(2)
else:
- while pyfile.waitUntil > time.time():
+ while self.pyfile.waitUntil > time.time():
self.check_status()
self.thread.m.reconnecting.wait(1)
@@ -366,7 +370,7 @@ class Base(Plugin):
time.sleep(2)
self.waiting = False
- pyfile.status = status #@NOTE: Recheck in 0.4.10
+ self.pyfile.status = status #@NOTE: Recheck in 0.4.10
def skip(self, msg=""):
diff --git a/module/plugins/internal/Captcha.py b/module/plugins/internal/Captcha.py
index d30271dd4..fe0830693 100644
--- a/module/plugins/internal/Captcha.py
+++ b/module/plugins/internal/Captcha.py
@@ -6,13 +6,13 @@ import os
import time
from module.plugins.internal.Plugin import Plugin
-from module.plugins.internal.utils import encode
+from module.plugins.internal.misc import encode
class Captcha(Plugin):
__name__ = "Captcha"
__type__ = "captcha"
- __version__ = "0.47"
+ __version__ = "0.48"
__status__ = "stable"
__description__ = """Base anti-captcha plugin"""
@@ -20,10 +20,10 @@ class Captcha(Plugin):
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- def __init__(self, plugin): #@TODO: Pass pyfile instead plugin, so store plugin's html in its associated pyfile as data
- self._init(plugin.pyload)
+ def __init__(self, pyfile):
+ self._init(pyfile.m.core)
- self.plugin = plugin
+ self.pyfile = pyfile
self.task = None #: captchaManager task
self.init()
@@ -31,28 +31,27 @@ class Captcha(Plugin):
def _log(self, level, plugintype, pluginname, messages):
messages = (self.__name__,) + messages
- return self.plugin._log(level, plugintype, self.plugin.__name__, messages)
+ return self.pyfile.plugin._log(level, plugintype, self.pyfile.plugin.__name__, messages)
def recognize(self, image):
"""
Extend to build your custom anti-captcha ocr
"""
- self.log_debug("This function does nothing")
pass
def decrypt(self, url, get={}, post={}, ref=False, cookies=True, decode=False, req=None,
input_type='jpg', output_type='textual', ocr=True, timeout=120):
- img = self.load(url, get=get, post=post, ref=ref, cookies=cookies, decode=decode, req=req or self.plugin.req)
+ img = self.load(url, get=get, post=post, ref=ref, cookies=cookies, decode=decode, req=req or self.pyfile.plugin.req)
return self.decrypt_image(img, input_type, output_type, ocr, timeout)
- def decrypt_image(self, data, input_type='jpg', output_type='textual', ocr=False, timeout=120):
+ def decrypt_image(self, img, input_type='jpg', output_type='textual', ocr=False, timeout=120):
"""
Loads a captcha and decrypts it with ocr, plugin, user input
- :param data: image raw data
+ :param img: image raw data
:param get: get part for request
:param post: post part for request
:param cookies: True if cookies should be enabled
@@ -67,27 +66,27 @@ class Captcha(Plugin):
result = ""
time_ref = ("%.2f" % time.time())[-6:].replace(".", "")
- with open(os.path.join("tmp", "captcha_image_%s_%s.%s" % (self.plugin.__name__, time_ref, input_type)), "wb") as tmp_img:
- tmp_img.write(encode(data))
+ with open(os.path.join("tmp", "captcha_image_%s_%s.%s" % (self.pyfile.plugin.__name__, time_ref, input_type)), "wb") as img_f:
+ img_f.write(encode(img))
if ocr:
if isinstance(ocr, basestring):
- OCR = self.pyload.pluginManager.loadClass("captcha", ocr) #: Rename `captcha` to `ocr` in 0.4.10
- result = OCR(self.plugin).recognize(tmp_img.name)
+ _OCR = self.pyload.pluginManager.loadClass("captcha", ocr) #: Rename `captcha` to `ocr` in 0.4.10
+ result = _OCR(self.pyfile).recognize(img_f.name)
else:
- result = self.recognize(tmp_img.name)
+ result = self.recognize(img_f.name)
if not result:
captchaManager = self.pyload.captchaManager
try:
- self.task = captchaManager.newTask(data, input_type, tmp_img.name, output_type)
+ self.task = captchaManager.newTask(img, input_type, img_f.name, output_type)
captchaManager.handleCaptcha(self.task)
self.task.setWaiting(max(timeout, 50)) #@TODO: Move to `CaptchaManager` in 0.4.10
while self.task.isWaiting():
- self.plugin.check_status()
+ self.pyfile.plugin.check_status()
time.sleep(1)
finally:
@@ -97,16 +96,12 @@ class Captcha(Plugin):
self.fail(self.task.error)
elif not self.task.result:
- self.plugin.retry_captcha(msg=_("No captcha result obtained in appropriate time"))
+ self.pyfile.plugin.retry_captcha(msg=_("No captcha result obtained in appropriate time"))
result = self.task.result
if not self.pyload.debug:
- try:
- os.remove(tmp_img.name)
-
- except OSError, e:
- self.log_warning(_("Error removing `%s`") % tmp_img.name, e)
+ self.remove(img_f.name, trash=False)
# self.log_info(_("Captcha result: ") + result) #@TODO: Remove from here?
diff --git a/module/plugins/internal/CaptchaService.py b/module/plugins/internal/CaptchaService.py
index 397f4f750..6f08d4498 100644
--- a/module/plugins/internal/CaptchaService.py
+++ b/module/plugins/internal/CaptchaService.py
@@ -27,7 +27,7 @@ class CaptchaService(Captcha):
def retrieve_data(self):
- return self.plugin.data or self.plugin.last_html or ""
+ return self.pyfile.plugin.data or self.pyfile.plugin.last_html or ""
def detect_key(self, data=None):
diff --git a/module/plugins/internal/Container.py b/module/plugins/internal/Container.py
index cff4ac4ec..db14a286e 100644
--- a/module/plugins/internal/Container.py
+++ b/module/plugins/internal/Container.py
@@ -4,21 +4,22 @@ from __future__ import with_statement
import os
import re
+import urlparse
from module.plugins.internal.Crypter import Crypter
-from module.plugins.internal.utils import encode, exists, fs_join
+from module.plugins.internal.misc import encode, exists
class Container(Crypter):
__name__ = "Container"
__type__ = "container"
- __version__ = "0.09"
+ __version__ = "0.10"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
- __config__ = [("activated" , "bool", "Activated" , True),
- ("use_subfolder" , "bool", "Save package to subfolder" , True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("use_premium" , "bool" , "Use premium account if available", True ),
+ ("folder_per_package", "Default;Yes;No", "Create folder for each package" , "Default")]
__description__ = """Base container decrypter plugin"""
__license__ = "GPLv3"
@@ -30,30 +31,39 @@ class Container(Crypter):
"""
Main method
"""
- self._load2disk()
+ self._make_tmpfile()
self.decrypt(pyfile)
- self.delete_tmp()
-
if self.links:
self._generate_packages()
elif not self.packages:
self.error(_("No link grabbed"), "decrypt")
+ self._delete_tmpfile()
+
self._create_packages()
- def _load2disk(self):
+ def _delete_tmpfile(self):
+ if self.pyfile.name.startswith("tmp_"):
+            self.remove(self.pyfile.url, trash=False)
+
+
+ def _make_tmpfile(self):
"""
Loads container to disk if its stored remotely and overwrite url,
or check existent on several places at disk
"""
- if self.pyfile.url.startswith("http"):
- self.pyfile.name = re.findall("([^\/=]+)", self.pyfile.url)[-1]
+ remote = bool(urlparse.urlparse(self.pyfile.url).netloc)
+
+ if remote:
content = self.load(self.pyfile.url)
- self.pyfile.url = fs_join(self.pyload.config.get("general", "download_folder"), self.pyfile.name)
+
+ self.pyfile.name = "tmp_" + self.pyfile.name
+ self.pyfile.url = os.path.join(self.pyload.config.get("general", "download_folder"), self.pyfile.name)
+
try:
with open(self.pyfile.url, "wb") as f:
f.write(encode(content))
@@ -61,23 +71,5 @@ class Container(Crypter):
except IOError, e:
self.fail(e)
- else:
- self.pyfile.name = os.path.basename(self.pyfile.url)
-
- if not exists(self.pyfile.url):
- if exists(fs_join(pypath, self.pyfile.url)):
- self.pyfile.url = fs_join(pypath, self.pyfile.url)
- else:
- self.fail(_("File not exists"))
- else:
- self.data = self.pyfile.url #@NOTE: ???
-
-
- def delete_tmp(self):
- if not self.pyfile.name.startswith("tmp_"):
- return
-
- try:
- os.remove(self.pyfile.url)
- except OSError, e:
- self.log_warning(_("Error removing `%s`") % self.pyfile.url, e)
+ elif not exists(self.pyfile.url):
+ self.fail(_("File not found"))
diff --git a/module/plugins/internal/Crypter.py b/module/plugins/internal/Crypter.py
index 157ce5dab..596bbcfb4 100644
--- a/module/plugins/internal/Crypter.py
+++ b/module/plugins/internal/Crypter.py
@@ -1,20 +1,19 @@
# -*- coding: utf-8 -*-
-from module.plugins.internal.Base import Base, create_getInfo, parse_fileInfo
-from module.plugins.internal.utils import fixname, parse_name
+from module.plugins.internal.Base import Base
+from module.plugins.internal.misc import parse_name, safename
class Crypter(Base):
__name__ = "Crypter"
__type__ = "crypter"
- __version__ = "0.14"
+ __version__ = "0.16"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
- __config__ = [("activated" , "bool", "Activated" , True),
- ("use_premium" , "bool", "Use premium account if available" , True),
- ("use_subfolder" , "bool", "Save package to subfolder" , True), #: Overrides pyload.config.get("general", "folder_per_package")
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("use_premium" , "bool" , "Use premium account if available", True ),
+ ("folder_per_package", "Default;Yes;No", "Create folder for each package" , "Default")]
__description__ = """Base decrypter plugin"""
__license__ = "GPLv3"
@@ -23,18 +22,15 @@ class Crypter(Base):
def init_base(self):
self.packages = [] #: Put all packages here. It's a list of tuples like: ( name, [list of links], folder )
- self.links = [] #: List of urls, pyLoad will generate packagenames
+ self.links = [] #: List of urls, pyLoad will generate packagenames
def setup_base(self):
self.packages = []
- self.links = []
+ self.links = []
def process(self, pyfile):
- """
- Main method
- """
self.decrypt(pyfile)
if self.links:
@@ -57,7 +53,8 @@ class Crypter(Base):
"""
Generate new packages from self.links
"""
- packages = [(name, links, None) for name, links in self.pyload.api.generatePackages(self.links).items()]
+ pdict = self.pyload.api.generatePackages(self.links)
+ packages = [(name, links, parse_name(name)) for name, links in pdict.items()]
self.packages.extend(packages)
@@ -65,41 +62,37 @@ class Crypter(Base):
"""
Create new packages from self.packages
"""
- package_folder = self.pyfile.package().folder
- package_password = self.pyfile.package().password
- package_queue = self.pyfile.package().queue
+ pack_folder = self.pyfile.package().folder
+ pack_password = self.pyfile.package().password
+ pack_queue = self.pyfile.package().queue
+
+ folder_per_package = self.config.get('folder_per_package', "Default")
- folder_per_package = self.pyload.config.get("general", "folder_per_package")
- use_subfolder = self.get_config('use_subfolder', folder_per_package)
- subfolder_per_package = self.get_config('subfolder_per_package', True)
+        if folder_per_package == "Default":
+ folder_per_package = self.pyload.config.get("general", "folder_per_package")
for name, links, folder in self.packages:
- self.log_info(_("Parsed package: %s") % name,
- _("Found %d links") % len(links),
- _("Saved to folder: %s") % folder if folder else _("Saved to default download folder"))
+ self.log_info(_("Create package: %s") % name,
+ _("%d links") % len(links))
links = map(self.fixurl, links)
self.log_debug("LINKS for package " + name, *links)
- pid = self.pyload.api.addPackage(name, links, package_queue)
+ pid = self.pyload.api.addPackage(name, links, pack_queue)
- if package_password:
- self.pyload.api.setPackageData(pid, {'password': package_password})
+ if pack_password:
+ self.pyload.api.setPackageData(pid, {'password': pack_password})
#: Workaround to do not break API addPackage method
- set_folder = lambda x="": self.pyload.api.setPackageData(pid, {'folder': fixname(x)})
+ set_folder = lambda x: self.pyload.api.setPackageData(pid, {'folder': safename(x or "")})
- if use_subfolder:
- if not subfolder_per_package:
- set_folder(package_folder)
- self.log_debug("Set package %(name)s folder to: %(folder)s" % {'name': name, 'folder': folder})
+ if not folder_per_package:
+ folder = pack_folder
- elif not folder_per_package or name is not folder:
- if not folder:
- folder = parse_name(name)
+ elif not folder or folder == name:
+ folder = parse_name(name)
- set_folder(folder)
- self.log_debug("Set package %(name)s folder to: %(folder)s" % {'name': name, 'folder': folder})
+ self.log_info(_("Save package `%(name)s` to folder: %(folder)s")
+ % {'name': name, 'folder': folder})
- elif folder_per_package:
- set_folder()
+ set_folder(folder)
diff --git a/module/plugins/internal/DeadCrypter.py b/module/plugins/internal/DeadCrypter.py
index 5618667ba..0ad81db6c 100644
--- a/module/plugins/internal/DeadCrypter.py
+++ b/module/plugins/internal/DeadCrypter.py
@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
-from module.plugins.internal.Crypter import Crypter, create_getInfo
+from module.plugins.internal.Crypter import Crypter
class DeadCrypter(Crypter):
__name__ = "DeadCrypter"
__type__ = "crypter"
- __version__ = "0.10"
+ __version__ = "0.11"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
@@ -26,6 +26,3 @@ class DeadCrypter(Crypter):
def setup(self):
self.offline(_("Crypter is no longer available"))
-
-
-getInfo = create_getInfo(DeadCrypter)
diff --git a/module/plugins/internal/DeadHoster.py b/module/plugins/internal/DeadHoster.py
index 329f2fdea..430c5eab0 100644
--- a/module/plugins/internal/DeadHoster.py
+++ b/module/plugins/internal/DeadHoster.py
@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
-from module.plugins.internal.Hoster import Hoster, create_getInfo
+from module.plugins.internal.Hoster import Hoster
class DeadHoster(Hoster):
__name__ = "DeadHoster"
__type__ = "hoster"
- __version__ = "0.20"
+ __version__ = "0.21"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
@@ -26,6 +26,3 @@ class DeadHoster(Hoster):
def setup(self):
self.offline(_("Hoster is no longer available"))
-
-
-getInfo = create_getInfo(DeadHoster)
diff --git a/module/plugins/internal/Extractor.py b/module/plugins/internal/Extractor.py
index 41ba4d429..1d035f3e6 100644
--- a/module/plugins/internal/Extractor.py
+++ b/module/plugins/internal/Extractor.py
@@ -5,7 +5,7 @@ import re
from module.PyFile import PyFile
from module.plugins.internal.Plugin import Plugin
-from module.plugins.internal.utils import encode
+from module.plugins.internal.misc import encode
class ArchiveError(Exception):
@@ -40,7 +40,7 @@ class Extractor(Plugin):
@classmethod
def isarchive(cls, filename):
name = os.path.basename(filename).lower()
- return any(name.endswith(ext) for ext in cls.EXTENSIONS)
+ return any(name.endswith('.' + ext) for ext in cls.EXTENSIONS)
@classmethod
@@ -63,43 +63,48 @@ class Extractor(Plugin):
:param files_ids: List of filepathes
:return: List of targets, id tuple list
"""
- targets = []
+ targets = []
processed = []
- for fname, id, fout in files_ids:
- if cls.isarchive(fname):
- pname = re.sub(cls.re_multipart, "", fname) if cls.ismultipart(fname) else os.path.splitext(fname)[0]
- if pname not in processed:
- processed.append(pname)
- targets.append((fname, id, fout))
+ for id, fname, fout in files_ids:
+ if not cls.isarchive(fname):
+ continue
+
+ if cls.ismultipart(fname):
+ pname = re.sub(cls._RE_PART, "", fname)
+ else:
+ pname = os.path.splitext(fname)[0]
+
+ if pname in processed:
+ continue
+
+ processed.append(pname)
+ targets.append((id, fname, fout))
return targets
- def __init__(self, plugin, filename, out,
+ def __init__(self, pyfile, filename, out,
fullpath=True,
overwrite=False,
excludefiles=[],
priority=0,
- keepbroken=False,
- fid=None):
+ keepbroken=False):
"""
Initialize extractor for specific file
"""
- self._init(plugin.pyload)
+ self._init(pyfile.m.core)
- self.plugin = plugin
+ self.pyfile = pyfile
self.filename = filename
+ self.name = os.path.basename(filename)
self.out = out
self.fullpath = fullpath
self.overwrite = overwrite
self.excludefiles = excludefiles
self.priority = priority
self.keepbroken = keepbroken
- self.files = [] #: Store extracted files here
-
- pyfile = self.pyload.files.getFile(fid) if fid else None
- self.notify_progress = lambda x: pyfile.setProgress(x) if pyfile else lambda x: None
+ self.progress = lambda x: pyfile.setProgress(int(x))
self.init()
@@ -109,9 +114,14 @@ class Extractor(Plugin):
return encode(self.filename)
+ @property
+ def dest(self):
+ return encode(self.out)
+
+
def _log(self, level, plugintype, pluginname, messages):
messages = (self.__name__,) + messages
- return self.plugin._log(level, plugintype, self.plugin.__name__, messages)
+ return self.pyfile.plugin._log(level, plugintype, self.pyfile.plugin.__name__, messages)
def verify(self, password=None):
@@ -134,7 +144,7 @@ class Extractor(Plugin):
raise NotImplementedError
- def items(self):
+ def chunks(self):
"""
Return list of archive parts
"""
@@ -143,6 +153,6 @@ class Extractor(Plugin):
def list(self, password=None):
"""
- Populate self.files at some point while extracting
+ Return list of archive files
"""
- return self.files
+ raise NotImplementedError
diff --git a/module/plugins/internal/Hoster.py b/module/plugins/internal/Hoster.py
index f5ba13875..6d88732ea 100644
--- a/module/plugins/internal/Hoster.py
+++ b/module/plugins/internal/Hoster.py
@@ -2,14 +2,15 @@
from __future__ import with_statement
+import hashlib
import mimetypes
import os
import re
from module.network.HTTPRequest import BadHeader
-from module.plugins.internal.Base import Base, create_getInfo, parse_fileInfo
+from module.plugins.internal.Base import Base
from module.plugins.internal.Plugin import Fail, Retry
-from module.plugins.internal.utils import encode, exists, fixurl, fs_join, parse_name
+from module.plugins.internal.misc import compute_checksum, encode, exists, fixurl, fsjoin, parse_name, safejoin
class Hoster(Base):
@@ -19,16 +20,26 @@ class Hoster(Base):
__status__ = "stable"
__pattern__ = r'^unmatchable$'
- __config__ = [("activated" , "bool", "Activated" , True),
- ("use_premium" , "bool", "Use premium account if available" , True),
- ("fallback" , "bool", "Fallback to free download if premium fails", True),
- ("chk_filesize", "bool", "Check file size" , True)]
+ __config__ = [("activated" , "bool", "Activated" , True ),
+ ("use_premium", "bool", "Use premium account if available" , True ),
+ ("fallback" , "bool", "Fallback to free download if premium fails", True )]
__description__ = """Base hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+ @property
+ def last_download(self):
+ return self._last_download if exists(self._last_download) else ""
+
+
+ @last_download.setter
+ def last_download(self, value):
+ if exists(value):
+ self._last_download = value or ""
+
+
def init_base(self):
#: Enable simultaneous processing of multiple downloads
self.limitDL = 0 #@TODO: Change to `limit_dl` in 0.4.10
@@ -40,7 +51,7 @@ class Hoster(Base):
self.resume_download = False
#: Location where the last call to download was saved
- self.last_download = None
+ self._last_download = ""
#: Re match of the last call to `checkDownload`
self.last_check = None
@@ -72,40 +83,66 @@ class Hoster(Base):
def _process(self, thread):
- self.log_debug("Plugin version: " + self.__version__)
- self.log_debug("Plugin status: " + self.__status__)
-
- if self.__status__ is "broken":
- self.fail(_("Plugin is temporarily unavailable"))
-
- elif self.__status__ is "testing":
- self.log_warning(_("Plugin may be unstable"))
-
self.thread = thread
+
+ self._initialize()
self._setup()
- # self.pyload.hookManager.downloadPreparing(self.pyfile) #@TODO: Recheck in 0.4.10
- self.check_status()
+ #@TODO: Enable in 0.4.10
+ # self.pyload.hookManager.downloadPreparing(self.pyfile)
+ # self.check_status()
+ self.check_duplicates()
self.pyfile.setStatus("starting")
try:
+ self.log_info(_("Processing url: ") + self.pyfile.url)
self.process(self.pyfile)
self.check_status()
- self.check_download()
+
+ self._check_download()
except Fail, e: #@TODO: Move to PluginThread in 0.4.10
- if self.get_config('fallback', True) and self.premium:
+ if self.config.get('fallback', True) and self.premium:
self.log_warning(_("Premium download failed"), e)
self.restart(premium=False)
else:
raise Fail(encode(e))
+ finally:
+ self._finalize()
+
+
+ #@TODO: Remove in 0.4.10
+ def _finalize(self):
+ pypack = self.pyfile.package()
+
+ self.pyload.hookManager.dispatchEvent("download_processed", self.pyfile)
+
+ try:
+ unfinished = any(fdata['status'] is 3 for fid, fdata in pypack.getChildren().items()
+ if fid is not self.pyfile.id)
+ if unfinished:
+ return
+
+ self.pyload.hookManager.dispatchEvent("package_processed", pypack)
+
+ failed = any(fdata['status'] in (1, 6, 8, 9, 14)
+ for fid, fdata in pypack.getChildren().items())
+
+ if not failed:
+ return
+
+ self.pyload.hookManager.dispatchEvent("package_failed", pypack)
+
+ finally:
+ self.check_status()
+
def isdownload(self, url, resume=None, redirect=True):
link = False
- maxredirs = 10
+ maxredirs = 5
if resume is None:
resume = self.resume_download
@@ -114,7 +151,7 @@ class Hoster(Base):
maxredirs = max(redirect, 1)
elif redirect:
- maxredirs = self.get_config("maxredirs", default=maxredirs, plugin="UserAgentSwitcher")
+ maxredirs = int(self.pyload.api.getConfigValue("UserAgentSwitcher", "maxredirs", "plugin")) or maxredirs #@TODO: Remove `int` in 0.4.10
for i in xrange(maxredirs):
self.log_debug("Redirect #%d to: %s" % (i, url))
@@ -128,10 +165,10 @@ class Hoster(Base):
location = self.fixurl(header.get('location'), url)
code = header.get('code')
- if code == 302:
+ if code is 302:
link = location
- elif code == 301:
+ elif code is 301:
url = location
if redirect:
continue
@@ -176,7 +213,8 @@ class Hoster(Base):
if self.pyload.debug:
self.log_debug("DOWNLOAD URL " + url,
- *["%s=%s" % (key, val) for key, val in locals().items() if key not in ("self", "url", "_[1]")])
+ *["%s=%s" % (key, value) for key, value in locals().items()
+ if key not in ("self", "url", "_[1]")])
dl_url = self.fixurl(url)
dl_basename = parse_name(self.pyfile.name)
@@ -184,15 +222,13 @@ class Hoster(Base):
self.pyfile.name = dl_basename
self.captcha.correct()
-
- if self.pyload.config.get("download", "skip_existing"):
- self.check_filedupe()
+ self.check_duplicates()
self.pyfile.setStatus("downloading")
dl_folder = self.pyload.config.get("general", "download_folder")
- dl_dirname = os.path.join(dl_folder, self.pyfile.package().folder)
- dl_filename = os.path.join(dl_dirname, dl_basename)
+ dl_dirname = safejoin(dl_folder, self.pyfile.package().folder)
+ dl_filename = safejoin(dl_dirname, dl_basename)
dl_dir = encode(dl_dirname)
dl_file = encode(dl_filename) #@TODO: Move safe-filename check to HTTPDownload in 0.4.10
@@ -212,7 +248,7 @@ class Hoster(Base):
dl_chunks = self.pyload.config.get("download", "chunks")
chunk_limit = chunks or self.chunk_limit or -1
- if dl_chunks is -1 or chunk_limit is -1:
+ if -1 in (dl_chunks, chunk_limit):
chunks = max(dl_chunks, chunk_limit)
else:
chunks = min(dl_chunks, chunk_limit)
@@ -233,15 +269,11 @@ class Hoster(Base):
self.pyfile.size = self.req.size
if self.req.code in (404, 410):
- bad_file = fs_join(dl_dirname, newname)
- try:
- os.remove(bad_file)
-
- except OSError, e:
- self.log_debug(_("Error removing `%s`") % bad_file, e)
-
- else:
+ bad_file = fsjoin(dl_dirname, newname)
+ if self.remove(bad_file):
return ""
+ else:
+ self.log_info(_("File saved"))
#@TODO: Recheck in 0.4.10
if disposition and newname:
@@ -249,8 +281,8 @@ class Hoster(Base):
if safename != newname:
try:
- old_file = fs_join(dl_dirname, newname)
- new_file = fs_join(dl_dirname, safename)
+ old_file = fsjoin(dl_dirname, newname)
+ new_file = fsjoin(dl_dirname, safename)
os.rename(old_file, new_file)
except OSError, e:
@@ -272,158 +304,178 @@ class Hoster(Base):
return dl_filename
- def check_filesize(self, file_size, size_tolerance=1024):
- """
- Checks the file size of the last downloaded file
-
- :param file_size: expected file size
- :param size_tolerance: size check tolerance
- """
- if not self.last_download:
- return
-
- dl_location = encode(self.last_download)
- dl_size = os.stat(dl_location).st_size
-
- if dl_size < 1:
- self.fail(_("Empty file"))
-
- elif file_size > 0:
- diff = abs(file_size - dl_size)
-
- if diff > size_tolerance:
- self.fail(_("File size mismatch | Expected file size: %s | Downloaded file size: %s")
- % (file_size, dl_size))
-
- elif diff != 0:
- self.log_warning(_("File size is not equal to expected size"))
-
-
- def check_file(self, rules, delete=False, read_size=1048576, file_size=0, size_tolerance=1024):
+ def scan_download(self, rules, read_size=1048576):
"""
Checks the content of the last downloaded file, re match is saved to `last_check`
:param rules: dict with names and rules to match (compiled regexp or strings)
:param delete: delete if matched
- :param file_size: expected file size
- :param size_tolerance: size check tolerance
- :param read_size: amount of bytes to read from files
:return: dictionary key of the first rule that matched
"""
- do_delete = False
- last_download = encode(self.last_download) #@TODO: Recheck in 0.4.10
+ dl_file = encode(self.last_download) #@TODO: Recheck in 0.4.10
- if not self.last_download or not exists(last_download):
- self.fail(self.pyfile.error or _("No file downloaded"))
+ if not self.last_download:
+ self.log_warning(_("No file to scan"))
+ return
- try:
- self.check_filesize(file_size, size_tolerance)
-
- with open(last_download, "rb") as f:
- content = f.read(read_size)
-
- #: Produces encoding errors, better log to other file in the future?
- # self.log_debug("Content: %s" % content)
- for name, rule in rules.items():
- if isinstance(rule, basestring):
- if rule in content:
- do_delete = True
- return name
-
- elif hasattr(rule, "search"):
- m = rule.search(content)
- if m is not None:
- do_delete = True
- self.last_check = m
- return name
- finally:
- if delete and do_delete:
- try:
- os.remove(last_download)
+ with open(dl_file, "rb") as f:
+ content = f.read(read_size)
- except OSError, e:
- self.log_warning(_("Error removing `%s`") % last_download, e)
+ #: Produces encoding errors, better log to other file in the future?
+ # self.log_debug("Content: %s" % content)
+ for name, rule in rules.items():
+ if isinstance(rule, basestring):
+ if rule in content:
+ return name
- else:
- self.log_info(_("File deleted: ") + self.last_download)
- self.last_download = "" #: Recheck in 0.4.10
+ elif hasattr(rule, "search"):
+ m = rule.search(content)
+ if m is not None:
+ self.last_check = m
+ return name
- def check_download(self):
- self.log_info(_("Checking downloaded file..."))
+ def _check_download(self):
+ self.log_info(_("Checking download..."))
+ self.pyfile.setCustomStatus(_("checking"))
- if self.captcha.task and not self.last_download:
- self.retry_captcha()
+ if not self.last_download:
+ if self.captcha.task:
+ self.retry_captcha()
+ else:
+ self.error(_("No file downloaded"))
- elif self.check_file({'Empty file': re.compile(r'\A((.|)(\2|\s)*)\Z')},
- delete=True):
+ elif self.scan_download({'Empty file': re.compile(r'\A((.|)(\2|\s)*)\Z')}):
+ if self.remove(self.last_download):
+ self.last_download = ""
self.error(_("Empty file"))
- elif self.get_config('chk_filesize', False) and self.info.get('size'):
- # 10485760 is 10MB, tolerance is used when comparing displayed size on the hoster website to real size
- # For example displayed size can be 1.46GB for example, but real size can be 1.4649853GB
- self.check_filesize(self.info['size'], size_tolerance=10485760)
-
else:
- self.log_info(_("File is OK"))
+ self.pyload.hookManager.dispatchEvent("download_check", self.pyfile)
+ self.check_status()
+ self.log_info(_("File is OK"))
- def check_traffic(self):
+
+ def out_of_traffic(self):
if not self.account:
- return True
+ return
traffic = self.account.get_data('trafficleft')
if traffic is None:
- return False
+ return True
elif traffic is -1:
- return True
+ return False
else:
#@TODO: Rewrite in 0.4.10
size = self.pyfile.size / 1024
self.log_info(_("Filesize: %s KiB") % size,
_("Traffic left for user `%s`: %d KiB") % (self.account.user, traffic))
- return size <= traffic
+ return size > traffic
- def check_filedupe(self):
- """
- Checks if same file was/is downloaded within same package
+ # def check_size(self, file_size, size_tolerance=1024, delete=False):
+ # """
+ # Checks the file size of the last downloaded file
- :param starting: indicates that the current download is going to start
- :raises Skip:
- """
- pack = self.pyfile.package()
+ # :param file_size: expected file size
+ # :param size_tolerance: size check tolerance
+ # """
+ # self.log_info(_("Checking file size..."))
+
+ # if not self.last_download:
+ # self.log_warning(_("No file to check"))
+ # return
- for pyfile in self.pyload.files.cache.values():
- if pyfile is self.pyfile:
- continue
+ # dl_file = encode(self.last_download)
+ # dl_size = os.stat(dl_file).st_size
- if pyfile.name != self.pyfile.name or pyfile.package().folder != pack.folder:
- continue
+ # try:
+ # if dl_size == 0:
+ # delete = True
+ # self.fail(_("Empty file"))
- if pyfile.status in (0, 5, 7, 12): #: (finished, waiting, starting, downloading)
- self.skip(pyfile.pluginname)
+ # elif file_size > 0:
+ # diff = abs(file_size - dl_size)
- dl_folder = self.pyload.config.get("general", "download_folder")
- package_folder = pack.folder if self.pyload.config.get("general", "folder_per_package") else ""
- dl_location = fs_join(dl_folder, package_folder, self.pyfile.name)
+ # if diff > size_tolerance:
+ # self.fail(_("File size mismatch | Expected file size: %s bytes | Downloaded file size: %s bytes")
+ # % (file_size, dl_size))
- if not exists(dl_location):
+ # elif diff != 0:
+ # self.log_warning(_("File size is not equal to expected download size, but does not exceed the tolerance threshold"))
+ # self.log_debug("Expected file size: %s bytes" % file_size,
+ # "Downloaded file size: %s bytes" % dl_size,
+ # "Tolerance threshold: %s bytes" % size_tolerance)
+ # else:
+ # delete = False
+ # self.log_info(_("File size match"))
+
+ # finally:
+ # if delete:
+ # self.remove(dl_file, trash=False)
+
+
+ # def check_hash(self, type, digest, delete=False):
+ # hashtype = type.strip('-').upper()
+
+ # self.log_info(_("Checking file hashsum %s...") % hashtype)
+
+ # if not self.last_download:
+ # self.log_warning(_("No file to check"))
+ # return
+
+ # dl_file = encode(self.last_download)
+
+ # try:
+ # dl_hash = digest
+ # file_hash = compute_checksum(dl_file, hashtype)
+
+ # if not file_hash:
+ # self.fail(_("Unsupported hashing algorithm: ") + hashtype)
+
+ # elif dl_hash == file_hash:
+ # delete = False
+ # self.log_info(_("File hashsum %s match") % hashtype)
+
+ # else:
+ # self.fail(_("File hashsum %s mismatch | Expected file hashsum: %s | Downloaded file hashsum: %s")
+ # % (hashtype, dl_hash, file_hash))
+ # finally:
+ # if delete:
+ # self.remove(dl_file, trash=False)
+
+
+ def check_duplicates(self):
+ """
+ Checks if same file was downloaded within same package
+
+ :raises Skip:
+ """
+ pack_folder = self.pyfile.package().folder if self.pyload.config.get("general", "folder_per_package") else ""
+ dl_folder = self.pyload.config.get("general", "download_folder")
+ dl_file = fsjoin(dl_folder, pack_folder, self.pyfile.name)
+
+ if not exists(dl_file):
return
- pyfile = self.pyload.db.findDuplicates(self.pyfile.id, package_folder, self.pyfile.name)
- if pyfile:
- self.skip(pyfile[0])
+ if os.stat(dl_file).st_size == 0:
+ if self.remove(self.last_download):
+ self.last_download = ""
+ return
- size = os.stat(dl_location).st_size
- if size >= self.pyfile.size:
- self.skip(_("File exists"))
+ if self.pyload.config.get("download", "skip_existing"):
+ plugin = self.pyload.db.findDuplicates(self.pyfile.id, pack_folder, self.pyfile.name)
+ msg = plugin[0] if plugin else _("File exists")
+ self.skip(msg)
+ else:
+ dl_n = int(re.match(r'.+(\(\d+\)|)$', self.pyfile.name).group(1).strip("()") or 1)
+ self.pyfile.name += " (%s)" % (dl_n + 1)
- #: Deprecated method, use `check_filedupe` instead (Remove in 0.4.10)
+ #: Deprecated method (Recheck in 0.4.10)
def checkForSameFiles(self, *args, **kwargs):
- if self.pyload.config.get("download", "skip_existing"):
- return self.check_filedupe()
+ pass
diff --git a/module/plugins/internal/MultiAccount.py b/module/plugins/internal/MultiAccount.py
index f9252cc10..90cb0e748 100644
--- a/module/plugins/internal/MultiAccount.py
+++ b/module/plugins/internal/MultiAccount.py
@@ -1,16 +1,12 @@
# -*- coding: utf-8 -*-
-import re
-import time
-
from module.plugins.internal.Account import Account
-from module.plugins.internal.utils import decode, remove_chars, uniqify
class MultiAccount(Account):
__name__ = "MultiAccount"
__type__ = "account"
- __version__ = "0.04"
+ __version__ = "0.05"
__status__ = "broken"
__config__ = [("activated" , "bool" , "Activated" , True ),
@@ -22,248 +18,3 @@ class MultiAccount(Account):
__description__ = """Multi-hoster account plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
- # PERIODICAL_INTERVAL = 1 * 60 * 60 #: 1 hour
- PERIODICAL_LOGIN = False
-
- DOMAIN_REPLACEMENTS = [(r'180upload\.com' , "hundredeightyupload.com"),
- (r'bayfiles\.net' , "bayfiles.com" ),
- (r'cloudnator\.com' , "shragle.com" ),
- (r'dfiles\.eu' , "depositfiles.com" ),
- (r'easy-share\.com' , "crocko.com" ),
- (r'freakshare\.net' , "freakshare.com" ),
- (r'hellshare\.com' , "hellshare.cz" ),
- (r'ifile\.it' , "filecloud.io" ),
- (r'nowdownload\.\w+', "nowdownload.sx" ),
- (r'nowvideo\.\w+' , "nowvideo.sx" ),
- (r'putlocker\.com' , "firedrive.com" ),
- (r'share-?rapid\.cz', "multishare.cz" ),
- (r'ul\.to' , "uploaded.to" ),
- (r'uploaded\.net' , "uploaded.to" ),
- (r'uploadhero\.co' , "uploadhero.com" ),
- (r'zshares\.net' , "zshare.net" ),
- (r'^1' , "one" ),
- (r'^2' , "two" ),
- (r'^3' , "three" ),
- (r'^4' , "four" ),
- (r'^5' , "five" ),
- (r'^6' , "six" ),
- (r'^7' , "seven" ),
- (r'^8' , "eight" ),
- (r'^9' , "nine" ),
- (r'^0' , "zero" )]
-
-
- def init(self):
- self.plugins = []
- self.supported = []
- self.newsupported = []
-
- self.pluginclass = None
- self.pluginmodule = None
- self.plugintype = None
-
- self.init_plugin()
-
-
- def init_plugin(self):
- plugin, self.plugintype = self.pyload.pluginManager.findPlugin(self.classname)
-
- if plugin:
- self.pluginmodule = self.pyload.pluginManager.loadModule(self.plugintype, self.classname)
- self.pluginclass = self.pyload.pluginManager.loadClass(self.plugintype, self.classname)
- else:
- self.log_warning(_("Multi-hoster feature will be deactivated due missing plugin reference"))
- self.set_config('multi', False)
-
-
- def activate(self):
- interval = self.get_config('multi_interval') * 60 * 60
- self.start_periodical(interval, threaded=True)
-
-
- def replace_domains(self, list):
- for r in self.DOMAIN_REPLACEMENTS:
- pattern, repl = r
- regex = re.compile(pattern, re.I | re.U)
- domains = [regex.sub(repl, domain) if regex.match(domain) else domain for domain in list]
-
- return domains
-
-
- def parse_domains(self, list):
- regexp = re.compile(r'^(?:https?://)?(?:www\.)?(?:\w+\.)*((?:[\d.]+|[\w\-^_]{3,63}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)',
- re.I | re.U)
-
- r'^(?:https?://)?(?:www\.)?(?:\w+\.)*((?:[\d.]+|[\w\-^_]{3,63}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)'
-
- domains = [decode(domain).strip().lower() for url in list for domain in regexp.findall(url)]
- return self.replace_domains(uniqify(domains))
-
-
- def _grab_hosters(self):
- try:
- hosterlist = self.grab_hosters(self.user, self.info['login']['password'], self.info['data'])
-
- if hosterlist and isinstance(hosterlist, list):
- domains = self.parse_domains(hosterlist)
- self.info['data']['hosters'] = sorted(domains)
-
- except Exception, e:
- self.log_warning(_("Error loading hoster list for user `%s`") % self.user, e, trace=True)
-
- finally:
- return self.info['data']['hosters']
-
-
- def grab_hosters(self, user, password, data):
- """
- Load list of supported hoster
- :return: List of domain names
- """
- raise NotImplementedError
-
-
- def periodical(self):
- if not self.info['data'].get('hosters'):
- self.log_info(_("Loading hoster list for user `%s`...") % self.user)
- else:
- self.log_info(_("Reloading hoster list for user `%s`...") % self.user)
-
- if self.PERIODICAL_LOGIN and not self.logged:
- self.relogin()
-
- hosters = self._grab_hosters()
-
- self.log_debug("Hoster list for user `%s`: %s" % (self.user, hosters))
-
- old_supported = self.supported
-
- self.supported = []
- self.newsupported = []
- self.plugins = []
-
- self._override()
-
- old_supported = [plugin for plugin in old_supported if plugin not in self.supported]
-
- if old_supported:
- self.log_debug("Unload: %s" % ", ".join(old_supported))
- for plugin in old_supported:
- self.unload_plugin(plugin)
-
- self.set_interval(self.get_config('multi_interval') * 60 * 60)
-
-
- def _override(self):
- excludedList = []
-
- if self.plugintype == "hoster":
- pluginMap = dict((name.lower(), name) for name in self.pyload.pluginManager.hosterPlugins.keys())
- accountList = [account.type.lower() for account in self.pyload.api.getAccounts(False) if account.valid and account.premium]
- else:
- pluginMap = {}
- accountList = [name[::-1].replace("Folder"[::-1], "", 1).lower()[::-1] for name in self.pyload.pluginManager.crypterPlugins.keys()]
-
- for plugin in self.plugins_cached():
- name = remove_chars(plugin, "-.")
-
- if name in accountList:
- excludedList.append(plugin)
- else:
- if name in pluginMap:
- self.supported.append(pluginMap[name])
- else:
- self.newsupported.append(plugin)
-
- if not self.supported and not self.newsupported:
- self.log_error(_("No %s loaded") % self.plugintype)
- return
-
- #: Inject plugin plugin
- self.log_debug("Overwritten %ss: %s" % (self.plugintype, ", ".join(sorted(self.supported))))
-
- for plugin in self.supported:
- hdict = self.pyload.pluginManager.plugins[self.plugintype][plugin]
- hdict['new_module'] = self.pluginmodule
- hdict['new_name'] = self.classname
-
- if excludedList:
- self.log_info(_("%ss not overwritten: %s") % (self.plugintype.capitalize(), ", ".join(sorted(excludedList))))
-
- if self.newsupported:
- plugins = sorted(self.newsupported)
-
- self.log_debug("New %ss: %s" % (self.plugintype, ", ".join(plugins)))
-
- #: Create new regexp
- regexp = r'.*(?P<DOMAIN>%s).*' % "|".join(x.replace('.', '\.') for x in plugins)
- if hasattr(self.pluginclass, "__pattern__") and isinstance(self.pluginclass.__pattern__, basestring) and "://" in self.pluginclass.__pattern__:
- regexp = r'%s|%s' % (self.pluginclass.__pattern__, regexp)
-
- self.log_debug("Regexp: %s" % regexp)
-
- hdict = self.pyload.pluginManager.plugins[self.plugintype][self.classname]
- hdict['pattern'] = regexp
- hdict['re'] = re.compile(regexp)
-
-
- def plugins_cached(self):
- if self.plugins:
- return self.plugins
-
- for _i in xrange(5):
- try:
- pluginset = self._plugin_set(self.grab_hosters())
- break
-
- except Exception, e:
- self.log_warning(e, _("Waiting 1 minute and retry"), trace=True)
- time.sleep(60)
- else:
- self.log_warning(_("No hoster list retrieved"))
- self.interval = self.PERIODICAL_INTERVAL
- return list()
-
- try:
- configmode = self.get_config('pluginmode', 'all')
- if configmode in ("listed", "unlisted"):
- pluginlist = self.get_config('pluginlist', '').replace('|', ',').replace(';', ',').split(',')
- configset = self._plugin_set(pluginlist)
-
- if configmode == "listed":
- pluginset &= configset
- else:
- pluginset -= configset
-
- except Exception, e:
- self.log_error(e)
-
- self.plugins = list(pluginset)
-
- return self.plugins
-
-
- # def unload_plugin(self, plugin):
- # hdict = self.pyload.pluginManager.plugins[self.plugintype][plugin]
- # if "module" in hdict:
- # hdict.pop('module', None)
-
- # if "new_module" in hdict:
- # hdict.pop('new_module', None)
- # hdict.pop('new_name', None)
-
-
- # def deactivate(self):
- # """
- # Remove override for all plugins. Scheduler job is removed by hookmanager
- # """
- # for plugin in self.supported:
- # self.unload_plugin(plugin)
-
- #: Reset pattern
- # hdict = self.pyload.pluginManager.plugins[self.plugintype][self.classname]
-
- # hdict['pattern'] = getattr(self.pluginclass, "__pattern__", r'^unmatchable$')
- # hdict['re'] = re.compile(hdict['pattern'])
diff --git a/module/plugins/internal/MultiCrypter.py b/module/plugins/internal/MultiCrypter.py
index 576d6d4b4..c924ee916 100644
--- a/module/plugins/internal/MultiCrypter.py
+++ b/module/plugins/internal/MultiCrypter.py
@@ -6,14 +6,13 @@ from module.plugins.internal.SimpleCrypter import SimpleCrypter
class MultiCrypter(SimpleCrypter):
__name__ = "MultiCrypter"
__type__ = "hoster"
- __version__ = "0.06"
+ __version__ = "0.07"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
- __config__ = [("activated" , "bool", "Activated" , True),
- ("use_premium" , "bool", "Use premium account if available" , True),
- ("use_subfolder" , "bool", "Save package to subfolder" , True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("use_premium" , "bool" , "Use premium account if available", True ),
+ ("folder_per_package", "Default;Yes;No", "Create folder for each package" , "Default")]
__description__ = """Multi decrypter plugin"""
__license__ = "GPLv3"
diff --git a/module/plugins/internal/MultiHoster.py b/module/plugins/internal/MultiHoster.py
index cbbfcd6dc..5bd4527ae 100644
--- a/module/plugins/internal/MultiHoster.py
+++ b/module/plugins/internal/MultiHoster.py
@@ -3,14 +3,14 @@
import re
from module.plugins.internal.Plugin import Fail
-from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-from module.plugins.internal.utils import encode, replace_patterns, set_cookie, set_cookies
+from module.plugins.internal.SimpleHoster import SimpleHoster
+from module.plugins.internal.misc import encode, replace_patterns, set_cookie, set_cookies
class MultiHoster(SimpleHoster):
__name__ = "MultiHoster"
__type__ = "hoster"
- __version__ = "0.58"
+ __version__ = "0.59"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
@@ -68,7 +68,7 @@ class MultiHoster(SimpleHoster):
super(MultiHoster, self)._process(thread)
except Fail, e:
- if self.get_config("revertfailed", True) and \
+ if self.config.get("revertfailed", True) and \
self.pyload.pluginManager.hosterPlugins[self.classname].get('new_module'):
hdict = self.pyload.pluginManager.hosterPlugins[self.classname]
diff --git a/module/plugins/internal/Notifier.py b/module/plugins/internal/Notifier.py
index d0fd28906..e9f1fab74 100644
--- a/module/plugins/internal/Notifier.py
+++ b/module/plugins/internal/Notifier.py
@@ -3,24 +3,27 @@
import time
from module.plugins.internal.Addon import Addon, Expose
-from module.plugins.internal.utils import isiterable
+from module.plugins.internal.misc import encode, isiterable
class Notifier(Addon):
__name__ = "Notifier"
__type__ = "hook"
- __version__ = "0.04"
+ __version__ = "0.07"
__status__ = "testing"
- __config__ = [("activated" , "bool", "Activated" , False),
- ("notifycaptcha" , "bool", "Notify captcha request" , True ),
- ("notifypackage" , "bool", "Notify package finished" , True ),
- ("notifyprocessed", "bool", "Notify packages processed" , True ),
- ("notifyupdate" , "bool", "Notify plugin updates" , True ),
- ("notifyexit" , "bool", "Notify pyLoad shutdown" , True ),
- ("sendtimewait" , "int" , "Timewait in seconds between notifications", 5 ),
- ("sendpermin" , "int" , "Max notifications per minute" , 12 ),
- ("ignoreclient" , "bool", "Send notifications if client is connected", False)]
+ __config__ = [("activated" , "bool", "Activated" , False),
+ ("captcha" , "bool", "Notify captcha request" , True ),
+ ("reconnection" , "bool", "Notify reconnection request" , False),
+ ("downloadfinished", "bool", "Notify download finished" , True ),
+ ("downloadfailed" , "bool", "Notify download failed" , True ),
+ ("packagefinished" , "bool", "Notify package finished" , True ),
+ ("packagefailed" , "bool", "Notify package failed" , True ),
+ ("update" , "bool", "Notify pyLoad update" , False),
+ ("exit" , "bool", "Notify pyLoad shutdown/restart" , False),
+ ("sendinterval" , "int" , "Interval in seconds between notifications", 1 ),
+ ("sendpermin" , "int" , "Max notifications per minute" , 60 ),
+ ("ignoreclient" , "bool", "Send notifications if client is connected", True )]
__description__ = """Base notifier plugin"""
__license__ = "GPLv3"
@@ -29,21 +32,29 @@ class Notifier(Addon):
def init(self):
self.event_map = {'allDownloadsProcessed': "all_downloads_processed",
- 'plugin_updated' : "plugin_updated" }
+ 'pyload_updated' : "pyload_updated" }
self.last_notify = 0
self.notifications = 0
- def plugin_updated(self, type_plugins):
- if not self.get_config('notifyupdate'):
+ def get_key(self):
+ raise NotImplementedError
+
+
+ def send(self, event, msg, key):
+ raise NotImplementedError
+
+
+ def pyload_updated(self, etag):
+        if not self.config.get('update', False):
return
- self.notify(_("Plugins updated"), str(type_plugins))
+ self.notify(_("pyLoad updated"), etag)
def exit(self):
- if not self.get_config('notifyexit'):
+        if not self.config.get('exit', False):
return
if self.pyload.do_restart:
@@ -53,65 +64,98 @@ class Notifier(Addon):
def captcha_task(self, task):
- if not self.get_config('notifycaptcha'):
+ if not self.config.get('captcha', True):
return
self.notify(_("Captcha"), _("New request waiting user input"))
+ def before_reconnect(self, ip):
+ if not self.config.get('reconnection', False):
+ return
+
+ self.notify(_("Waiting reconnection"), _("Current IP: %s") % ip)
+
+
+ def after_reconnect(self, ip, oldip):
+ if not self.config.get('reconnection', False):
+ return
+
+        self.notify(_("Reconnection succeeded"), _("Current IP: %s") % ip)
+
+
def package_finished(self, pypack):
- if self.get_config('notifypackage'):
- self.notify(_("Package finished"), pypack.name)
+ if not self.config.get('packagefinished', True):
+ return
+ self.notify(_("Package finished"), pypack.name)
- def all_downloads_processed(self):
- if not self.get_config('notifyprocessed'):
+
+ def package_failed(self, pypack):
+ if not self.config.get('packagefailed', True):
return
- if any(True for pdata in self.pyload.api.getQueue() if pdata.linksdone < pdata.linkstotal):
- self.notify(_("Package failed"), _("One or more packages was not completed successfully"))
- else:
- self.notify(_("All packages finished"))
+ self.notify(_("Package failed"), pypack.name)
- def get_key(self):
- raise NotImplementedError
+ def download_finished(self, pyfile):
+        if not self.config.get('downloadfinished', True):
+ return
+ self.notify(_("Download finished"), pyfile.name)
- def send(self, event, msg, key):
- raise NotImplementedError
+
+ def download_failed(self, pyfile):
+        if not self.config.get('downloadfailed', True):
+ return
+
+ self.notify(_("Download failed"), pyfile.name)
+
+
+ def all_downloads_processed(self):
+ self.notify(_("All downloads processed"))
+
+
+ def all_downloads_finished(self):
+ self.notify(_("All downloads finished"))
@Expose
- def notify(self, event, msg="", key=None):
+ def notify(self, event, msg=None, key=None):
key = key or self.get_key()
if not key or isiterable(key) and not all(key):
return
- if self.pyload.isClientConnected() and not self.get_config('ignoreclient'):
+ if isiterable(msg):
+ msg = " | ".join(encode(a).strip() for a in msg if a)
+ else:
+ msg = encode(msg)
+
+        if self.pyload.isClientConnected() and not self.config.get('ignoreclient', True):
return
elapsed_time = time.time() - self.last_notify
- if elapsed_time < self.get_config("sendtimewait"):
+ if elapsed_time < self.config.get("sendinterval", 1):
return
elif elapsed_time > 60:
self.notifications = 0
- elif self.notifications >= self.get_config("sendpermin"):
+ elif self.notifications >= self.config.get("sendpermin", 60):
return
- self.log_info(_("Sending notification..."))
+ self.log_debug("Sending notification...")
try:
- resp = self.send(event, msg, key)
+ self.send(event, msg, key)
except Exception, e:
self.log_error(_("Error sending notification"), e)
return False
else:
+ self.log_debug("Notification sent")
return True
finally:
diff --git a/module/plugins/internal/OCR.py b/module/plugins/internal/OCR.py
index b4e28ca0f..217305459 100644
--- a/module/plugins/internal/OCR.py
+++ b/module/plugins/internal/OCR.py
@@ -14,13 +14,13 @@ import subprocess
# import tempfile
from module.plugins.internal.Plugin import Plugin
-from module.plugins.internal.utils import fs_join
+from module.plugins.internal.misc import encode, fsjoin
class OCR(Plugin):
__name__ = "OCR"
__type__ = "ocr"
- __version__ = "0.21"
+ __version__ = "0.22"
__status__ = "stable"
__description__ = """OCR base plugin"""
@@ -28,20 +28,20 @@ class OCR(Plugin):
__authors__ = [("pyLoad Team", "admin@pyload.org")]
- def __init__(self, plugin):
- self._init(plugin.pyload)
- self.plugin = plugin
+ def __init__(self, pyfile):
+ self._init(pyfile.m.core)
+ self.pyfile = pyfile
self.init()
def _log(self, level, plugintype, pluginname, messages):
messages = (self.__name__,) + messages
- return self.plugin._log(level, plugintype, self.plugin.__name__, messages)
+ return self.pyfile.plugin._log(level, plugintype, self.pyfile.plugin.__name__, messages)
def load_image(self, image):
- self.image = Image.open(image)
- self.pixels = self.image.load()
+ self.img = Image.open(image)
+ self.pixels = self.img.load()
self.result_captcha = ""
@@ -53,29 +53,36 @@ class OCR(Plugin):
def threshold(self, value):
- self.image = self.image.point(lambda a: a * value + 10)
+ self.img = self.img.point(lambda a: a * value + 10)
- def run(self, command):
+ def call_cmd(self, command, *args, **kwargs):
"""
Run a command
"""
- popen = subprocess.Popen(command, bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        call = [command] + list(args)
+ self.log_debug("EXECUTE " + " ".join(call))
+
+ call = map(encode, call)
+ popen = subprocess.Popen(call, bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
popen.wait()
+
output = popen.stdout.read() + " | " + popen.stderr.read()
+
popen.stdout.close()
popen.stderr.close()
+
self.log_debug("Tesseract ReturnCode %d" % popen.returncode, "Output: %s" % output)
def run_tesser(self, subset=False, digits=True, lowercase=True, uppercase=True, pagesegmode=None):
# tmpTif = tempfile.NamedTemporaryFile(suffix=".tif")
try:
- tmpTif = open(fs_join("tmp", "tmpTif_%s.tif" % self.classname), "wb")
+ tmpTif = open(fsjoin("tmp", "tmpTif_%s.tif" % self.classname), "wb")
tmpTif.close()
# tmpTxt = tempfile.NamedTemporaryFile(suffix=".txt")
- tmpTxt = open(fs_join("tmp", "tmpTxt_%s.txt" % self.classname), "wb")
+ tmpTxt = open(fsjoin("tmp", "tmpTxt_%s.txt" % self.classname), "wb")
tmpTxt.close()
except IOError, e:
@@ -83,21 +90,21 @@ class OCR(Plugin):
return
self.log_debug("Saving tiff...")
- self.image.save(tmpTif.name, 'TIFF')
+ self.img.save(tmpTif.name, 'TIFF')
if os.name is "nt":
- tessparams = [os.path.join(pypath, "tesseract", "tesseract.exe")]
+ command = os.path.join(pypath, "tesseract", "tesseract.exe")
else:
- tessparams = ["tesseract"]
+ command = "tesseract"
- tessparams.extend([os.path.abspath(tmpTif.name), os.path.abspath(tmpTxt.name).replace(".txt", "")])
+ args = [os.path.abspath(tmpTif.name), os.path.abspath(tmpTxt.name).replace(".txt", "")]
if pagesegmode:
- tessparams.extend(["-psm", str(pagesegmode)])
+ args.extend(["-psm", str(pagesegmode)])
if subset and (digits or lowercase or uppercase):
# tmpSub = tempfile.NamedTemporaryFile(suffix=".subset")
- with open(fs_join("tmp", "tmpSub_%s.subset" % self.classname), "wb") as tmpSub:
+ with open(fsjoin("tmp", "tmpSub_%s.subset" % self.classname), "wb") as tmpSub:
tmpSub.write("tessedit_char_whitelist ")
if digits:
@@ -108,11 +115,11 @@ class OCR(Plugin):
tmpSub.write("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
tmpSub.write("\n")
- tessparams.append("nobatch")
- tessparams.append(os.path.abspath(tmpSub.name))
+ args.append("nobatch")
+ args.append(os.path.abspath(tmpSub.name))
self.log_debug("Running tesseract...")
- self.run(tessparams)
+ self.call_cmd(command, *args)
self.log_debug("Reading txt...")
try:
@@ -123,30 +130,28 @@ class OCR(Plugin):
self.result_captcha = ""
self.log_info(_("OCR result: ") + self.result_captcha)
- try:
- os.remove(tmpTif.name)
- os.remove(tmpTxt.name)
- if subset and (digits or lowercase or uppercase):
- os.remove(tmpSub.name)
- except OSError, e:
- self.log_warning(e)
+ self.remove(tmpTif.name, trash=False)
+ self.remove(tmpTxt.name, trash=False)
+
+ if subset and (digits or lowercase or uppercase):
+ self.remove(tmpSub.name, trash=False)
- def recognize(self, name):
+ def recognize(self, image):
raise NotImplementedError
def to_greyscale(self):
- if self.image.mode != 'L':
- self.image = self.image.convert('L')
+ if self.img.mode != 'L':
+ self.img = self.img.convert('L')
- self.pixels = self.image.load()
+ self.pixels = self.img.load()
def eval_black_white(self, limit):
- self.pixels = self.image.load()
- w, h = self.image.size
+ self.pixels = self.img.load()
+ w, h = self.img.size
for x in xrange(w):
for y in xrange(h):
if self.pixels[x, y] > limit:
@@ -158,38 +163,38 @@ class OCR(Plugin):
def clean(self, allowed):
pixels = self.pixels
- w, h = self.image.size
+ w, h = self.img.size
for x in xrange(w):
for y in xrange(h):
- if pixels[x, y] == 255:
+                if pixels[x, y] == 255:
continue
#: No point in processing white pixels since we only want to remove black pixel
count = 0
try:
- if pixels[x - 1, y - 1] != 255:
+                    if pixels[x - 1, y - 1] != 255:
count += 1
- if pixels[x - 1, y] != 255:
+                    if pixels[x - 1, y] != 255:
count += 1
- if pixels[x - 1, y + 1] != 255:
+                    if pixels[x - 1, y + 1] != 255:
count += 1
- if pixels[x, y + 1] != 255:
+                    if pixels[x, y + 1] != 255:
count += 1
- if pixels[x + 1, y + 1] != 255:
+                    if pixels[x + 1, y + 1] != 255:
count += 1
- if pixels[x + 1, y] != 255:
+                    if pixels[x + 1, y] != 255:
count += 1
- if pixels[x + 1, y - 1] != 255:
+                    if pixels[x + 1, y - 1] != 255:
count += 1
- if pixels[x, y - 1] != 255:
+                    if pixels[x, y - 1] != 255:
count += 1
except Exception:
@@ -203,7 +208,7 @@ class OCR(Plugin):
#: Second pass: this time set all 1's to 255 (white)
for x in xrange(w):
for y in xrange(h):
- if pixels[x, y] == 1:
+                if pixels[x, y] == 1:
pixels[x, y] = 255
self.pixels = pixels
@@ -213,12 +218,12 @@ class OCR(Plugin):
"""
Rotate by checking each angle and guess most suitable
"""
- w, h = self.image.size
+ w, h = self.img.size
pixels = self.pixels
for x in xrange(w):
for y in xrange(h):
- if pixels[x, y] == 0:
+                if pixels[x, y] == 0:
pixels[x, y] = 155
highest = {}
@@ -226,15 +231,15 @@ class OCR(Plugin):
for angle in xrange(-45, 45):
- tmpimage = self.image.rotate(angle)
+ tmpimage = self.img.rotate(angle)
pixels = tmpimage.load()
- w, h = self.image.size
+ w, h = self.img.size
for x in xrange(w):
for y in xrange(h):
- if pixels[x, y] == 0:
+                    if pixels[x, y] == 0:
pixels[x, y] = 255
count = {}
@@ -242,14 +247,14 @@ class OCR(Plugin):
for x in xrange(w):
count[x] = 0
for y in xrange(h):
- if pixels[x, y] == 155:
+                    if pixels[x, y] == 155:
count[x] += 1
sum = 0
cnt = 0
for x in count.values():
- if x != 0:
+                if x != 0:
sum += x
cnt += 1
@@ -270,22 +275,22 @@ class OCR(Plugin):
hkey = key
hvalue = value
- self.image = self.image.rotate(hkey)
- pixels = self.image.load()
+ self.img = self.img.rotate(hkey)
+ pixels = self.img.load()
for x in xrange(w):
for y in xrange(h):
- if pixels[x, y] == 0:
+                if pixels[x, y] == 0:
pixels[x, y] = 255
- if pixels[x, y] == 155:
+                if pixels[x, y] == 155:
pixels[x, y] = 0
self.pixels = pixels
def split_captcha_letters(self):
- captcha = self.image
+ captcha = self.img
started = False
letters = []
width, height = captcha.size
@@ -295,7 +300,7 @@ class OCR(Plugin):
for x in xrange(width):
black_pixel_in_col = False
for y in xrange(height):
- if pixels[x, y] != 255:
+                if pixels[x, y] != 255:
if not started:
started = True
firstX = x
diff --git a/module/plugins/internal/Plugin.py b/module/plugins/internal/Plugin.py
index bf591d482..71137e496 100644
--- a/module/plugins/internal/Plugin.py
+++ b/module/plugins/internal/Plugin.py
@@ -4,23 +4,30 @@ from __future__ import with_statement
import inspect
import os
+import re
if os.name is not "nt":
import grp
import pwd
import pycurl
+try:
+ import send2trash
+except ImportError:
+ pass
-import module.plugins.internal.utils as utils
+import module.plugins.internal.misc as utils
+from module.network.RequestFactory import getRequest as get_request
from module.plugins.Plugin import Abort, Fail, Reconnect, Retry, SkipDownload as Skip #@TODO: Remove in 0.4.10
-from module.plugins.internal.utils import *
+from module.plugins.internal.misc import (Config, DB, decode, encode, exists, fixurl, fsjoin,
+ format_exc, html_unescape, parse_html_header)
class Plugin(object):
__name__ = "Plugin"
__type__ = "plugin"
- __version__ = "0.61"
+ __version__ = "0.62"
__status__ = "stable"
__config__ = [] #: [("name", "type", "desc", "default")]
@@ -46,10 +53,20 @@ class Plugin(object):
def _init(self, core):
- self.pyload = core
- self.info = {} #: Provide information in dict here
- self.req = None #: Browser instance, see `network.Browser`
- self.last_html = None
+ #: Internal modules
+ self.pyload = core
+ self.db = DB(self)
+ self.config = Config(self)
+
+ #: Provide information in dict here
+ self.info = {}
+
+ #: Browser instance, see `network.Browser`
+ self.req = self.pyload.requestFactory.getRequest(self.classname)
+
+ #: Last loaded html
+ self.last_html = ""
+ self.last_header = {}
def init(self):
@@ -71,124 +88,70 @@ class Plugin(object):
def log_debug(self, *args, **kwargs):
self._log("debug", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
- self.print_exc()
+ self._print_exc()
def log_info(self, *args, **kwargs):
self._log("info", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
- self.print_exc()
+ self._print_exc()
def log_warning(self, *args, **kwargs):
self._log("warning", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
- self.print_exc()
+ self._print_exc()
def log_error(self, *args, **kwargs):
self._log("error", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace', True):
- self.print_exc()
+ self._print_exc()
def log_critical(self, *args, **kwargs):
self._log("critical", self.__type__, self.__name__, args)
if kwargs.get('trace', True):
- self.print_exc()
+ self._print_exc()
- def print_exc(self):
+ def _print_exc(self):
frame = inspect.currentframe()
print format_exc(frame.f_back)
del frame
- def set_permissions(self, path):
- if not os.path.exists(path):
- return
-
- try:
- if self.pyload.config.get("permission", "change_file"):
- if os.path.isfile(path):
- os.chmod(path, int(self.pyload.config.get("permission", "file"), 8))
-
- elif os.path.isdir(path):
- os.chmod(path, int(self.pyload.config.get("permission", "folder"), 8))
-
- except OSError, e:
- self.log_warning(_("Setting path mode failed"), e)
-
- try:
- if os.name is not "nt" and self.pyload.config.get("permission", "change_dl"):
- uid = pwd.getpwnam(self.pyload.config.get("permission", "user"))[2]
- gid = grp.getgrnam(self.pyload.config.get("permission", "group"))[2]
- os.chown(path, uid, gid)
-
- except OSError, e:
- self.log_warning(_("Setting owner and group failed"), e)
-
-
- def set_config(self, option, value, plugin=None):
- """
- Set config value for current plugin
-
- :param option:
- :param value:
- :return:
- """
- self.pyload.api.setConfigValue(plugin or self.classname, option, value, section="plugin")
-
-
- def get_config(self, option, default="", plugin=None):
- """
- Returns config value for current plugin
-
- :param option:
- :return:
- """
+ def remove(self, path, trash=False): #@TODO: Change to `trash=True` in 0.4.10
try:
- return self.pyload.config.getPlugin(plugin or self.classname, option)
+ remove(path, trash)
- except KeyError:
- self.log_debug("Config option `%s` not found, use default `%s`" % (option, default or None)) #@TODO: Restore to `log_warning` in 0.4.10
- return default
+ except (NameError, OSError), e:
+ self.log_warning(_("Error removing `%s`") % os.path.abspath(path), e)
+ return False
-
- def store(self, key, value):
- """
- Saves a value persistently to the database
- """
- value = map(decode, value) if isiterable(value) else decode(value)
- entry = json.dumps(value).encode('base64')
- self.pyload.db.setStorage(self.classname, key, entry)
+ else:
+ self.log_info(_("Path deleted: ") + os.path.abspath(path))
+ return True
- def retrieve(self, key=None, default=None):
- """
- Retrieves saved value or dict of all saved entries if key is None
- """
- entry = self.pyload.db.getStorage(self.classname, key)
+ def set_permissions(self, path):
+ path = encode(path)
- if key:
- if entry is None:
- value = default
- else:
- value = json.loads(entry.decode('base64'))
- else:
- if not entry:
- value = default
- else:
- value = dict((k, json.loads(v.decode('base64'))) for k, v in value.items())
+ if not exists(path):
+ return
- return value
+ file_perms = False
+ dl_perms = False
+ if self.pyload.config.get("permission", "change_file"):
+ permission = self.pyload.config.get("permission", "folder" if os.path.isdir(path) else "file")
+ mode = int(permission, 8)
+ os.chmod(path, mode)
- def delete(self, key):
- """
- Delete entry in db
- """
- self.pyload.db.delStorage(self.classname, key)
+ if os.name is not "nt" and self.pyload.config.get("permission", "change_dl"):
+ uid = pwd.getpwnam(self.pyload.config.get("permission", "user"))[2]
+ gid = grp.getgrnam(self.pyload.config.get("permission", "group"))[2]
+ os.chown(path, uid, gid)
def fail(self, msg):
@@ -214,13 +177,16 @@ class Plugin(object):
"""
if self.pyload.debug:
self.log_debug("LOAD URL " + url,
- *["%s=%s" % (key, safe_format(val, self.info['login']['password']) if self.__type__ == "account" else val)
- for key, val in locals().items() if key not in ("self", "url", "_[1]")])
+ *["%s=%s" % (key, value) for key, value in locals().items()
+ if key not in ("self", "url", "_[1]")])
url = fixurl(url, unquote=True) #: Recheck in 0.4.10
- if req is None:
- req = self.req or self.pyload.requestFactory.getRequest(self.classname)
+ if req is False:
+ req = get_request()
+
+ elif not req:
+ req = self.req
#@TODO: Move to network in 0.4.10
if isinstance(cookies, list):
@@ -240,7 +206,7 @@ class Plugin(object):
req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
elif type(redirect) is int:
- maxredirs = self.get_config("maxredirs", default=5, plugin="UserAgentSwitcher")
+ maxredirs = int(self.pyload.api.getConfigValue("UserAgentSwitcher", "maxredirs", "plugin")) or 5 #@TODO: Remove `int` in 0.4.10
req.http.c.setopt(pycurl.MAXREDIRS, maxredirs)
#@TODO: Move to network in 0.4.10
@@ -257,8 +223,8 @@ class Plugin(object):
frame = inspect.currentframe()
try:
- framefile = fs_join("tmp", self.classname, "%s_line%s.dump.html" %
- (frame.f_back.f_code.co_name, frame.f_back.f_lineno))
+ framefile = fsjoin("tmp", self.classname, "%s_line%s.dump.html"
+ % (frame.f_back.f_code.co_name, frame.f_back.f_lineno))
if not exists(os.path.join("tmp", self.classname)):
os.makedirs(os.path.join("tmp", self.classname))
@@ -272,33 +238,16 @@ class Plugin(object):
finally:
del frame #: Delete the frame or it wont be cleaned
- if not just_header:
- return html
-
- else:
- #@TODO: Move to network in 0.4.10
- header = {'code': req.code}
-
- for line in html.splitlines():
- line = line.strip()
- if not line or ":" not in line:
- continue
-
- key, none, value = line.partition(":")
-
- key = key.strip().lower()
- value = value.strip()
+ #@TODO: Move to network in 0.4.10
+ header = {'code': req.code}
+ header.update(parse_html_header(req.http.header))
- if key in header:
- header_key = header.get(key)
- if type(header_key) is list:
- header_key.append(value)
- else:
- header[key] = [header_key, value]
- else:
- header[key] = value
+ self.last_header = header
+ if just_header:
return header
+ else:
+ return html
def clean(self):
diff --git a/module/plugins/internal/SevenZip.py b/module/plugins/internal/SevenZip.py
index bf33332ea..a8306f393 100644
--- a/module/plugins/internal/SevenZip.py
+++ b/module/plugins/internal/SevenZip.py
@@ -5,13 +5,13 @@ import re
import subprocess
from module.plugins.internal.UnRar import UnRar, ArchiveError, CRCError, PasswordError
-from module.plugins.internal.utils import fs_join, renice
+from module.plugins.internal.misc import encode, fsjoin, renice
class SevenZip(UnRar):
__name__ = "SevenZip"
__type__ = "extractor"
- __version__ = "0.18"
+ __version__ = "0.19"
__status__ = "testing"
__description__ = """7-Zip extractor plugin"""
@@ -20,19 +20,18 @@ class SevenZip(UnRar):
("Michael Nowak" , None )]
- CMD = "7z"
- EXTENSIONS = [".7z", ".xz", ".zip", ".gz", ".gzip", ".tgz", ".bz2", ".bzip2",
- ".tbz2", ".tbz", ".tar", ".wim", ".swm", ".lzma", ".rar", ".cab",
- ".arj", ".z", ".taz", ".cpio", ".rpm", ".deb", ".lzh", ".lha",
- ".chm", ".chw", ".hxs", ".iso", ".msi", ".doc", ".xls", ".ppt",
- ".dmg", ".xar", ".hfs", ".exe", ".ntfs", ".fat", ".vhd", ".mbr",
- ".squashfs", ".cramfs", ".scap"]
+ CMD = "7z"
+ EXTENSIONS = ["7z", "xz", "zip", "gz", "gzip", "tgz", "bz2", "bzip2", "tbz2",
+ "tbz", "tar", "wim", "swm", "lzma", "rar", "cab", "arj", "z",
+ "taz", "cpio", "rpm", "deb", "lzh", "lha", "chm", "chw", "hxs",
+ "iso", "msi", "doc", "xls", "ppt", "dmg", "xar", "hfs", "exe",
+ "ntfs", "fat", "vhd", "mbr", "squashfs", "cramfs", "scap"]
#@NOTE: there are some more uncovered 7z formats
- re_filelist = re.compile(r'([\d\:]+)\s+([\d\:]+)\s+([\w\.]+)\s+(\d+)\s+(\d+)\s+(.+)')
- re_wrongpwd = re.compile(r'(Can not open encrypted archive|Wrong password|Encrypted\s+\=\s+\+)', re.I)
- re_wrongcrc = re.compile(r'CRC Failed|Can not open file', re.I)
- re_version = re.compile(r'7-Zip\s(?:\[64\]\s)?(\d+\.\d+)', re.I)
+ _RE_FILES = re.compile(r'([\d\:]+)\s+([\d\:]+)\s+([\w\.]+)\s+(\d+)\s+(\d+)\s+(.+)')
+ _RE_BADPWD = re.compile(r'(Can not open encrypted archive|Wrong password|Encrypted\s+\=\s+\+)', re.I)
+ _RE_BADCRC = re.compile(r'CRC Failed|Can not open file', re.I)
+ _RE_VERSION = re.compile(r'7-Zip\s(?:\[64\]\s)?(\d+\.\d+)', re.I)
@classmethod
@@ -48,7 +47,7 @@ class SevenZip(UnRar):
return False
else:
- m = cls.re_version.search(out)
+ m = cls._RE_VERSION.search(out)
if m is not None:
cls.VERSION = m.group(1)
@@ -60,33 +59,33 @@ class SevenZip(UnRar):
p = self.call_cmd("l", "-slt", self.target)
out, err = p.communicate()
- if self.re_wrongpwd.search(out):
+ if self._RE_BADPWD.search(out):
raise PasswordError
- elif self.re_wrongpwd.search(err):
+ elif self._RE_BADPWD.search(err):
raise PasswordError
- elif self.re_wrongcrc.search(out):
+ elif self._RE_BADCRC.search(out):
raise CRCError(_("Header protected"))
- elif self.re_wrongcrc.search(err):
+ elif self._RE_BADCRC.search(err):
raise CRCError(err)
def extract(self, password=None):
command = "x" if self.fullpath else "e"
- p = self.call_cmd(command, '-o' + self.out, self.target, password=password)
+ p = self.call_cmd(command, '-o' + self.dest, self.target, password=password)
#: Communicate and retrieve stderr
- self._progress(p)
+ self.progress(p)
err = p.stderr.read().strip()
if err:
- if self.re_wrongpwd.search(err):
+ if self._RE_BADPWD.search(err):
raise PasswordError
- elif self.re_wrongcrc.search(err):
+ elif self._RE_BADCRC.search(err):
raise CRCError(err)
else: #: Raise error if anything is on stderr
@@ -95,8 +94,6 @@ class SevenZip(UnRar):
if p.returncode > 1:
raise ArchiveError(_("Process return code: %d") % p.returncode)
- self.files = self.list(password)
-
def list(self, password=None):
command = "l" if self.fullpath else "l"
@@ -111,9 +108,9 @@ class SevenZip(UnRar):
raise ArchiveError(_("Process return code: %d") % p.returncode)
result = set()
- for groups in self.re_filelist.findall(out):
+ for groups in self._RE_FILES.findall(out):
f = groups[-1].strip()
- result.add(fs_join(self.out, f))
+ result.add(fsjoin(self.dest, f))
return list(result)
@@ -133,9 +130,9 @@ class SevenZip(UnRar):
#@NOTE: return codes are not reliable, some kind of threading, cleanup whatever issue
call = [self.CMD, command] + args + list(xargs)
+ self.log_debug("EXECUTE " + " ".join(call))
- self.log_debug(" ".join(call))
-
+ call = map(encode, call)
p = subprocess.Popen(call, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
renice(p.pid, self.priority)
diff --git a/module/plugins/internal/SimpleCrypter.py b/module/plugins/internal/SimpleCrypter.py
index 5a9bd5c84..97d7a660a 100644
--- a/module/plugins/internal/SimpleCrypter.py
+++ b/module/plugins/internal/SimpleCrypter.py
@@ -4,8 +4,8 @@ import re
from module.network.HTTPRequest import BadHeader
from module.network.RequestFactory import getURL as get_url
-from module.plugins.internal.Crypter import Crypter, create_getInfo, parse_fileInfo
-from module.plugins.internal.utils import parse_name, replace_patterns, set_cookie, set_cookies
+from module.plugins.internal.Crypter import Crypter
+from module.plugins.internal.misc import parse_name, replace_patterns, set_cookie, set_cookies
class SimpleCrypter(Crypter):
@@ -15,11 +15,10 @@ class SimpleCrypter(Crypter):
__status__ = "testing"
__pattern__ = r'^unmatchable$'
- __config__ = [("activated" , "bool", "Activated" , True),
- ("use_premium" , "bool", "Use premium account if available" , True),
- ("use_subfolder" , "bool", "Save package to subfolder" , True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package" , True),
- ("max_wait" , "int" , "Reconnect if waiting time is greater than minutes", 10 )]
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("use_premium" , "bool" , "Use premium account if available" , True ),
+ ("folder_per_package", "Default;Yes;No", "Create folder for each package" , "Default"),
+ ("max_wait" , "int" , "Reconnect if waiting time is greater than minutes", 10 )]
__description__ = """Simple decrypter plugin"""
__license__ = "GPLv3"
@@ -70,8 +69,8 @@ class SimpleCrypter(Crypter):
PAGES_PATTERN = None
NAME_PATTERN = None
- OFFLINE_PATTERN = None
- TEMP_OFFLINE_PATTERN = None
+ OFFLINE_PATTERN = r'[^\w](404\s|[Ii]nvalid|[Oo]ffline|[Dd]elet|[Rr]emov|([Nn]o(t|thing)?|sn\'t) (found|(longer )?(available|exist)))'
+ TEMP_OFFLINE_PATTERN = r'[^\w](503\s|[Mm]aint(e|ai)nance|[Tt]emp([.-]|orarily)|[Mm]irror)'
WAIT_PATTERN = None
PREMIUM_ONLY_PATTERN = None
@@ -132,7 +131,7 @@ class SimpleCrypter(Crypter):
if self.account:
self.req = self.pyload.requestFactory.getRequest(account_name, self.account.user)
- self.premium = self.account.info['data']['premium'] #@NOTE: Avoid one unnecessary get_info call by `self.account.premium` here
+ self.premium = self.account.info['data']['premium'] #@NOTE: Don't call get_info here to reduce overhead
else:
self.req = self.pyload.requestFactory.getRequest(account_name)
self.premium = False
@@ -149,26 +148,24 @@ class SimpleCrypter(Crypter):
def handle_direct(self, pyfile):
- redirect = None
- maxredirs = self.get_config("maxredirs", default=10, plugin="UserAgentSwitcher")
+        maxredirs = int(self.pyload.api.getConfigValue("UserAgentSwitcher", "maxredirs", "plugin")) or 10  #@TODO: Remove `int` in 0.4.10
+ redirect = None
for i in xrange(maxredirs):
redirect = redirect or pyfile.url
self.log_debug("Redirect #%d to: %s" % (i, redirect))
- data = self.load(redirect)
- header = dict(re.findall(r"(?P<name>.+?): (?P<value>.+?)\r?\n", self.req.http.header))
- #Ugly, but there is no direct way to fetch headers AND data
- location = header.get('location')
+ html = self.load(redirect)
+ location = self.last_header.get('location')
if location:
redirect = location
else:
- self.data = data
+ self.data = html
self.links.extend(self.get_links())
return
else:
- self.log_error(_("Too many redirects"))
+ self.log_warning(_("Too many redirects"))
def preload(self):
@@ -282,8 +279,10 @@ class SimpleCrypter(Crypter):
def check_errors(self):
+ self.log_info(_("Checking for link errors..."))
+
if not self.data:
- self.log_debug("No data to check")
+ self.log_warning(_("No data to check"))
return
if self.IP_BLOCKED_PATTERN and re.search(self.IP_BLOCKED_PATTERN, self.data):
@@ -311,33 +310,40 @@ class SimpleCrypter(Crypter):
self.info['error'] = errmsg
self.log_warning(errmsg)
- if re.search('limit|wait|slot', errmsg, re.I):
+ if re.search(self.TEMP_OFFLINE_PATTERN, errmsg):
+ self.temp_offline()
+
+ elif re.search(self.OFFLINE_PATTERN, errmsg):
+ self.offline()
+
+ elif re.search(r'limit|wait|slot', errmsg, re.I):
wait_time = parse_time(errmsg)
- self.wait(wait_time, reconnect=wait_time > self.get_config("max_wait", 10) * 60)
+ self.wait(wait_time, reconnect=wait_time > self.config.get("max_wait", 10) * 60)
self.restart(_("Download limit exceeded"))
- elif re.search('country|ip|region|nation', errmsg, re.I):
+ elif re.search(r'country|ip|region|nation', errmsg, re.I):
self.fail(_("Connection from your current IP address is not allowed"))
- elif re.search('captcha|code', errmsg, re.I):
+ elif re.search(r'captcha|code', errmsg, re.I):
self.retry_captcha()
- elif re.search('countdown|expired', errmsg, re.I):
+ elif re.search(r'countdown|expired', errmsg, re.I):
self.retry(10, 60, _("Link expired"))
- elif re.search('maint(e|ai)nance|temp', errmsg, re.I):
+ elif re.search(r'503|maint(e|ai)nance|temp|mirror', errmsg, re.I):
self.temp_offline()
- elif re.search('up to|size', errmsg, re.I):
+ elif re.search(r'up to|size', errmsg, re.I):
self.fail(_("Link list too large for free decrypt"))
- elif re.search('offline|delet|remov|not? (found|(longer)? available)', errmsg, re.I):
+ elif re.search(r'404|sorry|offline|delet|remov|(no(t|thing)?|sn\'t) (found|(longer )?(available|exist))',
+ errmsg, re.I):
self.offline()
- elif re.search('filename', errmsg, re.I):
+ elif re.search(r'filename', errmsg, re.I):
self.fail(_("Invalid url"))
- elif re.search('premium', errmsg, re.I):
+ elif re.search(r'premium', errmsg, re.I):
self.fail(_("Link can be decrypted by premium users only"))
else:
@@ -354,6 +360,7 @@ class SimpleCrypter(Crypter):
waitmsg = m.group(0).strip()
wait_time = parse_time(waitmsg)
- self.wait(wait_time, reconnect=wait_time > self.get_config("max_wait", 10) * 60)
+ self.wait(wait_time, reconnect=wait_time > self.config.get("max_wait", 10) * 60)
+ self.log_info(_("No errors found"))
self.info.pop('error', None)
diff --git a/module/plugins/internal/SimpleHoster.py b/module/plugins/internal/SimpleHoster.py
index 4d7697d57..c6e915cc4 100644
--- a/module/plugins/internal/SimpleHoster.py
+++ b/module/plugins/internal/SimpleHoster.py
@@ -7,9 +7,9 @@ import time
from module.network.HTTPRequest import BadHeader
from module.network.RequestFactory import getURL as get_url
-from module.plugins.internal.Hoster import Hoster, create_getInfo, parse_fileInfo
+from module.plugins.internal.Hoster import Hoster
from module.plugins.internal.Plugin import Fail
-from module.plugins.internal.utils import (encode, parse_name, parse_size,
+from module.plugins.internal.misc import (encode, parse_name, parse_size,
parse_time, replace_patterns)
@@ -43,7 +43,7 @@ class SimpleHoster(Hoster):
example: SIZE_PATTERN = r'(?P<S>file_size) (?P<U>size_unit)'
HASHSUM_PATTERN: (optional) Hash code and type of the file
- example: HASHSUM_PATTERN = r'(?P<H>hash_code) (?P<T>MD5)'
+ example: HASHSUM_PATTERN = r'(?P<D>hash_digest) (?P<H>MD5)'
OFFLINE_PATTERN: (mandatory) Check if the page is unreachable
example: OFFLINE_PATTERN = r'File (deleted|not found)'
@@ -101,6 +101,7 @@ class SimpleHoster(Hoster):
LOGIN_PREMIUM = False #: Set to True to require premium account login
LEECH_HOSTER = False #: Set to True to leech other hoster link (as defined in handle_multi method)
TEXT_ENCODING = True #: Set to encoding name if encoding value in http header is not correct
+ # TRANSLATE_ERROR = True
LINK_PATTERN = None
LINK_FREE_PATTERN = None
@@ -109,9 +110,9 @@ class SimpleHoster(Hoster):
INFO_PATTERN = None
NAME_PATTERN = None
SIZE_PATTERN = None
- HASHSUM_PATTERN = None
- OFFLINE_PATTERN = None
- TEMP_OFFLINE_PATTERN = None
+ HASHSUM_PATTERN = r'[^\w](?P<H>(CRC|crc)(-?32)?|(MD|md)-?5|(SHA|sha)-?(1|224|256|384|512)).*(:|=|>)[ ]*(?P<D>(?:[a-z0-9]|[A-Z0-9]){8,})'
+ OFFLINE_PATTERN = r'[^\w](404\s|[Ii]nvalid|[Oo]ffline|[Dd]elet|[Rr]emov|([Nn]o(t|thing)?|sn\'t) (found|(longer )?(available|exist)))'
+ TEMP_OFFLINE_PATTERN = r'[^\w](503\s|[Mm]aint(e|ai)nance|[Tt]emp([.-]|orarily)|[Mm]irror)'
WAIT_PATTERN = None
PREMIUM_ONLY_PATTERN = None
@@ -187,8 +188,8 @@ class SimpleHoster(Hoster):
info['size'] = parse_size(info['size'], unit)
if 'H' in info['pattern']:
- hashtype = info['pattern']['T'] if 'T' in info['pattern'] else "hash"
- info[hashtype] = info['pattern']['H']
+ type = info['pattern']['H'].strip('-').upper()
+ info['hash'][type] = info['pattern']['D']
return info
@@ -246,51 +247,54 @@ class SimpleHoster(Hoster):
def process(self, pyfile):
self.prepare()
+ #@TODO: Remove `handle_multi`, use MultiHoster instead
if self.leech_dl:
self.log_info(_("Processing as debrid download..."))
self.handle_multi(pyfile)
- if not self.link and not was_downloaded():
- self.log_info(_("Failed to leech url"))
-
else:
- if not self.link and self.direct_dl and not self.last_download:
+ if not self.link and self.direct_dl:
self.log_info(_("Looking for direct download link..."))
self.handle_direct(pyfile)
- if self.link or self.last_download:
+ if self.link:
self.log_info(_("Direct download link detected"))
else:
self.log_info(_("Direct download link not found"))
- if not self.link and not self.last_download:
+ if not self.link:
self.preload()
+ self.check_errors()
if self.info.get('status', 3) is not 2:
self.grab_info()
+ self.check_status()
+ self.check_duplicates()
- if self.premium and (not self.CHECK_TRAFFIC or self.check_traffic()):
+ if self.premium and (not self.CHECK_TRAFFIC or not self.out_of_traffic()):
self.log_info(_("Processing as premium download..."))
self.handle_premium(pyfile)
- elif not self.LOGIN_ACCOUNT or (not self.CHECK_TRAFFIC or self.check_traffic()):
+ elif not self.LOGIN_ACCOUNT or (not self.CHECK_TRAFFIC or not self.out_of_traffic()):
self.log_info(_("Processing as free download..."))
self.handle_free(pyfile)
- if not self.link and not self.last_download:
- self.error(_("%s download link not found") % ("Premium" if self.premium else "Free"))
-
- if not self.last_download:
+ if self.link and not self.last_download:
self.log_info(_("Downloading file..."))
self.download(self.link, disposition=self.DISPOSITION)
+ def _check_download(self):
+ super(SimpleHoster, self)._check_download()
+ self.check_download()
+
+
def check_download(self):
super(SimpleHoster, self).check_download()
- self.log_info(_("Checking downloaded file with built-in rules..."))
+ self.log_info(_("Checking file (with built-in rules)..."))
for r, p in self.FILE_ERRORS:
- errmsg = self.check_file({r: re.compile(p)})
+ errmsg = self.scan_download({r: re.compile(p)})
if errmsg is not None:
errmsg = errmsg.strip().capitalize()
@@ -305,19 +309,21 @@ class SimpleHoster(Hoster):
self.restart(errmsg)
else:
if self.CHECK_FILE:
- self.log_info(_("Checking downloaded file with custom rules..."))
+ self.log_info(_("Checking file (with custom rules)..."))
with open(encode(self.last_download), "rb") as f:
self.data = f.read(1048576) #@TODO: Recheck in 0.4.10
self.check_errors()
- self.log_info(_("File is OK"))
+ self.log_info(_("No errors found"))
def check_errors(self):
+ self.log_info(_("Checking for link errors..."))
+
if not self.data:
- self.log_debug("No data to check")
+ self.log_warning(_("No data to check"))
return
if self.IP_BLOCKED_PATTERN and re.search(self.IP_BLOCKED_PATTERN, self.data):
@@ -345,7 +351,7 @@ class SimpleHoster(Hoster):
self.log_warning(errmsg)
wait_time = parse_time(errmsg)
- self.wait(wait_time, reconnect=wait_time > self.get_config("max_wait", 10) * 60)
+ self.wait(wait_time, reconnect=wait_time > self.config.get("max_wait", 10) * 60)
self.restart(_("Download limit exceeded"))
if self.HAPPY_HOUR_PATTERN and re.search(self.HAPPY_HOUR_PATTERN, self.data):
@@ -366,33 +372,40 @@ class SimpleHoster(Hoster):
self.info['error'] = errmsg
self.log_warning(errmsg)
- if re.search('limit|wait|slot', errmsg, re.I):
+ if re.search(self.TEMP_OFFLINE_PATTERN, errmsg):
+ self.temp_offline()
+
+ elif re.search(self.OFFLINE_PATTERN, errmsg):
+ self.offline()
+
+ elif re.search(r'limit|wait|slot', errmsg, re.I):
wait_time = parse_time(errmsg)
- self.wait(wait_time, reconnect=wait_time > self.get_config("max_wait", 10) * 60)
+ self.wait(wait_time, reconnect=wait_time > self.config.get("max_wait", 10) * 60)
self.restart(_("Download limit exceeded"))
- elif re.search('country|ip|region|nation', errmsg, re.I):
+ elif re.search(r'country|ip|region|nation', errmsg, re.I):
self.fail(_("Connection from your current IP address is not allowed"))
- elif re.search('captcha|code', errmsg, re.I):
+ elif re.search(r'captcha|code', errmsg, re.I):
self.retry_captcha()
- elif re.search('countdown|expired', errmsg, re.I):
+ elif re.search(r'countdown|expired', errmsg, re.I):
self.retry(10, 60, _("Link expired"))
- elif re.search('maint(e|ai)nance|temp', errmsg, re.I):
+ elif re.search(r'503|maint(e|ai)nance|temp|mirror', errmsg, re.I):
self.temp_offline()
- elif re.search('up to|size', errmsg, re.I):
+ elif re.search(r'up to|size', errmsg, re.I):
self.fail(_("File too large for free download"))
- elif re.search('offline|delet|remov|not? (found|(longer)? available)', errmsg, re.I):
+ elif re.search(r'404|sorry|offline|delet|remov|(no(t|thing)?|sn\'t) (found|(longer )?(available|exist))',
+ errmsg, re.I):
self.offline()
- elif re.search('filename', errmsg, re.I):
+ elif re.search(r'filename', errmsg, re.I):
self.fail(_("Invalid url"))
- elif re.search('premium', errmsg, re.I):
+ elif re.search(r'premium', errmsg, re.I):
self.fail(_("File can be downloaded by premium users only"))
else:
@@ -409,8 +422,9 @@ class SimpleHoster(Hoster):
waitmsg = m.group(0).strip()
wait_time = parse_time(waitmsg)
- self.wait(wait_time, reconnect=wait_time > self.get_config("max_wait", 10) * 60)
+ self.wait(wait_time, reconnect=wait_time > self.config.get("max_wait", 10) * 60)
+ self.log_info(_("No errors found"))
self.info.pop('error', None)
@@ -431,10 +445,12 @@ class SimpleHoster(Hoster):
def handle_free(self, pyfile):
if not self.LINK_FREE_PATTERN:
- self.error(_("Free download not implemented"))
+ self.fail(_("Free download not implemented"))
m = re.search(self.LINK_FREE_PATTERN, self.data)
- if m is not None:
+ if m is None:
+ self.error(_("Free download link not found"))
+ else:
self.link = m.group(1)
@@ -444,5 +460,7 @@ class SimpleHoster(Hoster):
self.restart(premium=False)
m = re.search(self.LINK_PREMIUM_PATTERN, self.data)
- if m is not None:
+ if m is None:
+ self.error(_("Premium download link not found"))
+ else:
self.link = m.group(1)
diff --git a/module/plugins/internal/UnRar.py b/module/plugins/internal/UnRar.py
index 963ca2a2e..b9c91bce8 100644
--- a/module/plugins/internal/UnRar.py
+++ b/module/plugins/internal/UnRar.py
@@ -6,34 +6,32 @@ import string
import subprocess
from module.plugins.internal.Extractor import Extractor, ArchiveError, CRCError, PasswordError
-from module.plugins.internal.utils import decode, fs_join, renice
+from module.plugins.internal.misc import decode, encode, fsjoin, renice
class UnRar(Extractor):
__name__ = "UnRar"
__type__ = "extractor"
- __version__ = "1.29"
+ __version__ = "1.30"
__status__ = "testing"
- __description__ = """Rar extractor plugin"""
+ __description__ = """RAR extractor plugin"""
__license__ = "GPLv3"
__authors__ = [("RaNaN" , "RaNaN@pyload.org" ),
("Walter Purcaro", "vuolter@gmail.com"),
("Immenz" , "immenz@gmx.net" )]
- CMD = "unrar"
- EXTENSIONS = [".rar"]
+ CMD = "unrar"
+ EXTENSIONS = ["rar", "zip", "cab", "arj", "lzh", "tar", "gz", "ace", "uue",
+ "bz2", "jar", "iso", "7z", "xz", "z"]
- re_multipart = re.compile(r'\.(part|r)(\d+)(?:\.rar)?(\.rev|\.bad)?', re.I)
-
- re_filefixed = re.compile(r'Building (.+)')
- re_filelist = re.compile(r'^(.)(\s*[\w\-.]+)\s+(\d+\s+)+(?:\d+\%\s+)?[\d\-]{8}\s+[\d\:]{5}', re.I | re.M)
-
- re_wrongpwd = re.compile(r'password', re.I)
- re_wrongcrc = re.compile(r'encrypted|damaged|CRC failed|checksum error|corrupt', re.I)
-
- re_version = re.compile(r'(?:UN)?RAR\s(\d+\.\d+)', re.I)
+ _RE_PART = re.compile(r'\.(part|r)\d+(\.rar|\.rev)?(\.bad)?', re.I)
+ _RE_FIXNAME = re.compile(r'Building (.+)')
+ _RE_FILES = re.compile(r'^(.)(\s*[\w\-.]+)\s+(\d+\s+)+(?:\d+\%\s+)?[\d\-]{8}\s+[\d\:]{5}', re.I | re.M)
+ _RE_BADPWD = re.compile(r'password', re.I)
+ _RE_BADCRC = re.compile(r'encrypted|damaged|CRC failed|checksum error|corrupt', re.I)
+ _RE_VERSION = re.compile(r'(?:UN)?RAR\s(\d+\.\d+)', re.I)
@classmethod
@@ -62,7 +60,7 @@ class UnRar(Extractor):
except OSError:
return False
- m = cls.re_version.search(out)
+ m = cls._RE_VERSION.search(out)
if m is not None:
cls.VERSION = m.group(1)
@@ -71,21 +69,21 @@ class UnRar(Extractor):
@classmethod
def ismultipart(cls, filename):
- return True if cls.re_multipart.search(filename) else False
+ return True if cls._RE_PART.search(filename) else False
def verify(self, password=None):
p = self.call_cmd("l", "-v", self.target, password=password)
out, err = p.communicate()
- if self.re_wrongpwd.search(err):
+ if self._RE_BADPWD.search(err):
raise PasswordError
- if self.re_wrongcrc.search(err):
+ if self._RE_BADCRC.search(err):
raise CRCError(err)
#: Output only used to check if passworded files are present
- for attr in self.re_filelist.findall(out):
+ for attr in self._RE_FILES.findall(out):
if attr[0].startswith("*"):
raise PasswordError
@@ -94,14 +92,14 @@ class UnRar(Extractor):
p = self.call_cmd("rc", self.target)
#: Communicate and retrieve stderr
- self._progress(p)
+ self.progress(p)
err = p.stderr.read().strip()
if err or p.returncode:
p = self.call_cmd("r", self.target)
# communicate and retrieve stderr
- self._progress(p)
+ self.progress(p)
err = p.stderr.read().strip()
if err or p.returncode:
@@ -109,14 +107,14 @@ class UnRar(Extractor):
else:
dir = os.path.dirname(filename)
- name = re_filefixed.search(out).group(1)
+ name = _RE_FIXNAME.search(out).group(1)
self.filename = os.path.join(dir, name)
return True
- def _progress(self, process):
+ def progress(self, process):
s = ""
while True:
c = process.stdout.read(1)
@@ -125,7 +123,7 @@ class UnRar(Extractor):
break
#: Reading a percentage sign -> set progress and restart
if c == "%":
- self.notify_progress(int(s))
+ self.notifyprogress(int(s))
s = ""
#: Not reading a digit -> therefore restart
elif c not in string.digits:
@@ -138,17 +136,17 @@ class UnRar(Extractor):
def extract(self, password=None):
command = "x" if self.fullpath else "e"
- p = self.call_cmd(command, self.target, self.out, password=password)
+ p = self.call_cmd(command, self.target, self.dest, password=password)
#: Communicate and retrieve stderr
- self._progress(p)
+ self.progress(p)
err = p.stderr.read().strip()
if err:
- if self.re_wrongpwd.search(err):
+ if self._RE_BADPWD.search(err):
raise PasswordError
- elif self.re_wrongcrc.search(err):
+ elif self._RE_BADCRC.search(err):
raise CRCError(err)
else: #: Raise error if anything is on stderr
@@ -157,18 +155,16 @@ class UnRar(Extractor):
if p.returncode:
raise ArchiveError(_("Process return code: %d") % p.returncode)
- self.files = self.list(password)
-
- def items(self):
+ def chunks(self):
dir, name = os.path.split(self.filename)
#: Actually extracted file
files = [self.filename]
#: eventually Multipart Files
- files.extend(fs_join(dir, os.path.basename(file)) for file in filter(self.ismultipart, os.listdir(dir))
- if re.sub(self.re_multipart, ".rar", name) == re.sub(self.re_multipart, ".rar", file))
+ files.extend(fsjoin(dir, os.path.basename(file)) for file in filter(self.ismultipart, os.listdir(dir))
+ if re.sub(self._RE_PART, "", name) == re.sub(self._RE_PART, "", file))
return files
@@ -189,12 +185,12 @@ class UnRar(Extractor):
if not self.fullpath and self.VERSION.startswith('5'):
#@NOTE: Unrar 5 always list full path
for f in decode(out).splitlines():
- f = fs_join(self.out, os.path.basename(f.strip()))
+ f = fsjoin(self.dest, os.path.basename(f.strip()))
if os.path.isfile(f):
- result.add(fs_join(self.out, os.path.basename(f)))
+ result.add(fsjoin(self.dest, os.path.basename(f)))
else:
for f in decode(out).splitlines():
- result.add(fs_join(self.out, f.strip()))
+ result.add(fsjoin(self.dest, f.strip()))
return list(result)
@@ -226,9 +222,9 @@ class UnRar(Extractor):
#@NOTE: return codes are not reliable, some kind of threading, cleanup whatever issue
call = [self.CMD, command] + args + list(xargs)
+ self.log_debug("EXECUTE " + " ".join(call))
- self.log_debug(" ".join(call))
-
+ call = map(encode, call)
p = subprocess.Popen(call, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
renice(p.pid, self.priority)
diff --git a/module/plugins/internal/UnTar.py b/module/plugins/internal/UnTar.py
new file mode 100644
index 000000000..f2a140ca7
--- /dev/null
+++ b/module/plugins/internal/UnTar.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import with_statement
+
+import sys
+import tarfile
+
+from module.plugins.internal.Extractor import Extractor, ArchiveError, CRCError
+from module.plugins.internal.misc import encode
+
+
class UnTar(Extractor):
    __name__ = "UnTar"
    __type__ = "extractor"
    __version__ = "0.01"
    __status__ = "stable"

    __description__ = """TAR extractor plugin"""
    __license__ = "GPLv3"
    __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]


    #: Uses the stdlib tarfile module, so "version" is the interpreter version
    VERSION = "%s.%s.%s" % (sys.version_info[0], sys.version_info[1], sys.version_info[2])


    @classmethod
    def isarchive(cls, filename):
        """Return True if `filename` looks like a tar archive."""
        return tarfile.is_tarfile(encode(filename))


    @classmethod
    def find(cls):
        """Availability check: tarfile gained the needed API in Python 2.5."""
        #: NOTE(review): `with tarfile.open(...)` below needs Python 2.7+
        #: (TarFile became a context manager in 2.7) — confirm minimum version
        return sys.version_info[:2] >= (2, 5)


    def list(self, password=None):
        """Return the names of all archive members (password is ignored)."""
        with tarfile.open(self.target) as t:
            return t.getnames()


    def verify(self, password=None):
        """
        Try opening the archive.

        :raises CRCError: on a compression/decompression problem
        :raises ArchiveError: on any other tar or I/O error
        """
        try:
            t = tarfile.open(self.target, errorlevel=1)

        except tarfile.CompressionError, e:
            raise CRCError(e)

        except (OSError, tarfile.TarError), e:
            raise ArchiveError(e)

        else:
            t.close()


    def extract(self, password=None):
        """
        Verify, then extract all members to `self.dest`.

        Non-fatal per-member errors (errorlevel=2 -> ExtractError) are only
        logged; compression errors map to CRCError, the rest to ArchiveError.
        """
        self.verify()

        try:
            with tarfile.open(self.target, errorlevel=2) as t:
                t.extractall(self.dest)

        except tarfile.ExtractError, e:
            self.log_warning(e)

        except tarfile.CompressionError, e:
            raise CRCError(e)

        except (OSError, tarfile.TarError), e:
            raise ArchiveError(e)
diff --git a/module/plugins/internal/UnZip.py b/module/plugins/internal/UnZip.py
index ff929ae00..50ab80da3 100644
--- a/module/plugins/internal/UnZip.py
+++ b/module/plugins/internal/UnZip.py
@@ -2,26 +2,30 @@
from __future__ import with_statement
-import os
import sys
import zipfile
from module.plugins.internal.Extractor import Extractor, ArchiveError, CRCError, PasswordError
+from module.plugins.internal.misc import encode
class UnZip(Extractor):
__name__ = "UnZip"
__type__ = "extractor"
- __version__ = "1.20"
+ __version__ = "1.21"
__status__ = "stable"
- __description__ = """Zip extractor plugin"""
+ __description__ = """ZIP extractor plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- VERSION = "%s.%s.%s" % (sys.version_info[0], sys.version_info[1], sys.version_info[2])
- EXTENSIONS = [".zip", ".zip64"]
+ VERSION = "%s.%s.%s" % (sys.version_info[0], sys.version_info[1], sys.version_info[2])
+
+
+ @classmethod
+ def isarchive(cls, filename):
+ return zipfile.is_zipfile(encode(filename))
@classmethod
@@ -30,49 +34,35 @@ class UnZip(Extractor):
def list(self, password=None):
- with zipfile.ZipFile(self.target, 'r', allowZip64=True) as z:
+ with zipfile.ZipFile(self.target, 'r') as z:
z.setpassword(password)
return z.namelist()
def verify(self, password=None):
- with zipfile.ZipFile(self.target, 'r', allowZip64=True) as z:
- z.setpassword(password)
-
- try:
- badfile = z.testzip()
-
- except RuntimeError, e:
- if "encrypted" in e.args[0] or "Bad password" in e.args[0]:
- raise PasswordError
- else:
- raise CRCError("Archive damaged")
-
- else:
- if badfile:
- raise CRCError(badfile)
-
-
-
- def extract(self, password=None):
try:
- with zipfile.ZipFile(self.target, 'r', allowZip64=True) as z:
+ with zipfile.ZipFile(self.target, 'r') as z:
z.setpassword(password)
-
- badfile = z.testzip()
-
- if badfile:
+ if z.testzip():
raise CRCError(badfile)
- else:
- z.extractall(self.out)
except (zipfile.BadZipfile, zipfile.LargeZipFile), e:
raise ArchiveError(e)
except RuntimeError, e:
if "encrypted" in e.args[0] or "Bad password" in e.args[0]:
- raise PasswordError
+ raise PasswordError(e)
else:
- raise ArchiveError(e)
- else:
- self.files = z.namelist()
+ raise CRCError(e)
+
+
+ def extract(self, password=None):
+ self.verify(password)
+
+ try:
+ with zipfile.ZipFile(self.target, 'r') as z:
+ z.setpassword(password)
+ z.extractall(self.dest)
+
+ except RuntimeError, e:
+ raise ArchiveError(e)
diff --git a/module/plugins/internal/XFSAccount.py b/module/plugins/internal/XFSAccount.py
index 5e93f3fe4..f5aa37c81 100644
--- a/module/plugins/internal/XFSAccount.py
+++ b/module/plugins/internal/XFSAccount.py
@@ -5,13 +5,13 @@ import time
import urlparse
from module.plugins.internal.MultiAccount import MultiAccount
-from module.plugins.internal.utils import parse_html_form, parse_time, set_cookie
+from module.plugins.internal.misc import parse_html_form, parse_time, set_cookie
class XFSAccount(MultiAccount):
__name__ = "XFSAccount"
__type__ = "account"
- __version__ = "0.56"
+ __version__ = "0.57"
__status__ = "stable"
__config__ = [("activated" , "bool" , "Activated" , True ),
@@ -197,8 +197,10 @@ class XFSAccount(MultiAccount):
def check_errors(self):
+ self.log_info(_("Checking for link errors..."))
+
if not self.data:
- self.log_debug("No data to check")
+ self.log_warning(_("No data to check"))
return
m = re.search(self.LOGIN_BAN_PATTERN, self.data)
@@ -231,3 +233,5 @@ class XFSAccount(MultiAccount):
self.timeout = self.LOGIN_TIMEOUT
self.fail_login(errmsg)
+
+ self.log_info(_("No errors found"))
diff --git a/module/plugins/internal/XFSCrypter.py b/module/plugins/internal/XFSCrypter.py
index 7e0c14fe1..d80276cfb 100644
--- a/module/plugins/internal/XFSCrypter.py
+++ b/module/plugins/internal/XFSCrypter.py
@@ -1,21 +1,20 @@
# -*- coding: utf-8 -*-
-from module.plugins.internal.SimpleCrypter import SimpleCrypter, create_getInfo
-from module.plugins.internal.utils import set_cookie
+from module.plugins.internal.SimpleCrypter import SimpleCrypter
+from module.plugins.internal.misc import set_cookie
class XFSCrypter(SimpleCrypter):
__name__ = "XFSCrypter"
__type__ = "crypter"
- __version__ = "0.22"
+ __version__ = "0.23"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
- __config__ = [("activated" , "bool", "Activated" , True),
- ("use_premium" , "bool", "Use premium account if available" , True),
- ("use_subfolder" , "bool", "Save package to subfolder" , True),
- ("subfolder_per_package", "bool", "Create a subfolder for each package" , True),
- ("max_wait" , "int" , "Reconnect if waiting time is greater than minutes", 10 )]
+ __config__ = [("activated" , "bool" , "Activated" , True ),
+ ("use_premium" , "bool" , "Use premium account if available" , True ),
+ ("folder_per_package", "Default;Yes;No", "Create folder for each package" , "Default"),
+ ("max_wait" , "int" , "Reconnect if waiting time is greater than minutes", 10 )]
__description__ = """XFileSharing decrypter plugin"""
__license__ = "GPLv3"
diff --git a/module/plugins/internal/XFSHoster.py b/module/plugins/internal/XFSHoster.py
index e8c2073bd..dabf1457a 100644
--- a/module/plugins/internal/XFSHoster.py
+++ b/module/plugins/internal/XFSHoster.py
@@ -1,18 +1,19 @@
# -*- coding: utf-8 -*-
+import operator
import random
import re
from module.plugins.captcha.ReCaptcha import ReCaptcha
from module.plugins.captcha.SolveMedia import SolveMedia
-from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-from module.plugins.internal.utils import html_unescape, seconds_to_midnight, set_cookie
+from module.plugins.internal.SimpleHoster import SimpleHoster
+from module.plugins.internal.misc import html_unescape, seconds_to_midnight, set_cookie
class XFSHoster(SimpleHoster):
__name__ = "XFSHoster"
__type__ = "hoster"
- __version__ = "0.71"
+ __version__ = "0.72"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
@@ -230,12 +231,12 @@ class XFSHoster(SimpleHoster):
self.log_debug(captcha_div)
- inputs['code'] = "".join(a[1] for a in sorted(numerals, key=lambda num: int(num[0])))
+ inputs['code'] = "".join(a[1] for a in sorted(numerals, key=operator.itemgetter(0)))
self.log_debug("Captcha code: %s" % inputs['code'], numerals)
return
- recaptcha = ReCaptcha(self)
+ recaptcha = ReCaptcha(self.pyfile)
try:
captcha_key = re.search(self.RECAPTCHA_PATTERN, self.data).group(1)
@@ -246,10 +247,11 @@ class XFSHoster(SimpleHoster):
self.log_debug("ReCaptcha key: %s" % captcha_key)
if captcha_key:
+ self.captcha = recaptcha
inputs['recaptcha_response_field'], inputs['recaptcha_challenge_field'] = recaptcha.challenge(captcha_key)
return
- solvemedia = SolveMedia(self)
+ solvemedia = SolveMedia(self.pyfile)
try:
captcha_key = re.search(self.SOLVEMEDIA_PATTERN, self.data).group(1)
@@ -260,4 +262,5 @@ class XFSHoster(SimpleHoster):
self.log_debug("SolveMedia key: %s" % captcha_key)
if captcha_key:
+ self.captcha = solvemedia
inputs['adcopy_response'], inputs['adcopy_challenge'] = solvemedia.challenge(captcha_key)
diff --git a/module/plugins/internal/misc.py b/module/plugins/internal/misc.py
new file mode 100644
index 000000000..2cd843109
--- /dev/null
+++ b/module/plugins/internal/misc.py
@@ -0,0 +1,892 @@
+# -*- coding: utf-8 -*-
+#
+#@TODO: Move to misc directory in 0.4.10
+
+from __future__ import with_statement
+
+# import HTMLParser #@TODO: Use in 0.4.10
+import datetime
+import hashlib
+import htmlentitydefs
+import itertools
+import os
+import re
+import shutil
+import string
+import sys
+import time
+import traceback
+import urllib
+import urlparse
+import xml.sax.saxutils #@TODO: Remove in 0.4.10
+import zlib
+
+try:
+ import simplejson as json
+
+except ImportError:
+ import json
+
+
+#@TODO: Remove in 0.4.10
class misc(object):
    """
    Dummy plugin-shaped class: present only so the plugin loader accepts
    this module; it carries metadata and no behavior.
    """
    __name__ = "misc"
    __type__ = "plugin"
    __version__ = "0.11"
    __status__ = "stable"

    __pattern__ = r'^unmatchable$'
    __config__ = []

    __description__ = """Dummy utils class"""
    __license__ = "GPLv3"
    __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
class Config(object):
    """
    Thin accessor around the pyLoad config API, scoped to one plugin.
    """

    def __init__(self, plugin):
        self.plugin = plugin  #: owning plugin (provides `pyload` and `classname`)


    def set(self, option, value):
        """
        Set config value for current plugin

        :param option: option name
        :param value: new value to store
        :return:
        """
        self.plugin.pyload.api.setConfigValue(self.plugin.classname, option, value, section="plugin")


    def get(self, option, default=None):
        """
        Returns config value for current plugin

        :param option: option name
        :param default: value returned when the option is missing
        :return:
        """
        try:
            return self.plugin.pyload.config.getPlugin(self.plugin.classname, option)

        except KeyError:
            self.plugin.log_debug("Config option `%s` not found, use default `%s`" % (option, default)) #@TODO: Restore to `log_warning` in 0.4.10
            return default
+
+
class DB(object):
    """
    Per-plugin persistent key/value storage backed by the pyLoad database.
    Values are JSON-serialized and base64-encoded before storage.
    """

    def __init__(self, plugin):
        self.plugin = plugin  #: owning plugin (provides `pyload` and `classname`)


    def store(self, key, value):
        """
        Saves a value persistently to the database
        """
        value = map(decode, value) if isiterable(value) else decode(value)
        entry = json.dumps(value).encode('base64')
        self.plugin.pyload.db.setStorage(self.plugin.classname, key, entry)


    def retrieve(self, key=None, default=None):
        """
        Retrieves saved value or dict of all saved entries if key is None
        """
        entry = self.plugin.pyload.db.getStorage(self.plugin.classname, key)

        if key:
            if entry is None:
                value = default
            else:
                value = json.loads(entry.decode('base64'))
        else:
            if not entry:
                value = default
            else:
                #: Fixed: iterate the fetched `entry` dict — the old code read
                #: `value.items()` while `value` was still unbound
                #: (UnboundLocalError on every all-entries lookup)
                value = dict((k, json.loads(v.decode('base64')))
                             for k, v in entry.items())

        return value


    def delete(self, key):
        """
        Delete entry in db
        """
        self.plugin.pyload.db.delStorage(self.plugin.classname, key)
+
+
+class Periodical(object):
+
+ def __init__(self, plugin, task=lambda x: x, interval=None):
+ self.plugin = plugin
+ self.task = task
+ self.cb = None
+ self.interval = interval
+
+
+ def set_interval(self, value):
+ newinterval = max(0, value)
+
+ if newinterval != value:
+ return False
+
+ if newinterval != self.interval:
+ self.interval = newinterval
+
+ return True
+
+
+ def start(self, interval=None, threaded=False, delay=0):
+ if interval is not None and self.set_interval(interval) is False:
+ return False
+ else:
+ self.cb = self.plugin.pyload.scheduler.addJob(max(1, delay), self._task, [threaded], threaded=threaded)
+ return True
+
+
+ def restart(self, *args, **kwargs):
+ self.stop()
+ return self.start(*args, **kwargs)
+
+
+ def stop(self):
+ try:
+ return self.plugin.pyload.scheduler.removeJob(self.cb)
+
+ except Exception:
+ return False
+
+ finally:
+ self.cb = None
+
+
+ def _task(self, threaded):
+ try:
+ self.task()
+
+ except Exception, e:
+ self.log_error(_("Error performing periodical task"), e)
+
+ self.restart(threaded=threaded, delay=self.interval)
+
+
class SimpleQueue(object):
    """
    Small persistent list stored under one key in the plugin's DB storage.
    """

    def __init__(self, plugin, storage="queue"):
        self.plugin = plugin
        self.storage = storage  #: storage key name


    def get(self):
        """Return the stored queue, or an empty list if nothing is stored."""
        return self.plugin.db.retrieve(self.storage, default=[])


    def set(self, value):
        return self.plugin.db.store(self.storage, value)


    def delete(self):
        return self.plugin.db.delete(self.storage)


    def add(self, item):
        """Append `item` unless it is already queued."""
        queue = self.get()
        if item not in queue:
            return self.set(queue + [item])
        else:
            return True


    def remove(self, item):
        """Remove `item`; drop the whole storage entry once the queue is empty."""
        queue = self.get()
        try:
            queue.remove(item)

        except ValueError:
            pass

        #: Fixed: the old `isinstance(queue, list)` test was always true,
        #: so *every* removal deleted the entire stored queue; only clean
        #: up the storage entry when the queue is actually empty
        if not queue:
            return self.delete()

        return self.set(queue)
+
+
def lock(fn):
    """
    Method decorator: run `fn` while holding the owner's `lock` attribute
    (the first positional argument must expose a lock object).
    """
    def new(*args):
        with args[0].lock:
            return fn(*args)

    return new
+
+
def format_time(value):
    """
    Render a duration in seconds as human readable text,
    e.g. "1 days and 2 hours, 30 minutes".
    """
    #: Shift the duration onto a dummy date so datetime splits it for us
    dt = datetime.datetime(1, 1, 1) + datetime.timedelta(seconds=abs(int(value)))

    chunks = ["%d %ss" % (getattr(dt, unit), unit)
              for unit in ("hour", "minute", "second")
              if getattr(dt, unit)]

    prefix = ("%d days and " % (dt.day - 1)) if dt.day > 1 else ""
    return prefix + ", ".join(chunks)
+
+
def format_size(value):
    """
    Render a byte count as human readable text using binary units.
    """
    units = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB')
    size = value
    index = 0

    while index < len(units) and abs(size) >= 1024.0:
        size /= 1024.0
        index += 1

    if index < len(units):
        return "%3.2f %s" % (size, units[index])

    return "%.2f %s" % (size, 'EiB')
+
+
def compare_time(start, end):
    """
    Return True if the current local (hour, minute) falls inside the time
    window from `start` to `end` (each an (hour, minute) pair).

    start == end means "always"; windows may wrap past midnight.
    """
    #: list() keeps the comparisons working on Python 3 as well
    start = list(map(int, start))
    end = list(map(int, end))

    if start == end:
        return True

    now = list(time.localtime()[3:5])

    if start < end:
        #: Fixed: also require now > start — the old code accepted any time
        #: before `end`, even one before the window opened
        if start < now < end:
            return True

    elif now > start or now < end:  #: window wraps past midnight
        return True

    return False
+
+
def free_space(folder):
    """
    Return the number of free bytes on the filesystem containing `folder`.
    """
    #: Fixed: `os.name is "nt"` compared object identity, which is an
    #: interning-dependent CPython accident; use equality
    if os.name == "nt":
        import ctypes

        free_bytes = ctypes.c_ulonglong(0)
        ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(folder),
                                                   None,
                                                   None,
                                                   ctypes.pointer(free_bytes))
        return free_bytes.value

    else:
        s = os.statvfs(folder)
        return s.f_frsize * s.f_bavail
+
+
def fsbsize(path):
    """
    Get optimal file system buffer size (in bytes) for I/O calls
    """
    path = encode(path)

    if os.name == "nt":  #: Fixed: `is` identity compare replaced with `==`
        import ctypes

        drive = "%s\\" % os.path.splitdrive(path)[0]

        #: Fixed: the old code tried to unpack a single c_longlong into two
        #: names (TypeError) and multiplied ctypes objects without `.value`
        cluster_sectors = ctypes.c_longlong(0)
        sector_size = ctypes.c_longlong(0)

        ctypes.windll.kernel32.GetDiskFreeSpaceW(ctypes.c_wchar_p(drive),
                                                 ctypes.pointer(cluster_sectors),
                                                 ctypes.pointer(sector_size),
                                                 None,
                                                 None)
        return cluster_sectors.value * sector_size.value

    else:
        return os.statvfs(path).f_frsize
+
+
def uniqify(seq):
    """
    Remove duplicates from list preserving order
    """
    seen = set()
    unique = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique
+
+
def has_method(obj, name):
    """
    Check if name was defined in obj itself (returns False if inherited
    or if obj has no attribute dictionary at all).
    """
    try:
        return name in vars(obj)
    except TypeError:  #: obj has no __dict__
        return False
+
+
def html_unescape(text):
    """
    Removes HTML or XML character references and entities from a text string
    """
    #: NOTE: only handles the predefined XML entities (&amp; &lt; &gt;),
    #: not the full HTML named-entity set
    return xml.sax.saxutils.unescape(text)
    #@TODO: Replace in 0.4.10 with:
    # h = HTMLParser.HTMLParser()
    # return h.unescape(text)
+
+
def isiterable(obj):
    """
    Check if object is iterable (string excluded)
    """
    #: Python 2 str has no __iter__, which is what excludes plain strings
    return getattr(obj, "__iter__", None) is not None
+
+
def get_console_encoding(enc):
    """
    Map a console encoding name to one the platform actually supports.
    On non-Windows platforms always returns "utf8".
    """
    #: Fixed: both `os.name is "nt"` and `enc is "cp65001"` compared object
    #: identity; a dynamically obtained encoding string is generally not the
    #: same object as the literal, so the cp65001 remap never fired
    if os.name == "nt":
        if enc == "cp65001":  #: aka UTF-8
            enc = "cp850"
            # print "WARNING: Windows codepage 65001 (UTF-8) is not supported, used `%s` instead" % enc
    else:
        enc = "utf8"

    return enc
+
+
#@NOTE: Revert to `decode` in Python 3
def decode(value, encoding=None, errors='strict'):
    """
    Encoded string (default to own system encoding) -> unicode string

    :param value: byte string, unicode string or any other object
    :param encoding: source charset; defaults to the console's stdout encoding
    :param errors: codec error handling scheme
    """
    if type(value) is str:
        #: byte string: decode with the given (or detected console) charset
        res = unicode(value, encoding or get_console_encoding(sys.stdout.encoding), errors)

    elif type(value) is unicode:
        res = value  #: already unicode, pass through unchanged

    else:
        res = unicode(value)  #: anything else: fall back to its text form

    return res
+
+
def transcode(value, decoding, encoding):
    """
    Re-encode a byte string from charset `decoding` to charset `encoding`.
    """
    return value.decode(decoding).encode(encoding)
+
+
def encode(value, encoding='utf-8', errors='backslashreplace'):
    """
    Unicode string -> encoded string (default to UTF-8)

    :param value: unicode string, byte string or any other object
    :param encoding: target charset
    :param errors: codec error handling scheme
    """
    if type(value) is unicode:
        res = value.encode(encoding, errors)

    elif type(value) is str:
        #: byte string: assume console (stdin) charset, transcode if needed
        decoding = get_console_encoding(sys.stdin.encoding)
        if encoding == decoding:
            res = value
        else:
            res = transcode(value, decoding, encoding)

    else:
        res = str(value)  #: anything else: fall back to its string form

    return res
+
+
def exists(path):
    """
    Case-sensitive variant of os.path.exists.
    """
    path = encode(path)

    if os.path.exists(path):
        #: Fixed: `os.name is "nt"` identity compare replaced with `==`
        if os.name == "nt":
            #: Windows paths are case-insensitive; confirm the exact-case
            #: name really is present in its directory listing
            dir, name = os.path.split(path.rstrip(os.sep))
            return name in os.listdir(dir)
        else:
            return True
    else:
        return False
+
+
def remove(path, trash=True):
    """
    Delete `path` (file or directory tree); no-op if it does not exist.

    :param trash: move to the system recycle bin instead of deleting

    Fixed: the old signature `remove(self, path, trash=True)` carried a
    spurious `self` on a module-level function, so every call bound the
    path to `self` and failed.
    """
    path = encode(path)

    if not exists(path):
        return

    if trash:
        #: third-party module, imported lazily so the rest of this helper
        #: works even when send2trash is not installed
        import send2trash
        send2trash.send2trash(path)

    elif os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=True)

    else:
        os.remove(path)
+
+
def fsjoin(*args):
    """
    Like os.path.join, but encoding aware
    (for safe-joining see `safejoin`)
    """
    #: Fixed: the components must be unpacked — `os.path.join(args)` passed
    #: the whole tuple as a single argument and raised at runtime
    return encode(os.path.join(*args))
+
+
def remove_chars(value, repl):
    """
    Remove all chars in repl from string

    :param value: str or unicode input
    :param repl: characters to strip out
    :return: cleaned string
             (NOTE(review): implicitly returns None when `value` is neither
             str nor unicode — confirm callers never pass other types)
    """
    if type(repl) is unicode:
        #: unicode repl: plain replace works for str and unicode values alike
        for badc in list(repl):
            value = value.replace(badc, "")
        return value

    elif type(value) is unicode:
        #: unicode value, byte repl: strip via a codepoint -> None mapping
        return value.translate(dict((ord(s), None) for s in repl))

    elif type(value) is str:
        #: both byte strings: use str.translate's deletechars argument
        return value.translate(string.maketrans("", ""), repl)
+
+
def fixurl(url, unquote=None):
    """
    Normalize an url: unquote percent-escapes, unescape entities, collapse
    duplicate slashes, then optionally re-quote the result.

    :param unquote: if None, auto-detect — re-quote only when the input
                    was percent-quoted to begin with
    """
    old = url
    url = urllib.unquote(url)

    if unquote is None:
        #: NOTE(review): relies on urllib.unquote returning the *same*
        #: object when nothing was unquoted (CPython implementation
        #: detail) — confirm this identity trick is intended
        unquote = url is old

    url = html_unescape(decode(url).decode('unicode-escape'))
    url = re.sub(r'(?<!:)/{2,}', '/', url).strip().lstrip('.')  #: keep '//' only after a scheme colon

    if not unquote:
        url = urllib.quote(url)

    return url
+
+
def truncate(name, length):
    """
    Shorten `name` by roughly `length` characters, marking the cut with '~'
    (keeps two thirds of the kept text from the head, one third from the tail).

    :param length: number of characters that must be removed
    :raises OSError: if the requested cut exceeds half of the name
    """
    max_trunc = len(name) / 2
    if length > max_trunc:
        raise OSError("File name too long")

    trunc = int((len(name) - length) / 3)
    #: NOTE(review): when trunc is 0, `name[-0:]` is the whole string, so the
    #: result becomes "~" + name (longer than the input) — confirm intended
    return "%s~%s" % (name[:trunc * 2], name[-trunc:])
+
+
#@TODO: Recheck in 0.4.10
def safepath(value):
    """
    Remove invalid characters and truncate the path if needed
    """
    drive, filename = os.path.splitdrive(value)
    filename = os.path.join(*map(safename, filename.split(os.sep)))
    path = os.path.abspath(drive + filename)

    try:
        #: Fixed: `os.name is not "nt"` compared identity; use inequality
        if os.name != "nt":
            return

        length = len(path) - 259  #: chars beyond the Windows MAX_PATH limit
        if length < 1:
            return

        dirname, basename = os.path.split(filename)
        name, ext = os.path.splitext(basename)
        path = drive + dirname + truncate(name, length) + ext

    finally:
        #: the `return` in finally intentionally overrides the early returns
        #: above and swallows any truncate() error, always yielding a path
        return path
+
+
def safejoin(*args):
    """
    os.path.join + safepath
    (join the components, then strip invalid chars and truncate if needed)
    """
    return safepath(os.path.join(*args))
+
+
def safename(value):
    """
    Remove characters that are invalid in file names on the current platform.
    """
    #: Fixed: `os.name is "nt"` identity compare replaced with `==`
    repl = '<>:"/\\|?*' if os.name == "nt" else '\0/\\"'
    name = remove_chars(value, repl)
    return name
+
+
def parse_name(value, safechar=True):
    """
    Extract a file name from an url (or url-like string).

    Tries, in order: the last path segment, the value of the first query
    parameter, the first label of the host name.

    :param safechar: strip characters that are invalid in file names
    """
    path = fixurl(decode(value), unquote=False)
    url_p = urlparse.urlparse(path.rstrip('/'))
    name = (url_p.path.split('/')[-1] or
            url_p.query.split('=', 1)[::-1][0].split('&', 1)[0] or  #: value of first query param
            url_p.netloc.split('.', 1)[0])

    name = urllib.unquote(name)
    return safename(name) if safechar else name
+
+
def parse_size(value, unit=""):  #: returns bytes
    """
    Convert a human readable size (e.g. "1.5 GB") to bytes.

    :param value: number with an optional unit suffix
    :param unit: explicit unit overriding the one found in `value`
    :return: size in bytes as int, 0 if `value` is unparsable
    """
    m = re.match(r"([\d.,]+)\s*([\w^_]*)", str(value).lower())

    if m is None:
        return 0

    size = float(m.group(1).replace(',', '.'))
    unit = (unit.strip().lower() or m.group(2) or "byte")[0]

    #: Compare with `==`, not `is`: identity of one-char strings is a
    #: CPython interning detail, not a language guarantee
    if unit == "b":
        return int(size)

    sizeunits = ['b', 'k', 'm', 'g', 't', 'p', 'e']
    sizemap = dict((u, i * 10) for i, u in enumerate(sizeunits))
    magnitude = sizemap[unit]  #: Shift amount: k=10, m=20, ...

    i, d = divmod(size, 1)
    integer = int(i) << magnitude
    decimal = int(d * (1024 ** (magnitude / 10)))

    return integer + decimal
+
+
def str2int(value):
    """
    Parse an integer from digits or a simple english phrase up to 99
    (e.g. "twenty one"); returns 0 for unrecognized words.
    """
    try:
        return int(value)
    #: Narrowed from a bare `except:` that also swallowed SystemExit &co.
    except (TypeError, ValueError):
        pass

    ones = ("zero", "one", "two", "three", "four", "five", "six", "seven", "eight",
            "nine", "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen",
            "sixteen", "seventeen", "eighteen", "nineteen")
    tens = ("", "", "twenty", "thirty", "forty", "fifty", "sixty", "seventy",
            "eighty", "ninety")

    o_tuple = [(w, i) for i, w in enumerate(ones)]
    t_tuple = [(w, i * 10) for i, w in enumerate(tens)]

    numwords = dict(o_tuple + t_tuple)
    tokens = re.split(r"[\s\-]+", value.lower())

    try:
        return sum(numwords[word] for word in tokens)
    except KeyError:  #: Unknown word
        return 0
+
+
def parse_time(value):
    """
    Parse a waiting-time phrase (e.g. "2 hr 5 sec", "daily") into
    seconds; "daily"/"today" maps to the time left until midnight.
    """
    if re.search("da(il)?y|today", value):
        return seconds_to_midnight()

    unit_seconds = {'hr': 3600, 'hour': 3600, 'min': 60, 'sec': 1, '': 1}
    regex = re.compile(r'(\d+| (?:this|an?) )\s*(hr|hour|min|sec|)', re.I)

    total = 0
    for amount, unit in regex.findall(value):
        count = 1 if amount.strip() in ("this", "a", "an") else int(amount)
        total += count * unit_seconds[unit.lower()]

    return total
+
+
def timestamp():
    """
    Current unix time in milliseconds.
    """
    return int(1000 * time.time())
+
+
def check_module(module):
    """
    Return True when the named module can be imported, False otherwise.
    """
    try:
        __import__(module)
        return True

    except Exception:
        return False
+
+
def check_prog(command):
    """
    Return True when the external command can be invoked, False otherwise
    (output is discarded).
    """
    quiet = subprocess.PIPE
    try:
        subprocess.call(command, stdout=quiet, stderr=quiet)
        return True

    except Exception:
        return False
+
+
def isexecutable(filename):
    """
    Return True when `filename` is an existing file with execute
    permission.
    """
    path = encode(filename)
    return os.path.isfile(path) and os.access(path, os.X_OK)
+
+
def which(filename):
    """
    Locate a command like the unix `which`; returns its path or None.
    Courtesy of http://stackoverflow.com/a/377028/675646
    """
    head, tail = os.path.split(filename)

    if head:
        #: An explicit path was given: accept it only if executable
        if isexecutable(filename):
            return filename
        return None

    for folder in os.environ['PATH'].split(os.pathsep):
        candidate = os.path.join(folder.strip('"'), filename)
        if isexecutable(candidate):
            return candidate
+
+
def format_exc(frame=None):
    """
    Format call-stack and display exception information (if availible)
    """
    exc_type, exc_value, exc_tb = sys.exc_info()

    stack = traceback.extract_stack(frame)[:-1]  #: Drop our own frame
    desc = ""

    if exc_type is not None:
        exc_stack = traceback.extract_tb(exc_tb)

        #: Splice in the exception trace when it originated in our caller
        if stack[-1][0] == exc_stack[0][0]:
            stack = stack[:-1]
            stack.extend(exc_stack)
            desc = "".join(traceback.format_exception_only(exc_type, exc_value))

    return ("Traceback (most recent call last):\n" +
            "".join(traceback.format_list(stack)) +
            desc)
+
+
def seconds_to_nexthour(strict=False):
    """
    Seconds remaining until the next full hour; unless `strict`, the
    target is minute 1 (one minute of slack past the hour).
    """
    now = datetime.datetime.today()
    target = now.replace(minute=0 if strict else 1,
                         second=0,
                         microsecond=0) + datetime.timedelta(hours=1)
    return (target - now).seconds
+
+
def seconds_to_midnight(utc=None, strict=False):
    """
    Seconds remaining until the next midnight; unless `strict`, the
    target is 00:01 (one minute of slack).

    :param utc: None for local time, else a fixed UTC hour offset
    """
    if utc is None:
        now = datetime.datetime.today()
    else:
        now = datetime.datetime.utcnow() + datetime.timedelta(hours=utc)

    tomorrow = now.replace(hour=0,
                           minute=0 if strict else 1,
                           second=0,
                           microsecond=0) + datetime.timedelta(days=1)

    return (tomorrow - now).seconds
+
+
def replace_patterns(value, rules):
    """
    Apply a list of (pattern, repl[, flags]) regex substitutions to
    `value` in order and return the result.
    """
    for rule in rules:
        try:
            pattern, repl, flags = rule

        except ValueError:  #: 2-tuple rule, no flags
            pattern, repl = rule
            flags = 0

        #: Pass flags by keyword: the 4th positional argument of re.sub is
        #: `count`, so the old positional call silently ignored the flags
        value = re.sub(pattern, repl, value, flags=flags)

    return value
+
+
#@TODO: Remove in 0.4.10 and fix exp in CookieJar.setCookie
def set_cookie(cj, domain, name, value, path='/', exp=None):
    """
    Store a cookie in the cookie jar.

    :param exp: expiry unix time; defaults to 180 days from *now*
                (the old default was evaluated once at import time and
                therefore frozen)
    """
    if exp is None:
        exp = time.time() + 180 * 24 * 3600

    args = [encode(x) for x in (domain, name, value, path)] + [int(exp)]
    return cj.setCookie(*args)
+
+
def set_cookies(cj, cookies):
    """
    Store every valid (domain, name, value) tuple from `cookies` in the
    cookie jar, silently skipping malformed entries.
    """
    for cookie in cookies:
        if not isinstance(cookie, tuple):
            continue

        #: Compare by value: `len(cookie) is not 3` only worked through
        #: CPython's small-int caching
        if len(cookie) != 3:
            continue

        set_cookie(cj, *cookie)
+
+
def parse_html_header(header):
    """
    Parse raw HTTP headers into a dict; keys and values are lower-cased
    and repeated keys are collected into a list.
    """
    hdict = {}
    regexp = r'[ ]*(?P<key>.+?)[ ]*:[ ]*(?P<value>.+?)[ ]*\r?\n'

    for key, value in re.findall(regexp, header.lower()):
        if key not in hdict:
            hdict[key] = value
            continue

        stored = hdict[key]
        if isinstance(stored, list):
            stored.append(value)
        else:
            hdict[key] = [stored, value]  #: Promote to list on second hit

    return hdict
+
+
def parse_html_tag_attr_value(attr_name, tag):
    """
    Return the value of attribute `attr_name` inside an html tag string,
    or None when the attribute is missing.
    """
    pattern = r"%s\s*=\s*([\"']?)((?<=\")[^\"]+|(?<=')[^']+|[^>\s\"'][^>\s]*)\1" % attr_name
    m = re.search(pattern, tag, re.I)
    if m is None:
        return None
    return m.group(2)
+
+
def parse_html_form(attr_str, html, input_names={}):
    """
    Find the first <form> whose opening tag matches `attr_str` and
    collect its input fields.

    :param attr_str: text that must appear inside the <form ...> tag
    :param html: html document to search
    :param input_names: optional {name: expected} filter; expected values
                        may be strings, tuples of strings, or compiled
                        regexes (read-only, so the shared default is safe)
    :return: (action, inputs) of the matching form, or ({}, None)
    """
    for form in re.finditer(r"(?P<TAG><form[^>]*%s[^>]*>)(?P<CONTENT>.*?)</?(form|body|html)[^>]*>" % attr_str,
                            html, re.I | re.S):
        inputs = {}
        action = parse_html_tag_attr_value("action", form.group('TAG'))

        for inputtag in re.finditer(r'(<(input|textarea)[^>]*>)([^<]*(?=</\2)|)', form.group('CONTENT'), re.I | re.S):
            name = parse_html_tag_attr_value("name", inputtag.group(1))
            if name:
                value = parse_html_tag_attr_value("value", inputtag.group(1))
                if not value:
                    inputs[name] = inputtag.group(3) or ""  #: textarea body
                else:
                    inputs[name] = value

        if not input_names:
            #: No attribute check
            return action, inputs
        else:
            #: Check input attributes
            for key, value in input_names.items():
                if key in inputs:
                    #: Compare strings with `==`: the old `is` identity test
                    #: virtually never matched freshly parsed strings
                    if isinstance(value, basestring) and inputs[key] == value:
                        continue
                    elif isinstance(value, tuple) and inputs[key] in value:
                        continue
                    elif hasattr(value, "search") and re.match(value, inputs[key]):
                        continue
                    else:
                        break  #: Attribute value does not match
                else:
                    break  #: Attribute name does not match
            else:
                return action, inputs  #: Passed attribute check

    return {}, None  #: No matching form found
+
+
def chunks(iterable, size):
    """
    Yield successive lists of at most `size` items from `iterable`.
    """
    it = iter(iterable)
    while True:
        batch = list(itertools.islice(it, size))
        if not batch:
            return
        yield batch
+
+
def renice(pid, value):
    """
    Adjust the scheduling priority of a process via the `renice` tool.
    No-op on Windows or when `value` is falsy; failures are ignored.
    """
    #: Compare by value; `os.name is "nt"` only worked via str interning
    if not value or os.name == "nt":
        return

    try:
        subprocess.Popen(["renice", str(value), str(pid)],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         bufsize=-1)
    except Exception:
        pass
+
+
def forward(source, destination):
    """
    Pump bytes from the `source` socket to `destination` until EOF, then
    shut down the write side of `destination` (even on error).
    """
    try:
        while True:
            data = source.recv(1024)
            if not data:
                break
            destination.sendall(data)
    finally:
        destination.shutdown(socket.SHUT_WR)
+
+
def compute_checksum(filename, hashtype):
    """
    Hash a file with either zlib (adler32/crc32) or any hashlib
    algorithm, reading in filesystem-block-sized chunks.

    :return: hex digest string, or None if the file is missing or the
             hash type is unknown
    """
    file = encode(filename)

    if not exists(file):
        return None

    blocksize = fsbsize()

    if hashtype in ("adler32", "crc32"):
        hashf = getattr(zlib, hashtype)
        #: NOTE(review): seeded with 0, not adler32's standard initial 1 —
        #: kept for compatibility with previously stored checksums
        checksum = 0

        with open(file, "rb") as f:
            for chunk in iter(lambda: f.read(blocksize), b''):
                checksum = hashf(chunk, checksum)

        return "%x" % checksum

    if hashtype in hashlib.algorithms_available:
        h = hashlib.new(hashtype)

        with open(file, "rb") as f:
            for chunk in iter(lambda: f.read(blocksize * h.block_size), b''):
                h.update(chunk)

        return h.hexdigest()

    return None
+
+
def copy_tree(src, dst, overwrite=False, preserve_metadata=False):
    """
    Recursively copy the `src` tree into `dst`, merging with existing
    content.

    :param overwrite: True  -> always replace existing files
                      False -> never replace existing files
                      None  -> update mode: replace only when the source
                               is newer (and implies metadata copying)
    :param preserve_metadata: also copy permission bits and timestamps
    """
    #: Metadata handling is active on request or in update mode (None)
    pmode = preserve_metadata or overwrite is None
    mtime = os.path.getmtime
    copy = shutil.copy2 if pmode else shutil.copy

    #: Fresh destination: plain copytree already preserves metadata
    if preserve_metadata and not exists(dst):
        return shutil.copytree(src, dst)

    for src_dir, dirs, files in os.walk(src, topdown=False):
        dst_dir = src_dir.replace(src, dst, 1)

        if not exists(dst_dir):
            os.makedirs(dst_dir)
            if pmode:
                shutil.copystat(src_dir, dst_dir)

        elif pmode:
            #: Parses as `overwrite or (overwrite is None and newer)`
            if overwrite or overwrite is None and mtime(src_dir) > mtime(dst_dir):
                shutil.copystat(src_dir, dst_dir)

        for filename in files:
            src_file = fsjoin(src_dir, filename)
            dst_file = fsjoin(dst_dir, filename)

            if exists(dst_file):
                #: Same tri-state rule as above, applied per file
                if overwrite or overwrite is None and mtime(src_file) > mtime(dst_file):
                    os.remove(dst_file)
                else:
                    continue

            copy(src_file, dst_dir)
+
+
def move_tree(src, dst, overwrite=False):
    """
    Recursively move the `src` tree into `dst`, merging with existing
    content; emptied source directories are removed.

    :param overwrite: True  -> always replace existing files
                      False -> keep existing files (skipped sources stay)
                      None  -> replace only when the source is newer
    """
    mtime = os.path.getmtime

    for src_dir, dirs, files in os.walk(src, topdown=False):
        dst_dir = src_dir.replace(src, dst, 1)
        del_dir = True  #: Whether src_dir may be removed afterwards

        if not exists(dst_dir):
            os.makedirs(dst_dir)
            shutil.copystat(src_dir, dst_dir)

        #: Parses as `overwrite or (overwrite is None and newer)`
        elif overwrite or overwrite is None and mtime(src_dir) > mtime(dst_dir):
            shutil.copystat(src_dir, dst_dir)

        else:
            del_dir = False  #: Existing dir kept untouched; don't remove source

        for filename in files:
            src_file = fsjoin(src_dir, filename)
            dst_file = fsjoin(dst_dir, filename)

            if exists(dst_file):
                #: Same tri-state rule as above, applied per file
                if overwrite or overwrite is None and mtime(src_file) > mtime(dst_file):
                    os.remove(dst_file)
                else:
                    continue  #: Keep the existing file; source stays behind

            shutil.move(src_file, dst_dir)

        if not del_dir:
            continue

        try:
            #: Fails (and is ignored) when skipped files were left behind
            os.rmdir(src_dir)
        except OSError:
            pass
diff --git a/module/plugins/internal/utils.py b/module/plugins/internal/utils.py
deleted file mode 100644
index 02077cffd..000000000
--- a/module/plugins/internal/utils.py
+++ /dev/null
@@ -1,482 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#@TODO: Move to utils directory 0.4.10
-
-import datetime
-import htmlentitydefs
-import itertools
-import os
-import re
-import string
-import sys
-import time
-import traceback
-import urllib
-import urlparse
-
-try:
- import HTMLParser
-
-except ImportError: #@TODO: Remove in 0.4.10
- import xml.sax.saxutils
-
-try:
- import simplejson as json
-
-except ImportError:
- import json
-
-
-class utils(object):
- __name__ = "utils"
- __type__ = "plugin"
- __version__ = "0.09"
- __status__ = "stable"
-
- __pattern__ = r'^unmatchable$'
- __config__ = []
-
- __description__ = """Dummy utils class"""
- __license__ = "GPLv3"
- __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
-
-
-def lock(fn):
- def new(*args):
- # print "Handler: %s args: %s" % (fn, args[1:])
- args[0].lock.acquire()
- try:
- return fn(*args)
-
- finally:
- args[0].lock.release()
-
- return new
-
-
-def format_time(value):
- dt = datetime.datetime(1, 1, 1) + datetime.timedelta(seconds=abs(int(value)))
- days = ("%d days and " % (dt.day - 1)) if dt.day > 1 else ""
- return days + ", ".join("%d %ss" % (getattr(dt, attr), attr) for attr in ("hour", "minute", "second")
- if getattr(dt, attr))
-
-
-def format_size(value):
- size = int(value)
- steps = 0
- sizes = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB')
- while size > 1000:
- size /= 1024.0
- steps += 1
- return "%.2f %s" % (size, sizes[steps])
-
-
-def safe_format(value, unsafe):
- """
- Returns the content of value omitting sensitive information
-
- Args:
- value: value to format
- unsafe: string or list: sensitive word(s) to remove
- """
- if isinstance(value, basestring):
- if isinstance(unsafe, basestring):
- return "'%s'" % ("**********" if value == unsafe else value)
-
- elif isinstance(unsafe, list):
- return "'%s'" % ("**********" if value in unsafe else value)
-
- elif isinstance(value, dict):
- return "{%s}" % ", ".join("'%s': %s" % (k, safe_format(v, unsafe)) for k, v in value.iteritems())
-
- elif isinstance(value, list):
- return "[%s]" % ", ".join("%s" % safe_format(v, unsafe) for v in value)
-
- elif isinstance(value, tuple):
- return "(%s)" % ", ".join("%s" % safe_format(v, unsafe) for v in value)
-
- elif isinstance(value, set):
- return "set([%s])" % ", ".join("%s" % safe_format(v, unsafe) for v in value)
-
- return repr(value)
-
-
-def compare_time(start, end):
- start = map(int, start)
- end = map(int, end)
-
- if start == end:
- return True
-
- now = list(time.localtime()[3:5])
-
- if start < end:
- if now < end:
- return True
-
- elif now > start or now < end:
- return True
-
- return False
-
-
-def uniqify(seq):
- """
- Remove duplicates from list preserving order
- Originally by Dave Kirby
- """
- seen = set()
- seen_add = seen.add
- return [x for x in seq if x not in seen and not seen_add(x)]
-
-
-def has_method(obj, name):
- """
- Check if name was defined in obj (return false if inhereted)
- """
- return hasattr(obj, '__dict__') and name in obj.__dict__
-
-
-def html_unescape(text):
- """
- Removes HTML or XML character references and entities from a text string
- """
- try:
- h = HTMLParser.HTMLParser()
- return h.unescape(text)
-
- except NameError: #@TODO: Remove in 0.4.10
- return xml.sax.saxutils.unescape(text)
-
-
-def isiterable(obj):
- return hasattr(obj, "__iter__")
-
-
-def get_console_encoding(enc):
- if os.name is "nt":
- if enc is "cp65001": #: aka UTF-8
- enc = "cp850"
- print "WARNING: Windows codepage 65001 (UTF-8) is not supported, used `%s` instead" % enc
- else:
- enc = "utf8"
-
- return enc
-
-
-#@NOTE: Revert to `decode` in Python 3
-def decode(value, encoding=None):
- """
- Encoded string (default to UTF-8) -> unicode string
- """
- if type(value) is str:
- try:
- # res = value.decode(encoding or 'utf-8')
- res = unicode(value, encoding or 'utf-8')
-
- except UnicodeDecodeError, e:
- if encoding:
- raise UnicodeDecodeError(e)
-
- encoding = get_console_encoding(sys.stdout.encoding)
- # res = value.decode(encoding)
- res = unicode(value, encoding)
-
- elif type(value) is unicode:
- res = value
-
- else:
- res = unicode(value)
-
- return res
-
-
-def encode(value, encoding=None, decoding=None):
- """
- Unicode or decoded string -> encoded string (default to UTF-8)
- """
- if type(value) is unicode:
- res = value.encode(encoding or "utf-8")
-
- # elif type(value) is str:
- # res = encode(decode(value, decoding), encoding)
-
- else:
- res = str(value)
-
- return res
-
-
-def fs_join(*args):
- """
- Like os.path.join, but encoding aware
- """
- return os.path.join(*map(encode, args))
-
-
-def exists(path):
- if os.path.exists(path):
- if os.name is "nt":
- dir, name = os.path.split(path.rstrip(os.sep))
- return name in os.listdir(dir)
- else:
- return True
- else:
- return False
-
-
-def remove_chars(value, repl):
- """
- Remove all chars in repl from string
- """
- if type(repl) is unicode:
- for badc in list(repl):
- value = value.replace(badc, "")
- return value
-
- elif type(value) is unicode:
- return value.translate(dict((ord(s), None) for s in repl))
-
- elif type(value) is str:
- return value.translate(string.maketrans("", ""), repl)
-
-
-def fixurl(url, unquote=None):
- old = url
- url = urllib.unquote(url)
-
- if unquote is None:
- unquote = url is old
-
- url = html_unescape(decode(url).decode('unicode-escape'))
- url = re.sub(r'(?<!:)/{2,}', '/', url).strip().lstrip('.')
-
- if not unquote:
- url = urllib.quote(url)
-
- return url
-
-
-def fixname(value):
- repl = '<>:"/\\|?*' if os.name is "nt" else '\0/\\"'
- return remove_chars(value, repl)
-
-
-def parse_name(value, safechar=True):
- path = fixurl(decode(value), unquote=False)
- url_p = urlparse.urlparse(path.rstrip('/'))
- name = (url_p.path.split('/')[-1] or
- url_p.query.split('=', 1)[::-1][0].split('&', 1)[0] or
- url_p.netloc.split('.', 1)[0])
-
- name = urllib.unquote(name)
- return fixname(name) if safechar else name
-
-
-def parse_size(value, unit=""): #: returns bytes
- m = re.match(r"([\d.,]+)\s*([\w^_]*)", str(value).lower())
-
- if m is None:
- return 0
-
- traffic = float(m.group(1).replace(',', '.'))
- unit = (unit.strip().lower() or m.group(2) or "byte")[0]
-
- if unit is "b":
- return int(traffic)
-
- sizes = ['b', 'k', 'm', 'g', 't', 'p', 'e']
- sizemap = dict((u, i * 10) for i, u in enumerate(sizes))
-
- increment = sizemap[unit]
- integer, decimal = map(int, ("%.3f" % traffic).split('.'))
-
- return (integer << increment) + (decimal << increment - 10)
-
-
-def str2int(value):
- try:
- return int(value)
- except:
- pass
-
- ones = ("zero", "one", "two", "three", "four", "five", "six", "seven", "eight",
- "nine", "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen",
- "sixteen", "seventeen", "eighteen", "nineteen")
- tens = ("", "", "twenty", "thirty", "forty", "fifty", "sixty", "seventy",
- "eighty", "ninety")
-
- o_tuple = [(w, i) for i, w in enumerate(ones)]
- t_tuple = [(w, i * 10) for i, w in enumerate(tens)]
-
- numwords = dict(o_tuple + t_tuple)
- tokens = re.split(r"[\s\-]+", value.lower())
-
- try:
- return sum(numwords[word] for word in tokens)
- except:
- return 0
-
-
-def parse_time(value):
- if re.search("da(il)?y|today", value):
- seconds = seconds_to_midnight()
-
- else:
- regex = re.compile(r'(\d+| (?:this|an?) )\s*(hr|hour|min|sec|)', re.I)
- seconds = sum((int(v) if v.strip() not in ("this", "a", "an") else 1) *
- {'hr': 3600, 'hour': 3600, 'min': 60, 'sec': 1, '': 1}[u.lower()]
- for v, u in regex.findall(value))
- return seconds
-
-
-def timestamp():
- return int(time.time() * 1000)
-
-
-def which(program):
- """
- Works exactly like the unix command which
- Courtesy of http://stackoverflow.com/a/377028/675646
- """
- isExe = lambda x: os.path.isfile(x) and os.access(x, os.X_OK)
-
- fpath, fname = os.path.split(program)
-
- if fpath:
- if isExe(program):
- return program
- else:
- for path in os.environ['PATH'].split(os.pathsep):
- exe_file = os.path.join(path.strip('"'), program)
- if isExe(exe_file):
- return exe_file
-
-
-def format_exc(frame=None):
- """
- Format call-stack and display exception information (if availible)
- """
- exception_info = sys.exc_info()
- callstack_list = traceback.extract_stack(frame)
- callstack_list = callstack_list[:-1]
-
- exception_desc = ""
- if exception_info[0] is not None:
- exception_callstack_list = traceback.extract_tb(exception_info[2])
- if callstack_list[-1][0] == exception_callstack_list[0][0]: #Does this exception belongs to us?
- callstack_list = callstack_list[:-1]
- callstack_list.extend(exception_callstack_list)
- exception_desc = "".join(traceback.format_exception_only(exception_info[0], exception_info[1]))
-
- traceback_str = "Traceback (most recent call last):\n"
- traceback_str += "".join(traceback.format_list(callstack_list))
- traceback_str += exception_desc
-
- return traceback_str
-
-
-def seconds_to_nexthour(strict=False):
- now = datetime.datetime.today()
- nexthour = now.replace(minute=0 if strict else 1, second=0, microsecond=0) + datetime.timedelta(hours=1)
- return (nexthour - now).seconds
-
-
-def seconds_to_midnight(utc=None, strict=False):
- if utc is None:
- now = datetime.datetime.today()
- else:
- now = datetime.datetime.utcnow() + datetime.timedelta(hours=utc)
-
- midnight = now.replace(hour=0, minute=0 if strict else 1, second=0, microsecond=0) + datetime.timedelta(days=1)
-
- return (midnight - now).seconds
-
-
-def replace_patterns(value, rules):
- for r in rules:
- try:
- pattern, repl, flags = r
-
- except ValueError:
- pattern, repl = r
- flags = 0
-
- value = re.sub(pattern, repl, value, flags)
-
- return value
-
-
-#@TODO: Remove in 0.4.10 and fix exp in CookieJar.setCookie
-def set_cookie(cj, domain, name, value, path='/', exp=time.time() + 180 * 24 * 3600):
- return cj.setCookie(encode(domain), encode(name), encode(value), encode(path), int(exp))
-
-
-def set_cookies(cj, cookies):
- for cookie in cookies:
- if isinstance(cookie, tuple) and len(cookie) == 3:
- set_cookie(cj, *cookie)
-
-
-def parse_html_tag_attr_value(attr_name, tag):
- m = re.search(r"%s\s*=\s*([\"']?)((?<=\")[^\"]+|(?<=')[^']+|[^>\s\"'][^>\s]*)\1" % attr_name, tag, re.I)
- return m.group(2) if m else None
-
-
-def parse_html_form(attr_str, html, input_names={}):
- for form in re.finditer(r"(?P<TAG><form[^>]*%s[^>]*>)(?P<CONTENT>.*?)</?(form|body|html)[^>]*>" % attr_str,
- html, re.I | re.S):
- inputs = {}
- action = parse_html_tag_attr_value("action", form.group('TAG'))
-
- for inputtag in re.finditer(r'(<(input|textarea)[^>]*>)([^<]*(?=</\2)|)', form.group('CONTENT'), re.I | re.S):
- name = parse_html_tag_attr_value("name", inputtag.group(1))
- if name:
- value = parse_html_tag_attr_value("value", inputtag.group(1))
- if not value:
- inputs[name] = inputtag.group(3) or ""
- else:
- inputs[name] = value
-
- if not input_names:
- #: No attribute check
- return action, inputs
- else:
- #: Check input attributes
- for key, val in input_names.items():
- if key in inputs:
- if isinstance(val, basestring) and inputs[key] is val:
- continue
- elif isinstance(val, tuple) and inputs[key] in val:
- continue
- elif hasattr(val, "search") and re.match(val, inputs[key]):
- continue
- else:
- break #: Attibute value does not match
- else:
- break #: Attibute name does not match
- else:
- return action, inputs #: Passed attribute check
-
- return {}, None #: No matching form found
-
-
-def chunks(iterable, size):
- it = iter(iterable)
- item = list(itertools.islice(it, size))
- while item:
- yield item
- item = list(itertools.islice(it, size))
-
-
-def renice(pid, value):
- if not value or os.name is "nt":
- return
-
- try:
- subprocess.Popen(["renice", str(value), str(pid)],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- bufsize=-1)
- except Exception:
- pass