path: root/module/plugins/internal
author    jansohn <jansohn@users.noreply.github.com>  2015-10-02 10:09:26 +0200
committer jansohn <jansohn@users.noreply.github.com>  2015-10-02 10:09:26 +0200
commit    3a08656c5665f4b8db98744fb323e64b8630e084 (patch)
tree      28f9f62ffc57888b76ca32540dbf5af3a4cfc8d0 /module/plugins/internal
parent    Merge pull request #1 from pyload/stable (diff)
parent    [Account] Improve parse_traffic method + code cosmetics (diff)
download  pyload-3a08656c5665f4b8db98744fb323e64b8630e084.tar.xz
Merge pull request #2 from pyload/stable
sync with stable
Diffstat (limited to 'module/plugins/internal')
-rw-r--r--  module/plugins/internal/Account.py        | 465
-rw-r--r--  module/plugins/internal/Addon.py          |  24
-rw-r--r--  module/plugins/internal/Base.py           | 503
-rw-r--r--  module/plugins/internal/Captcha.py        |  27
-rw-r--r--  module/plugins/internal/Container.py      |  24
-rw-r--r--  module/plugins/internal/Crypter.py        |  39
-rw-r--r--  module/plugins/internal/Extractor.py      |  57
-rw-r--r--  module/plugins/internal/Hook.py           |   2
-rw-r--r--  module/plugins/internal/Hoster.py         | 595
-rw-r--r--  module/plugins/internal/MultiAccount.py (renamed from module/plugins/internal/MultiHook.py) | 59
-rw-r--r--  module/plugins/internal/MultiCrypter.py   |   9
-rw-r--r--  module/plugins/internal/MultiHoster.py    |  24
-rw-r--r--  module/plugins/internal/OCR.py            |   8
-rw-r--r--  module/plugins/internal/Plugin.py         | 237
-rw-r--r--  module/plugins/internal/SevenZip.py       |  42
-rw-r--r--  module/plugins/internal/SimpleCrypter.py  |  53
-rw-r--r--  module/plugins/internal/SimpleHoster.py   | 309
-rw-r--r--  module/plugins/internal/UnRar.py          |  53
-rw-r--r--  module/plugins/internal/UnZip.py          |  15
-rw-r--r--  module/plugins/internal/XFSAccount.py     |  70
-rw-r--r--  module/plugins/internal/XFSCrypter.py     |  31
-rw-r--r--  module/plugins/internal/XFSHoster.py      |  51
22 files changed, 1498 insertions, 1199 deletions
diff --git a/module/plugins/internal/Account.py b/module/plugins/internal/Account.py
index 2713e8da4..ad78403de 100644
--- a/module/plugins/internal/Account.py
+++ b/module/plugins/internal/Account.py
@@ -1,19 +1,18 @@
# -*- coding: utf-8 -*-
-import copy
import random
import time
import threading
-import traceback
-from module.plugins.internal.Plugin import Plugin
-from module.utils import compare_time, lock, parseFileSize as parse_size
+from module.plugins.Plugin import SkipDownload as Skip
+from module.plugins.internal.Plugin import Plugin, parse_size
+from module.utils import compare_time, lock
class Account(Plugin):
__name__ = "Account"
__type__ = "account"
- __version__ = "0.17"
+ __version__ = "0.53"
__status__ = "testing"
__description__ = """Base account plugin"""
@@ -21,18 +20,23 @@ class Account(Plugin):
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- LOGIN_TIMEOUT = 10 * 60 #: After that time (in minutes) pyload will relogin the account
- INFO_THRESHOLD = 30 * 60 #: After that time (in minutes) account data will be reloaded
+ LOGIN_TIMEOUT = 10 * 60 #: Relogin accounts every 10 minutes
+ AUTO_TIMEOUT = True #: Automatically adjust relogin interval
def __init__(self, manager, accounts):
self._init(manager.core)
- self.lock = threading.RLock()
- self.accounts = accounts #@TODO: Remove in 0.4.10
+ self.manager = manager
+ self.lock = threading.RLock()
+
+ self.accounts = accounts #@TODO: Recheck in 0.4.10
+ self.user = None
+
+ self.interval = self.LOGIN_TIMEOUT
+ self.auto_timeout = self.interval if self.AUTO_TIMEOUT else False
self.init()
- self.init_accounts(accounts)
def init(self):
@@ -42,218 +46,172 @@ class Account(Plugin):
pass
- def login(self, user, password, data, req):
+ @property
+ def logged(self):
"""
- Login into account, the cookies will be saved so user can be recognized
+ Checks if user is still logged in
"""
- pass
-
-
- @lock
- def _login(self, user):
- try:
- info = self.info[user]
- info['login']['timestamp'] = time.time() #: Set timestamp for login
-
- self.req = self.get_request(user)
- self.login(user, info['login']['password'], info['data'], self.req)
-
- except Exception, e:
- self.log_warning(_("Could not login user `%s`") % user, e)
- res = info['login']['valid'] = False
- self.accounts[user]['valid'] = False #@TODO: Remove in 0.4.10
+ if not self.user:
+ return False
- if self.pyload.debug:
- traceback.print_exc()
+ self.sync()
+ if self.info['login']['timestamp'] + self.interval < time.time():
+ self.log_debug("Reached login timeout for user `%s`" % self.user)
+ return False
else:
- res = info['login']['valid'] = True
- self.accounts[user]['valid'] = True #@TODO: Remove in 0.4.10
-
- finally:
- self.clean()
- return res
-
+ return True
- def relogin(self, user):
- self.log_info(_("Relogin user `%s`...") % user)
- req = self.get_request(user)
- if req:
- req.clearCookies()
- self.clean()
+ @property
+ def premium(self):
+ return bool(self.get_data('premium'))
- return self._login(user)
+ def signin(self, user, password, data):
+ """
+ Login into account, the cookies will be saved so user can be recognized
+ """
+ pass
- #@TODO: Rewrite in 0.4.10
- def init_accounts(self, accounts):
- for user, data in accounts.items():
- self.add(user, data['password'], data['options'])
+ def login(self):
+ if not self.req:
+ self.log_info(_("Login user `%s`...") % self.user)
+ else:
+ self.log_info(_("Relogin user `%s`...") % self.user)
+ self.clean()
- @lock
- def add(self, user, password=None, options={}):
- if user not in self.info:
- self.info[user] = {'login': {'valid' : None,
- 'password' : password or "",
- 'timestamp': 0},
- 'data' : {'options' : options,
- 'premium' : None,
- 'validuntil' : None,
- 'trafficleft': None,
- 'maxtraffic' : None}}
-
- #@TODO: Remove in 0.4.10
- self.accounts[user] = self.info[user]['data']
- self.accounts[user].update({'login' : user,
- 'type' : self.__name__,
- 'valid' : self.info[user]['login']['valid'],
- 'password': self.info[user]['login']['password']})
-
- self.log_info(_("Login user `%s`...") % user)
- self._login(user)
- return True
+ self.req = self.pyload.requestFactory.getRequest(self.__name__, self.user)
- else:
- self.log_error(_("Error adding user `%s`") % user, _("User already exists"))
+ self.sync()
+ try:
+ self.info['login']['timestamp'] = time.time() #: Set timestamp for login
+ self.signin(self.user, self.info['login']['password'], self.info['data'])
- @lock
- def update(self, user, password=None, options={}):
- """
- Updates account and return true if anything changed
- """
- if not (password or options):
- return
+ except Skip:
+ self.info['login']['valid'] = True
+ if self.auto_timeout:
+ self.auto_timeout *= 2
+ self.interval = self.auto_timeout
- if user not in self.info:
- return self.add(user, password, options)
+ except Exception, e:
+ self.log_error(_("Could not login user `%s`") % user, e)
+ self.info['login']['valid'] = False
else:
- if password:
- self.info[user]['login']['password'] = password
- self.accounts[user]['password'] = password #@TODO: Remove in 0.4.10
- self.relogin(user)
+ self.info['login']['valid'] = True
+ if self.interval is self.auto_timeout:
+ self.interval = self.auto_timeout / 2
+ self.auto_timeout = False
- if options:
- before = self.info[user]['data']['options']
- self.info[user]['data']['options'].update(options)
- return self.info[user]['data']['options'] != before
+ finally:
+ self.syncback()
+ return bool(self.info['login']['valid'])
- return True
+ #@TODO: Recheck in 0.4.10
+ def syncback(self):
+ return self.sync(reverse=True)
- #: Deprecated method, use `update` instead (Remove in 0.4.10)
- def updateAccounts(self, *args, **kwargs):
- return self.update(*args, **kwargs)
+ #@TODO: Recheck in 0.4.10
+ def sync(self, reverse=False):
+ if not self.user:
+ return
- def remove(self, user=None): # -> def remove
- if not user:
- self.info.clear()
- self.accounts.clear() #@TODO: Remove in 0.4.10
+ u = self.accounts[self.user]
- elif user in self.info:
- self.info.pop(user, None)
- self.accounts.pop(user, None) #@TODO: Remove in 0.4.10
+ if reverse:
+ u.update(self.info['data'])
+ u.update(self.info['login'])
+ else:
+ d = {'login': {'password' : u['password'],
+ 'timestamp': u['timestamp'],
+ 'valid' : u['valid']},
+ 'data' : {'maxtraffic' : u['maxtraffic'],
+ 'options' : u['options'],
+ 'premium' : u['premium'],
+ 'trafficleft': u['trafficleft'],
+ 'validuntil' : u['validuntil']}}
- #: Deprecated method, use `remove` instead (Remove in 0.4.10)
- def removeAccount(self, *args, **kwargs):
- return self.remove(*args, **kwargs)
+ self.info.update(d)
- #@NOTE: Remove in 0.4.10?
- def get_data(self, user, reload=False):
- if not user:
- return
+ def relogin(self):
+ return self.login()
- info = self.get_info(user, reload)
- if info and 'data' in info:
- return info['data']
+ def reset(self):
+ self.sync()
- #: Deprecated method, use `get_data` instead (Remove in 0.4.10)
- def getAccountData(self, *args, **kwargs):
- if 'force' in kwargs:
- kwargs['reload'] = kwargs['force']
- kwargs.pop('force', None)
+ d = {'maxtraffic' : None,
+ 'options' : {'limitdl': ['0']},
+ 'premium' : None,
+ 'trafficleft': None,
+ 'validuntil' : None}
- data = self.get_data(*args, **kwargs) or {}
- if 'options' not in data:
- data['options'] = {'limitdl': ['0']}
+ self.info['data'].update(d)
- return data
+ self.syncback()
- def get_info(self, user, reload=False):
+ def get_info(self, refresh=True):
"""
- Retrieve account infos for an user, do **not** overwrite this method!\\
- just use it to retrieve infos in hoster plugins. see `parse_info`
+ Retrieve account infos for an user, do **not** overwrite this method!
+ just use it to retrieve infos in hoster plugins. see `grab_info`
:param user: username
- :param reload: reloads cached account information
+ :param relogin: reloads cached account information
:return: dictionary with information
"""
- if user not in self.info:
- self.log_error(_("User `%s` not found while retrieving account info") % user)
- return
+ if not self.logged:
+ if self.relogin():
+ refresh = True
+ else:
+ refresh = False
+ self.reset()
- elif reload:
- self.log_info(_("Parsing account info for user `%s`...") % user)
- info = self._parse_info(user)
+ if refresh:
+ self.log_info(_("Grabbing account info for user `%s`...") % self.user)
+ self.info = self._grab_info()
- safe_info = copy.deepcopy(info)
- safe_info['login']['password'] = "**********"
- safe_info['data']['password'] = "**********" #@TODO: Remove in 0.4.10
- self.log_debug("Account info for user `%s`: %s" % (user, safe_info))
+ self.syncback()
- elif self.INFO_THRESHOLD > 0 and self.info[user]['login']['timestamp'] + self.INFO_THRESHOLD < time.time():
- self.log_debug("Reached data timeout for %s" % user)
- info = self.get_info(user, True)
+ safe_info = dict(self.info)
+ safe_info['login']['password'] = "**********"
+ self.log_debug("Account info for user `%s`: %s" % (self.user, safe_info))
- else:
- info = self.info[user]
+ return self.info
- return info
+ def get_login(self, key=None, default=None):
+ d = self.get_info()['login']
+ return d.get(key, default) if key else d
- def is_premium(self, user):
- if not user:
- return False
- info = self.get_info(user)
- return info['data']['premium']
+ def get_data(self, key=None, default=None):
+ d = self.get_info()['data']
+ return d.get(key, default) if key else d
- def _parse_info(self, user):
- info = self.info[user]
-
- if not info['login']['valid']:
- return info
-
+ def _grab_info(self):
try:
- self.req = self.get_request(user)
- extra_info = self.parse_info(user, info['login']['password'], info, self.req)
+ data = self.grab_info(self.user, self.info['login']['password'], self.info['data'])
- if extra_info and isinstance(extra_info, dict):
- info['data'].update(extra_info)
+ if data and isinstance(data, dict):
+ self.info['data'].update(data)
- except (Fail, Exception), e:
- self.log_warning(_("Error loading info for user `%s`") % user, e)
-
- if self.pyload.debug:
- traceback.print_exc()
+ except Exception, e:
+ self.log_warning(_("Error loading info for user `%s`") % self.user, e)
finally:
- self.clean()
-
- self.info[user].update(info)
- return info
+ return self.info
- def parse_info(self, user, password, info, req):
+ def grab_info(self, user, password, data):
"""
This should be overwritten in account plugin
and retrieving account information for user
@@ -265,43 +223,105 @@ class Account(Plugin):
pass
- #: Remove in 0.4.10
- def getAllAccounts(self, *args, **kwargs):
- return [self.getAccountData(user, *args, **kwargs) for user, info in self.info.items()]
+ ###########################################################################
+ #@TODO: Recheck and move to `AccountManager` in 0.4.10 ####################
+ ###########################################################################
+ @lock
+ def init_accounts(self):
+ accounts = dict(self.accounts)
+ self.accounts.clear()
- def login_fail(self, reason=_("Login handshake has failed")):
- return self.fail(reason)
+ for user, info in accounts.items():
+ self.add(user, info['password'], info['options'])
- def get_request(self, user=None):
- if not user:
- user, info = self.select()
+ @lock
+ def getAccountData(self, user, force=False):
+ self.accounts[user]['plugin'].get_info()
+ return self.accounts[user]
- return self.pyload.requestFactory.getRequest(self.__name__, user)
+ @lock
+ def getAllAccounts(self, force=False):
+ if force:
+ self.init_accounts() #@TODO: Recheck in 0.4.10
- def get_cookies(self, user=None):
- if not user:
- user, info = self.select()
+ return [self.getAccountData(user, force) for user in self.accounts]
- return self.pyload.requestFactory.getCookieJar(self.__name__, user)
+ #@TODO: Remove in 0.4.10
+ @lock
+ def scheduleRefresh(self, user, force=False):
+ pass
- def select(self):
+
+ @lock
+ def add(self, user, password=None, options={}):
+ self.log_info(_("Adding user `%s`...") % user)
+
+ if user in self.accounts:
+ self.log_error(_("Error adding user `%s`") % user, _("User already exists"))
+ return False
+
+ d = {'login' : user,
+ 'maxtraffic' : None,
+ 'options' : options or {'limitdl': ['0']},
+ 'password' : password or "",
+ 'plugin' : self.__class__(self.manager, self.accounts),
+ 'premium' : None,
+ 'timestamp' : 0,
+ 'trafficleft': None,
+ 'type' : self.__name__,
+ 'valid' : None,
+ 'validuntil' : None}
+
+ u = self.accounts[user] = d
+ return u['plugin'].choose(user)
+
+
+ @lock
+ def updateAccounts(self, user, password=None, options={}):
"""
- Returns a valid account name and info
+ Updates account and return true if anything changed
"""
+ if user in self.accounts:
+ self.log_info(_("Updating account info for user `%s`...") % user)
+
+ u = self.accounts[user]
+ if password:
+ u['password'] = password
+
+ if options:
+ u['options'].update(options)
+
+ u['plugin'].relogin()
+
+ else:
+ self.add(user, password, options)
+
+
+ @lock
+ def removeAccount(self, user):
+ self.log_info(_("Removing user `%s`...") % user)
+ self.accounts.pop(user, None)
+ if user is self.user:
+ self.choose()
+
+
+ @lock
+ def select(self):
free_accounts = {}
premium_accounts = {}
- for user, info in self.info.items():
+ for user in self.accounts:
+ info = self.accounts[user]['plugin'].get_info()
+ data = info['data']
+
if not info['login']['valid']:
continue
- data = info['data']
-
- if "time" in data['options'] and data['options']['time']:
+ if data['options'].get('time'):
time_data = ""
try:
time_data = data['options']['time'][0]
@@ -311,7 +331,8 @@ class Account(Plugin):
continue
except Exception:
- self.log_warning(_("Wrong time format `%s` for account `%s`, use 1:22-3:44") % (user, time_data))
+ self.log_warning(_("Invalid time format `%s` for account `%s`, use 1:22-3:44")
+ % (time_data, user))
if data['trafficleft'] == 0:
continue
@@ -330,68 +351,54 @@ class Account(Plugin):
if not account_list:
return None, None
- validuntil_list = [(user, info) for user, info in account_list if info['data']['validuntil']]
+ validuntil_list = [(user, info) for user, info in account_list \
+ if info['data']['validuntil']]
if not validuntil_list:
- return random.choice(account_list) #@TODO: Random account?! Recheck in 0.4.10
+ return random.choice(account_list) #@TODO: Random account?! Rewrite in 0.4.10
return sorted(validuntil_list,
key=lambda a: a[1]['data']['validuntil'],
reverse=True)[0]
- def parse_traffic(self, value, unit=None): #: Return kilobytes
- if not unit and not isinstance(value, basestring):
- unit = "KB"
-
- return parse_size(value, unit)
-
-
- def empty(self, user):
- if user not in self.info:
- return
+ @lock
+ def choose(self, user=None):
+ """
+ Choose a valid account
+ """
+ if not user:
+ user = self.select()[0]
- self.log_warning(_("Account `%s` has not enough traffic") % user, _("Checking again in 30 minutes"))
+ elif user not in self.accounts:
+ self.log_error(_("Error choosing user `%s`") % user, _("User not exists"))
+ return False
- self.info[user]['data']['trafficleft'] = 0
- self.schedule_refresh(user, 30 * 60)
+ if user is self.user:
+ return True
+ self.user = user
+ self.info.clear()
+ self.clean()
- def expired(self, user):
- if user not in self.info:
- return
+ if self.user is not None:
+ self.login()
+ return True
- self.log_warning(_("Account `%s` is expired") % user, _("Checking again in 60 minutes"))
+ else:
+ return False
- self.info[user]['data']['validuntil'] = time.time() - 1
- self.schedule_refresh(user, 60 * 60)
+ ###########################################################################
- def schedule_refresh(self, user, time=0):
- """
- Add task to refresh account info to sheduler
- """
- self.log_debug("Scheduled refresh for user `%s` in %s seconds" % (user, time))
- self.pyload.scheduler.addJob(time, self.get_info, [user, True])
+ def parse_traffic(self, size, unit="KB"): #@NOTE: Returns kilobytes in 0.4.9
+ size = re.search(r'(\d*[\.,]?\d+)', size).group(1) #@TODO: Recheck in 0.4.10
+ return parse_size(size, unit) / 1024 #@TODO: Remove `/ 1024` in 0.4.10
- #: Deprecated method, use `schedule_refresh` instead (Remove in 0.4.10)
- def scheduleRefresh(self, *args, **kwargs):
- if 'force' in kwargs:
- kwargs.pop('force', None) #@TODO: Recheck in 0.4.10
- return self.schedule_refresh(*args, **kwargs)
+ def fail_login(self, msg=_("Login handshake has failed")):
+ return self.fail(msg)
- @lock
- def is_logged(self, user, relogin=False):
- """
- Checks if user is still logged in
- """
- if user in self.info:
- if self.LOGIN_TIMEOUT > 0 and self.info[user]['login']['timestamp'] + self.LOGIN_TIMEOUT < time.time():
- self.log_debug("Reached login timeout for %s" % user)
- return self.relogin(user) if relogin else False
- else:
- return True
- else:
- return False
+ def skip_login(self, msg=_("Already signed in")):
+ return self.skip(msg)
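
The reworked Account API above replaces the old login(user, password, data, req) / parse_info(user, password, info, req) hooks with signin(user, password, data) and grab_info(user, password, data), bound to the single user chosen via choose()/select(). A hedged sketch of a concrete account plugin against this interface; the hoster name, URLs, response markers and traffic values are invented for illustration:

# -*- coding: utf-8 -*-
#
# Hedged sketch of an account plugin against the reworked Account interface above.
# "ExampleHoster", its URLs, response markers and traffic values are invented.

import time

from module.plugins.internal.Account import Account


class ExampleHosterAccount(Account):
    __name__    = "ExampleHosterAccount"
    __type__    = "account"
    __version__ = "0.01"
    __status__  = "testing"


    def signin(self, user, password, data):
        #: Called by Account.login(); the session cookies live in self.req
        html = self.load("https://example-hoster.tld/login",
                         post={'user': user, 'password': password})

        if "Invalid credentials" in html:
            self.fail_login()


    def grab_info(self, user, password, data):
        #: Called by Account._grab_info(); the returned dict is merged into info['data']
        html = self.load("https://example-hoster.tld/account")

        return {'premium'    : "Premium member" in html,
                'validuntil' : time.time() + 30 * 24 * 3600,             #: parse from html in a real plugin
                'trafficleft': self.parse_traffic("1.5 GB", unit="GB")}  #: stored as kilobytes in 0.4.9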
diff --git a/module/plugins/internal/Addon.py b/module/plugins/internal/Addon.py
index 45ca98eac..3a252fdfb 100644
--- a/module/plugins/internal/Addon.py
+++ b/module/plugins/internal/Addon.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
-import traceback
-
from module.plugins.internal.Plugin import Plugin
@@ -25,10 +23,9 @@ def threaded(fn):
class Addon(Plugin):
__name__ = "Addon"
__type__ = "hook" #@TODO: Change to `addon` in 0.4.10
- __version__ = "0.04"
+ __version__ = "0.06"
__status__ = "testing"
- __config__ = [] #: [("name", "type", "desc", "default")]
__threaded__ = [] #@TODO: Remove in 0.4.10
__description__ = """Base addon plugin"""
@@ -57,6 +54,12 @@ class Addon(Plugin):
self.init_events()
+ #@TODO: Remove in 0.4.10
+ def _log(self, level, plugintype, pluginname, messages):
+ plugintype = "addon" if plugintype is "hook" else plugintype
+ return super(Addon, self)._log(level, plugintype, pluginname, messages)
+
+
def init_events(self):
if self.event_map:
for event, funcs in self.event_map.items():
@@ -97,8 +100,6 @@ class Addon(Plugin):
except Exception, e:
self.log_error(_("Error executing periodical task: %s") % e)
- if self.pyload.debug:
- traceback.print_exc()
self.cb = self.pyload.scheduler.addJob(self.interval, self._periodical, [threaded], threaded=threaded)
@@ -107,20 +108,17 @@ class Addon(Plugin):
pass
- def __repr__(self):
- return "<Addon %s>" % self.__name__
-
-
- def is_activated(self):
+ @property
+ def activated(self):
"""
Checks if addon is activated
"""
return self.get_config("activated")
- #: Deprecated method, use `is_activated` instead (Remove in 0.4.10)
+ #: Deprecated method, use `activated` property instead (Remove in 0.4.10)
def isActivated(self, *args, **kwargs):
- return self.is_activated(*args, **kwargs)
+ return self.activated
def deactivate(self):
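
Addon now exposes activation state through the `activated` property instead of is_activated(), and init_events() wires `event_map` entries to handler methods. A small hedged sketch using both; the event name and config entry mirror common pyload usage and are assumptions, not part of this diff:

# Hedged sketch only: the event and config names below are assumptions, not part of this diff.

from module.plugins.internal.Addon import Addon


class ExampleAddon(Addon):
    __name__    = "ExampleAddon"
    __type__    = "hook"
    __version__ = "0.01"
    __status__  = "testing"

    __config__ = [("activated", "bool", "Activated", False)]

    event_map = {'allDownloadsFinished': "all_downloads_finished"}  #: wired up by init_events()


    def all_downloads_finished(self):
        if not self.activated:  #: property replaces the old is_activated() call
            return

        self.log_info("Queue finished")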
diff --git a/module/plugins/internal/Base.py b/module/plugins/internal/Base.py
new file mode 100644
index 000000000..bc9ef9158
--- /dev/null
+++ b/module/plugins/internal/Base.py
@@ -0,0 +1,503 @@
+# -*- coding: utf-8 -*-
+
+import inspect
+import mimetypes
+import os
+import time
+import urlparse
+
+from module.plugins.internal.Captcha import Captcha
+from module.plugins.internal.Plugin import (Plugin, Abort, Fail, Reconnect, Retry, Skip,
+ decode, encode, fixurl, parse_html_form,
+ parse_name, replace_patterns)
+
+
+#@TODO: Remove in 0.4.10
+def getInfo(urls):
+ #: result = [ .. (name, size, status, url) .. ]
+ pass
+
+
+#@TODO: Remove in 0.4.10
+def parse_fileInfo(klass, url="", html=""):
+ info = klass.get_info(url, html)
+ return encode(info['name']), info['size'], info['status'], info['url']
+
+
+#@TODO: Remove in 0.4.10
+def create_getInfo(klass):
+ def get_info(urls):
+ for url in urls:
+ try:
+ url = replace_patterns(url, klass.URL_REPLACEMENTS)
+
+ except Exception:
+ pass
+
+ yield parse_fileInfo(klass, url)
+
+ return get_info
+
+
+#@NOTE: `check_abort` decorator
+def check_abort(fn):
+
+ def wrapper(self, *args, **kwargs):
+ self.check_abort()
+ return fn(self, *args, **kwargs)
+
+ return wrapper
+
+
+class Base(Plugin):
+ __name__ = "Base"
+ __type__ = "base"
+ __version__ = "0.02"
+ __status__ = "testing"
+
+ __pattern__ = r'^unmatchable$'
+ __config__ = [("use_premium", "bool", "Use premium account if available", True)]
+
+ __description__ = """Base plugin for Hoster and Crypter"""
+ __license__ = "GPLv3"
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+ def __init__(self, pyfile):
+ self._init(pyfile.m.core)
+
+ #: Engage wan reconnection
+ self.wantReconnect = False #@TODO: Change to `want_reconnect` in 0.4.10
+
+ #: Enable simultaneous processing of multiple downloads
+ self.multiDL = True #@TODO: Change to `multi_dl` in 0.4.10
+
+ #: time.time() + wait in seconds
+ self.wait_until = 0
+ self.waiting = False
+
+ #: Account handler instance, see :py:class:`Account`
+ self.account = None
+ self.user = None #@TODO: Remove in 0.4.10
+
+ #: Associated pyfile instance, see `PyFile`
+ self.pyfile = pyfile
+
+ self.thread = None #: Holds thread in future
+
+ #: Js engine, see `JsEngine`
+ self.js = self.pyload.js
+
+ #: Captcha stuff
+ self.captcha = Captcha(self)
+
+ #: Some plugins store html code here
+ self.html = None
+
+ #: Dict of the amount of retries already made
+ self.retries = {}
+
+
+ def _log(self, level, plugintype, pluginname, messages):
+ log = getattr(self.pyload.log, level)
+ msg = u" | ".join(decode(a).strip() for a in messages if a)
+ log("%(plugintype)s %(pluginname)s[%(id)s]: %(msg)s"
+ % {'plugintype': plugintype.upper(),
+ 'pluginname': pluginname,
+ 'id' : self.pyfile.id,
+ 'msg' : msg})
+
+
+ @classmethod
+ def get_info(cls, url="", html=""):
+ url = fixurl(url)
+ info = {'name' : parse_name(url),
+ 'size' : 0,
+ 'status': 3 if url else 8,
+ 'url' : url}
+
+ return info
+
+
+ def init(self):
+ """
+ Initialize the plugin (in addition to `__init__`)
+ """
+ pass
+
+
+ def setup(self):
+ """
+ Setup for environment and other things, called before downloading (possibly more than one time)
+ """
+ pass
+
+
+ def _setup(self):
+ #@TODO: Remove in 0.4.10
+ self.html = ""
+ self.pyfile.error = ""
+ self.last_html = None
+
+ if self.get_config('use_premium', True):
+ self.load_account() #@TODO: Move to PluginThread in 0.4.10
+ else:
+ self.account = False
+ self.user = None #@TODO: Remove in 0.4.10
+
+ try:
+ self.req.close()
+ except Exception:
+ pass
+
+ if self.account:
+ self.req = self.pyload.requestFactory.getRequest(self.__name__, self.account.user)
+ self.chunk_limit = -1 #: -1 for unlimited
+ self.resume_download = True
+ self.premium = self.account.premium
+ else:
+ self.req = self.pyload.requestFactory.getRequest(self.__name__)
+ self.chunk_limit = 1
+ self.resume_download = False
+ self.premium = False
+
+ self.setup()
+
+
+ def load_account(self):
+ if not self.account:
+ self.account = self.pyload.accountManager.getAccountPlugin(self.__name__)
+
+ if not self.account:
+ self.account = False
+ self.user = None #@TODO: Remove in 0.4.10
+
+ else:
+ self.account.choose()
+ self.user = self.account.user #@TODO: Remove in 0.4.10
+ if self.account.user is None:
+ self.account = False
+
+
+ def _process(self, thread):
+ """
+ Handles important things to do before starting
+ """
+ self.thread = thread
+
+ self._setup()
+
+ # self.pyload.hookManager.downloadPreparing(self.pyfile) #@TODO: Recheck in 0.4.10
+ self.check_abort()
+
+ self.pyfile.setStatus("starting")
+
+ self.log_debug("PROCESS URL " + self.pyfile.url, "PLUGIN VERSION %s" % self.__version__)
+ self.process(self.pyfile)
+
+
+ #: Deprecated method, use `_process` instead (Remove in 0.4.10)
+ def preprocessing(self, *args, **kwargs):
+ return self._process(*args, **kwargs)
+
+
+ def process(self, pyfile):
+ """
+ The "main" method of every hoster plugin, you **have to** overwrite it
+ """
+ raise NotImplementedError
+
+
+ def set_reconnect(self, reconnect):
+ self.log_debug("RECONNECT %s required" % ("" if reconnect else "not"),
+ "Previous wantReconnect: %s" % self.wantReconnect)
+ self.wantReconnect = bool(reconnect)
+
+
+ def set_wait(self, seconds, reconnect=None):
+ """
+ Set a specific wait time later used with `wait`
+
+ :param seconds: wait time in seconds
+ :param reconnect: True if a reconnect would avoid wait time
+ """
+ wait_time = max(int(seconds), 1)
+ wait_until = time.time() + wait_time + 1
+
+ self.log_debug("WAIT set to %d seconds" % wait_time,
+ "Previous waitUntil: %f" % self.pyfile.waitUntil)
+
+ self.pyfile.waitUntil = wait_until
+
+ if reconnect is not None:
+ self.set_reconnect(reconnect)
+
+
+ def wait(self, seconds=None, reconnect=None):
+ """
+ Waits the time previously set
+ """
+ pyfile = self.pyfile
+
+ if seconds is not None:
+ self.set_wait(seconds)
+
+ if reconnect is not None:
+ self.set_reconnect(reconnect)
+
+ self.waiting = True
+
+ status = pyfile.status #@NOTE: Recheck in 0.4.10
+ pyfile.setStatus("waiting")
+
+ self.log_info(_("Waiting %d seconds...") % (pyfile.waitUntil - time.time()))
+
+ if self.wantReconnect:
+ self.log_info(_("Requiring reconnection..."))
+ if self.account:
+ self.log_warning("Ignore reconnection due logged account")
+
+ if not self.wantReconnect or self.account:
+ while pyfile.waitUntil > time.time():
+ self.check_abort()
+ time.sleep(2)
+
+ else:
+ while pyfile.waitUntil > time.time():
+ self.check_abort()
+ self.thread.m.reconnecting.wait(1)
+
+ if self.thread.m.reconnecting.isSet():
+ self.waiting = False
+ self.wantReconnect = False
+ raise Reconnect
+
+ time.sleep(2)
+
+ self.waiting = False
+ pyfile.status = status #@NOTE: Recheck in 0.4.10
+
+
+ def skip(self, msg=""):
+ """
+ Skip and give msg
+ """
+ raise Skip(encode(msg or self.pyfile.error or self.pyfile.pluginname)) #@TODO: Remove `encode` in 0.4.10
+
+
+ #@TODO: Remove in 0.4.10
+ def fail(self, msg):
+ """
+ Fail and give msg
+ """
+ msg = msg.strip()
+
+ if msg:
+ self.pyfile.error = msg
+ else:
+ msg = self.pyfile.error or (self.info['error'] if 'error' in self.info else self.pyfile.getStatusName())
+
+ raise Fail(encode(msg)) #@TODO: Remove `encode` in 0.4.10
+
+
+ def error(self, msg="", type=_("Parse")):
+ type = _("%s error") % type.strip().capitalize() if type else _("Unknown")
+ msg = _("%(type)s: %(msg)s | Plugin may be out of date"
+ % {'type': type, 'msg': msg or self.pyfile.error})
+
+ self.fail(msg)
+
+
+ def abort(self, msg=""):
+ """
+ Abort and give msg
+ """
+ if msg: #@TODO: Remove in 0.4.10
+ self.pyfile.error = encode(msg)
+
+ raise Abort
+
+
+ #@TODO: Recheck in 0.4.10
+ def offline(self, msg=""):
+ """
+ Fail and indicate file is offline
+ """
+ self.fail("offline")
+
+
+ #@TODO: Recheck in 0.4.10
+ def temp_offline(self, msg=""):
+ """
+ Fail and indicate the file is temporarily offline, the core may take consequences
+ """
+ self.fail("temp. offline")
+
+
+ def retry(self, attemps=5, wait=1, msg=""):
+ """
+ Retries and begin again from the beginning
+
+ :param attemps: number of maximum retries
+ :param wait: time to wait in seconds before retry
+ :param msg: message passed to fail if attemps value was reached
+ """
+ id = inspect.currentframe().f_back.f_lineno
+ if id not in self.retries:
+ self.retries[id] = 0
+
+ if 0 < attemps <= self.retries[id]:
+ self.fail(msg or _("Max retries reached"))
+
+ self.wait(wait, False)
+
+ self.retries[id] += 1
+ raise Retry(encode(msg)) #@TODO: Remove `encode` in 0.4.10
+
+
+ def retry_captcha(self, attemps=10, wait=1, msg=_("Max captcha retries reached")):
+ self.captcha.invalid()
+ self.retry(attemps, wait, msg)
+
+
+ def fixurl(self, url, baseurl=None, unquote=True):
+ url = fixurl(url)
+
+ if not baseurl:
+ baseurl = fixurl(self.pyfile.url)
+
+ if not urlparse.urlparse(url).scheme:
+ url_p = urlparse.urlparse(baseurl)
+ baseurl = "%s://%s" % (url_p.scheme, url_p.netloc)
+ url = urlparse.urljoin(baseurl, url)
+
+ return fixurl(url, unquote)
+
+
+ @check_abort
+ def load(self, *args, **kwargs):
+ return super(Base, self).load(*args, **kwargs)
+
+
+ def check_abort(self):
+ if not self.pyfile.abort:
+ return
+
+ if self.pyfile.status is 8:
+ self.fail()
+
+ elif self.pyfile.status is 4:
+ self.skip(self.pyfile.statusname)
+
+ elif self.pyfile.status is 1:
+ self.offline()
+
+ elif self.pyfile.status is 6:
+ self.temp_offline()
+
+ else:
+ self.abort()
+
+
+ def direct_link(self, url, follow_location=None):
+ link = ""
+
+ if follow_location is None:
+ redirect = 1
+
+ elif type(follow_location) is int:
+ redirect = max(follow_location, 1)
+
+ else:
+ redirect = self.get_config("maxredirs", 10, "UserAgentSwitcher")
+
+ for i in xrange(redirect):
+ try:
+ self.log_debug("Redirect #%d to: %s" % (i, url))
+ header = self.load(url, just_header=True)
+
+ except Exception: #: Bad bad bad... rewrite this part in 0.4.10
+ req = self.pyload.requestFactory.getRequest(self.__name__)
+ res = self.load(url,
+ just_header=True,
+ req=req)
+
+ header = {'code': req.code}
+ for line in res.splitlines():
+ line = line.strip()
+ if not line or ":" not in line:
+ continue
+
+ key, none, value = line.partition(":")
+ key = key.lower().strip()
+ value = value.strip()
+
+ if key in header:
+ if type(header[key]) is list:
+ header[key].append(value)
+ else:
+ header[key] = [header[key], value]
+ else:
+ header[key] = value
+
+ if 'content-disposition' in header:
+ link = url
+
+ elif header.get('location'):
+ location = self.fixurl(header['location'], url)
+
+ if header.get('code') == 302:
+ link = location
+
+ if follow_location:
+ url = location
+ continue
+
+ else:
+ extension = os.path.splitext(parse_name(url))[-1]
+
+ if header.get('content-type'):
+ mimetype = header['content-type'].split(';')[0].strip()
+
+ elif extension:
+ mimetype = mimetypes.guess_type(extension, False)[0] or "application/octet-stream"
+
+ else:
+ mimetype = ""
+
+ if mimetype and (link or 'html' not in mimetype):
+ link = url
+ else:
+ link = ""
+
+ break
+
+ else:
+ try:
+ self.log_error(_("Too many redirects"))
+
+ except Exception:
+ pass
+
+ return link
+
+
+ def parse_html_form(self, attr_str="", input_names={}):
+ return parse_html_form(attr_str, self.html, input_names)
+
+
+ def get_password(self):
+ """
+ Get the password the user provided in the package
+ """
+ return self.pyfile.package().password or ""
+
+
+ def clean(self):
+ """
+ Clean everything and remove references
+ """
+ super(Base, self).clean()
+
+ for attr in ("account", "html", "pyfile", "thread"):
+ if hasattr(self, attr):
+ setattr(self, attr, None)
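
Base now holds the machinery shared by hosters and crypters: account selection (_setup/load_account), wait/retry handling, abort checks and direct_link() probing. A rough sketch of a hoster built on top of it via the reworked Hoster class further down; the hoster name, pattern and link handling are invented:

# Rough usage sketch; "ExampleHoster", its pattern and the download paths are invented.
# Hoster (reworked further down in this diff) supplies download(); Base supplies the rest.

from module.plugins.internal.Hoster import Hoster


class ExampleHoster(Hoster):
    __name__    = "ExampleHoster"
    __type__    = "hoster"
    __version__ = "0.01"
    __status__  = "testing"

    __pattern__ = r'https?://(?:www\.)?example-hoster\.tld/\w+'


    def process(self, pyfile):
        #: direct_link() (from Base) follows redirects until the response looks
        #: like a file (content-disposition header or a non-html content type)
        link = self.direct_link(pyfile.url)

        if not link:
            self.html = self.load(pyfile.url)

            if "File not found" in self.html:
                self.offline()

            self.wait(10)  #: honours pyfile.waitUntil, abort and reconnect flags
            link = self.fixurl("/dl/" + pyfile.url.rsplit("/", 1)[-1])

        self.download(link, disposition=True)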
diff --git a/module/plugins/internal/Captcha.py b/module/plugins/internal/Captcha.py
index c08050ee8..a8f48b5e4 100644
--- a/module/plugins/internal/Captcha.py
+++ b/module/plugins/internal/Captcha.py
@@ -4,7 +4,6 @@ from __future__ import with_statement
import os
import time
-import traceback
from module.plugins.internal.Plugin import Plugin
@@ -12,7 +11,7 @@ from module.plugins.internal.Plugin import Plugin
class Captcha(Plugin):
__name__ = "Captcha"
__type__ = "captcha"
- __version__ = "0.42"
+ __version__ = "0.46"
__status__ = "testing"
__description__ = """Base anti-captcha plugin"""
@@ -50,18 +49,17 @@ class Captcha(Plugin):
pass
- def decrypt(self, url, get={}, post={}, ref=False, cookies=False, decode=False,
+ def decrypt(self, url, get={}, post={}, ref=False, cookies=True, decode=False, req=None,
input_type='jpg', output_type='textual', ocr=True, timeout=120):
- img = self.load(url, get=get, post=post, ref=ref, cookies=cookies, decode=decode)
- return self._decrypt(img, input_type, output_type, ocr, timeout)
+ img = self.load(url, get=get, post=post, ref=ref, cookies=cookies, decode=decode, req=req or self.plugin.req)
+ return self.decrypt_image(img, input_type, output_type, ocr, timeout)
- #@TODO: Definitely choose a better name for this method!
- def _decrypt(self, raw, input_type='jpg', output_type='textual', ocr=False, timeout=120):
+ def decrypt_image(self, data, input_type='jpg', output_type='textual', ocr=False, timeout=120):
"""
Loads a captcha and decrypts it with ocr, plugin, user input
- :param raw: image raw data
+ :param data: image raw data
:param get: get part for request
:param post: post part for request
:param cookies: True if cookies should be enabled
@@ -77,7 +75,7 @@ class Captcha(Plugin):
time_ref = ("%.2f" % time.time())[-6:].replace(".", "")
with open(os.path.join("tmp", "captcha_image_%s_%s.%s" % (self.plugin.__name__, time_ref, input_type)), "wb") as tmp_img:
- tmp_img.write(raw)
+ tmp_img.write(data)
if ocr:
if isinstance(ocr, basestring):
@@ -90,14 +88,13 @@ class Captcha(Plugin):
captchaManager = self.pyload.captchaManager
try:
- self.task = captchaManager.newTask(raw, input_type, tmp_img.name, output_type)
+ self.task = captchaManager.newTask(data, input_type, tmp_img.name, output_type)
captchaManager.handleCaptcha(self.task)
self.task.setWaiting(max(timeout, 50)) #@TODO: Move to `CaptchaManager` in 0.4.10
while self.task.isWaiting():
- if self.plugin.pyfile.abort:
- self.plugin.abort()
+ self.plugin.check_abort()
time.sleep(1)
finally:
@@ -107,8 +104,7 @@ class Captcha(Plugin):
self.fail(self.task.error)
elif not self.task.result:
- self.invalid()
- self.plugin.retry(reason=_("No captcha result obtained in appropiate time"))
+ self.plugin.retry_captcha(msg=_("No captcha result obtained in appropriate time"))
result = self.task.result
@@ -118,9 +114,8 @@ class Captcha(Plugin):
except OSError, e:
self.log_warning(_("Error removing: %s") % tmp_img.name, e)
- traceback.print_exc()
- self.log_info(_("Captcha result: ") + result) #@TODO: Remove from here?
+ #self.log_info(_("Captcha result: ") + result) #@TODO: Remove from here?
return result
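
Captcha.decrypt() now loads the image through the owning plugin's request object and delegates to decrypt_image(), while bad solves are reported through the plugin's retry_captcha(). A hedged sketch of the calling side; the captcha URL and form field names are invented:

# Hedged calling-side sketch; the captcha URL and the form field names are invented.

from module.plugins.internal.Hoster import Hoster


class ExampleCaptchaHoster(Hoster):
    __name__    = "ExampleCaptchaHoster"
    __type__    = "hoster"
    __version__ = "0.01"
    __status__  = "testing"

    __pattern__ = r'https?://(?:www\.)?example-hoster\.tld/\w+'


    def process(self, pyfile):
        self.html = self.load(pyfile.url)

        #: decrypt() fetches the image with the plugin's request object (req defaults
        #: to self.plugin.req) and hands it to decrypt_image() for OCR / manual solving
        code = self.captcha.decrypt("https://example-hoster.tld/captcha.jpg",
                                    input_type='jpg', output_type='textual')

        self.html = self.load(pyfile.url, post={'captcha_code': code})

        if "Wrong captcha" in self.html:
            self.retry_captcha()  #: invalidates the task, then retries via Base.retry()

        self.download(pyfile.url, post={'captcha_code': code, 'download': "1"})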
diff --git a/module/plugins/internal/Container.py b/module/plugins/internal/Container.py
index 729592a0d..2300c4cab 100644
--- a/module/plugins/internal/Container.py
+++ b/module/plugins/internal/Container.py
@@ -4,7 +4,6 @@ from __future__ import with_statement
import os
import re
-import traceback
from module.plugins.internal.Crypter import Crypter
from module.plugins.internal.Plugin import exists
@@ -14,15 +13,15 @@ from module.utils import save_join as fs_join
class Container(Crypter):
__name__ = "Container"
__type__ = "container"
- __version__ = "0.06"
+ __version__ = "0.07"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
- __config__ = [] #: [("name", "type", "desc", "default")]
__description__ = """Base container decrypter plugin"""
__license__ = "GPLv3"
- __authors__ = [("mkaay", "mkaay@mkaay.de")]
+ __authors__ = [("mkaay" , "mkaay@mkaay.de" ),
+ ("Walter Purcaro", "vuolter@gmail.com")]
def process(self, pyfile):
@@ -44,11 +43,6 @@ class Container(Crypter):
self._create_packages()
- #: Deprecated method, use `_load2disk` instead (Remove in 0.4.10)
- def loadToDisk(self, *args, **kwargs):
- return self._load2disk(*args, **kwargs)
-
-
def _load2disk(self):
"""
Loads container to disk if its stored remotely and overwrite url,
@@ -63,20 +57,18 @@ class Container(Crypter):
f.write(content)
except IOError, e:
- self.fail(str(e)) #@TODO: Remove `str` in 0.4.10
+ self.fail(e)
else:
self.pyfile.name = os.path.basename(self.pyfile.url)
+
if not exists(self.pyfile.url):
if exists(fs_join(pypath, self.pyfile.url)):
self.pyfile.url = fs_join(pypath, self.pyfile.url)
else:
self.fail(_("File not exists"))
-
-
- #: Deprecated method, use `delete_tmp` instead (Remove in 0.4.10)
- def deleteTmp(self, *args, **kwargs):
- return self.delete_tmp(*args, **kwargs)
+ else:
+ self.data = self.pyfile.url
def delete_tmp(self):
@@ -87,5 +79,3 @@ class Container(Crypter):
os.remove(self.pyfile.url)
except OSError, e:
self.log_warning(_("Error removing: %s") % self.pyfile.url, e)
- if self.pyload.debug:
- traceback.print_exc()
diff --git a/module/plugins/internal/Crypter.py b/module/plugins/internal/Crypter.py
index d0e8eb1b4..a5c88aed9 100644
--- a/module/plugins/internal/Crypter.py
+++ b/module/plugins/internal/Crypter.py
@@ -1,19 +1,18 @@
# -*- coding: utf-8 -*-
-import urlparse
-
-from module.plugins.internal.Hoster import Hoster, _fixurl
+from module.plugins.internal.Base import Base, parse_name
from module.utils import save_path as safe_filename
-class Crypter(Hoster):
+class Crypter(Base):
__name__ = "Crypter"
__type__ = "crypter"
- __version__ = "0.07"
+ __version__ = "0.11"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
- __config__ = [("use_subfolder", "bool", "Save package to subfolder", True), #: Overrides pyload.config.get("general", "folder_per_package")
+ __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
+ ("use_subfolder" , "bool", "Save package to subfolder" , True), #: Overrides pyload.config.get("general", "folder_per_package")
("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
__description__ = """Base decrypter plugin"""
@@ -21,9 +20,6 @@ class Crypter(Hoster):
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- html = None #: Last html loaded #@TODO: Move to Hoster
-
-
def __init__(self, pyfile):
super(Crypter, self).__init__(pyfile)
@@ -33,6 +29,16 @@ class Crypter(Hoster):
#: List of urls, pyLoad will generate packagenames
self.urls = []
+ self._setup()
+ self.init()
+
+
+ def _setup(self):
+ super(Crypter, self)._setup()
+
+ self.packages = []
+ self.urls = []
+
def process(self, pyfile):
"""
@@ -50,6 +56,9 @@ class Crypter(Hoster):
def decrypt(self, pyfile):
+ """
+ The "main" method of every crypter plugin, you **have to** overwrite it
+ """
raise NotImplementedError
@@ -78,13 +87,15 @@ class Crypter(Hoster):
"%d links" % len(links),
"Saved to folder: %s" % folder if folder else "Saved to download folder")
- pid = self.pyload.api.addPackage(name, map(self.fixurl, links), package_queue)
+ links = map(self.fixurl, links)
+
+ pid = self.pyload.api.addPackage(name, links, package_queue)
if package_password:
self.pyload.api.setPackageData(pid, {'password': package_password})
#: Workaround to do not break API addPackage method
- set_folder = lambda x: self.pyload.api.setPackageData(pid, {'folder': x or ""})
+ set_folder = lambda x="": self.pyload.api.setPackageData(pid, {'folder': safe_filename(x)})
if use_subfolder:
if not subfolder_per_package:
@@ -93,10 +104,10 @@ class Crypter(Hoster):
elif not folder_per_package or name is not folder:
if not folder:
- folder = urlparse.urlparse(_fixurl(name)).path.split("/")[-1]
+ folder = parse_name(name)
- set_folder(safe_filename(folder))
+ set_folder(folder)
self.log_debug("Set package %(name)s folder to: %(folder)s" % {'name': name, 'folder': folder})
elif folder_per_package:
- set_folder(None)
+ set_folder()
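
Crypter.decrypt(pyfile) is the single hook a decrypter overrides; collected links go into self.urls (pyLoad generates the package name) or into self.packages as (name, links, folder) tuples handled by _create_packages. A minimal sketch, with an invented folder URL and link regex:

# Minimal decrypter sketch; the folder URL pattern and link regex are invented.

import re

from module.plugins.internal.Crypter import Crypter


class ExampleFolder(Crypter):
    __name__    = "ExampleFolder"
    __type__    = "crypter"
    __version__ = "0.01"
    __status__  = "testing"

    __pattern__ = r'https?://(?:www\.)?example-hoster\.tld/folder/\w+'


    def decrypt(self, pyfile):
        self.html = self.load(pyfile.url)

        #: Plain links: pyLoad generates the package name from them
        self.urls = re.findall(r'href="(https?://example-hoster\.tld/\w+)"', self.html)

        if not self.urls:
            self.fail("No links found")

        #: Alternatively, named packages: self.packages.append((name, links, folder))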
diff --git a/module/plugins/internal/Extractor.py b/module/plugins/internal/Extractor.py
index 7f5212090..3ab5d6a0d 100644
--- a/module/plugins/internal/Extractor.py
+++ b/module/plugins/internal/Extractor.py
@@ -5,6 +5,7 @@ import re
from module.PyFile import PyFile
from module.plugins.internal.Plugin import Plugin
+from module.utils import fs_encode
class ArchiveError(Exception):
@@ -22,7 +23,7 @@ class PasswordError(Exception):
class Extractor(Plugin):
__name__ = "Extractor"
__type__ = "extractor"
- __version__ = "0.33"
+ __version__ = "0.35"
__status__ = "testing"
__description__ = """Base extractor plugin"""
@@ -43,15 +44,9 @@ class Extractor(Plugin):
@classmethod
- def is_multipart(cls, filename):
- return False
-
-
- @classmethod
def find(cls):
"""
Check if system statisfy dependencies
- :return: boolean
"""
pass
@@ -72,9 +67,15 @@ class Extractor(Plugin):
if pname not in processed:
processed.append(pname)
targets.append((fname, id, fout))
+
return targets
+ @property
+ def target(self):
+ return fs_encode(self.filename)
+
+
def __init__(self, plugin, filename, out,
fullpath=True,
overwrite=False,
@@ -119,53 +120,29 @@ class Extractor(Plugin):
(self.__name__,) + messages)
- def check(self):
+ def verify(self, password=None):
"""
- Quick Check by listing content of archive.
- Raises error if password is needed, integrity is questionable or else.
-
- :raises PasswordError
- :raises CRCError
- :raises ArchiveError
+ Testing with the Extractor's built-in method
+ Raise error if password is needed, integrity is questionable or else
"""
- raise NotImplementedError
-
-
- def verify(self):
- """
- Testing with Extractors buildt-in method
- Raises error if password is needed, integrity is questionable or else.
-
- :raises PasswordError
- :raises CRCError
- :raises ArchiveError
- """
- raise NotImplementedError
+ pass
def repair(self):
- return None
+ return False
def extract(self, password=None):
"""
- Extract the archive. Raise specific errors in case of failure.
-
- :param progress: Progress function, call this to update status
- :param password password to use
- :raises PasswordError
- :raises CRCError
- :raises ArchiveError
- :return:
+ Extract the archive
+ Raise specific errors in case of failure
"""
raise NotImplementedError
- def get_delete_files(self):
+ def items(self):
"""
- Return list of files to delete, do *not* delete them here.
-
- :return: List with paths of files to delete
+ Return list of archive parts
"""
return [self.filename]
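
The Extractor interface above now distinguishes verify() (built-in test, raising on bad password or corruption), extract(), repair() and items() (list of archive parts). A hedged sketch of a subclass; the exampletool binary, its flags and the self.out attribute (taken from the constructor argument shown above) are assumptions:

# Hedged extractor sketch; "exampletool", its flags and self.out are assumptions.

import subprocess

from module.plugins.internal.Extractor import ArchiveError, Extractor, PasswordError


class ExampleTool(Extractor):
    __name__    = "ExampleTool"
    __version__ = "0.01"
    __status__  = "testing"

    EXTENSIONS = ["exa"]


    @classmethod
    def find(cls):
        #: Check if the system satisfies dependencies (the binary name is made up)
        try:
            subprocess.call(["exampletool", "--version"])
            return True

        except OSError:
            return False


    def verify(self, password=None):
        #: Raise if a password is needed or the archive is broken
        p = subprocess.Popen(["exampletool", "-t", self.target],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()

        if "password" in err.lower():
            raise PasswordError
        elif p.returncode:
            raise ArchiveError(err.strip())


    def extract(self, password=None):
        cmd = ["exampletool", "-x", self.target, "-o", self.out]
        if password:
            cmd += ["-p", password]

        if subprocess.call(cmd):
            raise ArchiveError("Extraction failed")


    def items(self):
        #: Return list of archive parts (single-part archive here)
        return [self.filename]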
diff --git a/module/plugins/internal/Hook.py b/module/plugins/internal/Hook.py
index 1f566f824..8ae731a7f 100644
--- a/module/plugins/internal/Hook.py
+++ b/module/plugins/internal/Hook.py
@@ -9,8 +9,6 @@ class Hook(Addon):
__version__ = "0.13"
__status__ = "testing"
- __config__ = [] #: [("name", "type", "desc", "default")]
-
__description__ = """Base hook plugin"""
__license__ = "GPLv3"
__authors__ = [("mkaay" , "mkaay@mkaay.de" ),
diff --git a/module/plugins/internal/Hoster.py b/module/plugins/internal/Hoster.py
index a0cdb1e2e..26da436a5 100644
--- a/module/plugins/internal/Hoster.py
+++ b/module/plugins/internal/Hoster.py
@@ -2,352 +2,111 @@
from __future__ import with_statement
-import inspect
import os
-import random
-import time
-import traceback
-import urlparse
-
-from module.plugins.internal.Captcha import Captcha
-from module.plugins.internal.Plugin import (Plugin, Abort, Fail, Reconnect, Retry, Skip,
- chunks, encode, exists, fixurl as _fixurl, replace_patterns,
- seconds_to_midnight, set_cookie, set_cookies, parse_html_form,
- parse_html_tag_attr_value, timestamp)
-from module.utils import fs_decode, fs_encode, save_join as fs_join, save_path as safe_filename
-
-
-#@TODO: Remove in 0.4.10
-def parse_fileInfo(klass, url="", html=""):
- info = klass.get_info(url, html)
- return info['name'], info['size'], info['status'], info['url']
-
-
-#@TODO: Remove in 0.4.10
-def getInfo(urls):
- #: result = [ .. (name, size, status, url) .. ]
- pass
+import re
-
-#@TODO: Remove in 0.4.10
-def create_getInfo(klass):
- def get_info(urls):
- for url in urls:
- if hasattr(klass, "URL_REPLACEMENTS"):
- url = replace_patterns(url, klass.URL_REPLACEMENTS)
- yield parse_fileInfo(klass, url)
-
- return get_info
+from module.plugins.internal.Base import Base, check_abort, create_getInfo, getInfo, parse_fileInfo
+from module.plugins.internal.Plugin import Fail, Retry, encode, exists, fixurl, parse_name
+from module.utils import fs_decode, fs_encode, save_join as fs_join, save_path as safe_filename
-class Hoster(Plugin):
+class Hoster(Base):
__name__ = "Hoster"
__type__ = "hoster"
- __version__ = "0.19"
+ __version__ = "0.34"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
- __config__ = [] #: [("name", "type", "desc", "default")]
+ __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
+ ("fallback_premium", "bool", "Fallback to free download if premium fails", True),
+ ("chk_filesize" , "bool", "Check file size" , True)]
__description__ = """Base hoster plugin"""
__license__ = "GPLv3"
- __authors__ = [("RaNaN" , "RaNaN@pyload.org" ),
- ("spoob" , "spoob@pyload.org" ),
- ("mkaay" , "mkaay@mkaay.de" ),
- ("Walter Purcaro", "vuolter@gmail.com")]
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
def __init__(self, pyfile):
- self._init(pyfile.m.core)
-
- #: Engage wan reconnection
- self.wantReconnect = False #@TODO: Change to `want_reconnect` in 0.4.10
+ super(Hoster, self).__init__(pyfile)
#: Enable simultaneous processing of multiple downloads
- self.multiDL = True #@TODO: Change to `multi_dl` in 0.4.10
self.limitDL = 0 #@TODO: Change to `limit_dl` in 0.4.10
- #: time.time() + wait in seconds
- self.wait_until = 0
- self.waiting = False
-
- #: Account handler instance, see :py:class:`Account`
- self.account = None
- self.user = None
- self.req = None #: Browser instance, see `network.Browser`
-
- #: Associated pyfile instance, see `PyFile`
- self.pyfile = pyfile
-
- self.thread = None #: Holds thread in future
-
#: Location where the last call to download was saved
- self.last_download = ""
+ self.last_download = None
#: Re match of the last call to `checkDownload`
self.last_check = None
- #: Js engine, see `JsEngine`
- self.js = self.pyload.js
-
- #: Captcha stuff
- self.captcha = Captcha(self)
-
- #: Some plugins store html code here
- self.html = None
-
- #: Dict of the amount of retries already made
- self.retries = {}
- self.retry_free = False #@TODO: Recheck in 0.4.10
+ #: Restart flag
+ self.rst_free = False #@TODO: Recheck in 0.4.10
self._setup()
self.init()
- @classmethod
- def get_info(cls, url="", html=""):
- url = _fixurl(url)
- url_p = urlparse.urlparse(url)
- return {'name' : (url_p.path.split('/')[-1] or
- url_p.query.split('=', 1)[::-1][0].split('&', 1)[0] or
- url_p.netloc.split('.', 1)[0]),
- 'size' : 0,
- 'status': 3 if url else 8,
- 'url' : url}
-
-
- def init(self):
- """
- Initialize the plugin (in addition to `__init__`)
- """
- pass
-
-
- def setup(self):
- """
- Setup for enviroment and other things, called before downloading (possibly more than one time)
- """
- pass
-
-
def _setup(self):
- if self.account:
- self.req = self.pyload.requestFactory.getRequest(self.__name__, self.user)
- self.chunk_limit = -1 #: -1 for unlimited
- self.resume_download = True
- self.premium = self.account.is_premium(self.user)
- else:
- self.req = self.pyload.requestFactory.getRequest(self.__name__)
- self.chunk_limit = 1
- self.resume_download = False
- self.premium = False
-
-
- def load_account(self):
- if self.req:
- self.req.close()
+ super(Hoster, self)._setup()
- if not self.account:
- self.account = self.pyload.accountManager.getAccountPlugin(self.__name__)
+ self.last_download = None
+ self.last_check = None
+ self.rst_free = False
- if self.account:
- if not self.user:
- self.user = self.account.select()[0]
- if not self.user or not self.account.is_logged(self.user, True):
- self.account = False
+ def load_account(self):
+ if self.rst_free:
+ self.account = False
+ self.user = None #@TODO: Remove in 0.4.10
+ else:
+ super(Hoster, self).load_account()
+ # self.rst_free = False
- def preprocessing(self, thread):
+ def _process(self, thread):
"""
Handles important things to do before starting
"""
self.thread = thread
- if self.retry_free:
- self.account = False
- else:
- self.load_account() #@TODO: Move to PluginThread in 0.4.10
- self.retry_free = False
-
self._setup()
- self.setup()
-
- self.pyload.hookManager.downloadPreparing(self.pyfile) #@TODO: Recheck in 0.4.10
- if self.pyfile.abort:
- self.abort()
+ # self.pyload.hookManager.downloadPreparing(self.pyfile) #@TODO: Recheck in 0.4.10
+ self.check_abort()
self.pyfile.setStatus("starting")
- self.log_debug("PROCESS URL " + self.pyfile.url, "PLUGIN VERSION %s" % self.__version__)
-
- return self.process(self.pyfile)
-
-
- def process(self, pyfile):
- """
- The 'main' method of every plugin, you **have to** overwrite it
- """
- raise NotImplementedError
-
-
- def set_reconnect(self, reconnect):
- reconnect = bool(reconnect)
-
- self.log_info(_("RECONNECT ") + ("enabled" if reconnect else "disabled"))
- self.log_debug("Previous wantReconnect: %s" % self.wantReconnect)
-
- self.wantReconnect = reconnect
-
-
- def set_wait(self, seconds, reconnect=None):
- """
- Set a specific wait time later used with `wait`
-
- :param seconds: wait time in seconds
- :param reconnect: True if a reconnect would avoid wait time
- """
- wait_time = max(int(seconds), 1)
- wait_until = time.time() + wait_time + 1
-
- self.log_info(_("WAIT %d seconds") % wait_time)
- self.log_debug("Previous waitUntil: %f" % self.pyfile.waitUntil)
-
- self.pyfile.waitUntil = wait_until
-
- if reconnect is not None:
- self.set_reconnect(reconnect)
-
-
- def wait(self, seconds=None, reconnect=None):
- """
- Waits the time previously set
- """
- pyfile = self.pyfile
-
- if seconds is not None:
- self.set_wait(seconds)
-
- if reconnect is not None:
- self.set_reconnect(reconnect)
-
- self.waiting = True
-
- status = pyfile.status #@NOTE: Remove in 0.4.10
- pyfile.setStatus("waiting")
-
- if not self.wantReconnect or self.account:
- if self.account:
- self.log_warning("Ignore reconnection due logged account")
-
- while pyfile.waitUntil > time.time():
- if pyfile.abort:
- self.abort()
-
- time.sleep(2)
-
- else:
- while pyfile.waitUntil > time.time():
- if pyfile.abort:
- self.abort()
-
- if self.thread.m.reconnecting.isSet():
- self.waiting = False
- self.wantReconnect = False
- raise Reconnect
-
- self.thread.m.reconnecting.wait(2)
- time.sleep(2)
-
- self.waiting = False
- pyfile.status = status #@NOTE: Remove in 0.4.10
-
-
- def skip(self, reason=""):
- """
- Skip and give reason
- """
- raise Skip(encode(reason)) #@TODO: Remove `encode` in 0.4.10
-
-
- def abort(self, reason=""):
- """
- Abort and give reason
- """
- #@TODO: Remove in 0.4.10
- if reason:
- self.pyfile.error = encode(reason)
-
- raise Abort
-
-
- def offline(self, reason=""):
- """
- Fail and indicate file is offline
- """
- #@TODO: Remove in 0.4.10
- if reason:
- self.pyfile.error = encode(reason)
-
- raise Fail("offline")
-
-
- def temp_offline(self, reason=""):
- """
- Fail and indicates file ist temporary offline, the core may take consequences
- """
- #@TODO: Remove in 0.4.10
- if reason:
- self.pyfile.error = encode(reason)
-
- raise Fail("temp. offline")
+ try:
+ self.log_debug("PROCESS URL " + self.pyfile.url, "PLUGIN VERSION %s" % self.__version__) #@TODO: Remove in 0.4.10
+ self.process(self.pyfile)
- def retry(self, max_tries=5, wait_time=1, reason=""):
- """
- Retries and begin again from the beginning
+ self.check_abort()
- :param max_tries: number of maximum retries
- :param wait_time: time to wait in seconds
- :param reason: reason for retrying, will be passed to fail if max_tries reached
- """
- id = inspect.currentframe().f_back.f_lineno
- if id not in self.retries:
- self.retries[id] = 0
+ self.log_debug("CHECK DOWNLOAD") #@TODO: Recheck in 0.4.10
+ self._check_download()
- if 0 < max_tries <= self.retries[id]:
- self.fail(reason or _("Max retries reached"))
+ except Fail, e: #@TODO: Move to PluginThread in 0.4.10
+ if self.get_config('fallback_premium', True) and self.premium:
+ self.log_warning(_("Premium download failed"), e)
+ self.restart()
- self.wait(wait_time, False)
-
- self.retries[id] += 1
- raise Retry(encode(reason)) #@TODO: Remove `encode` in 0.4.10
+ else:
+ raise Fail(e)
- def restart(self, reason=None, nopremium=False):
- if not reason:
- reason = _("Fallback to free download") if nopremium else _("Restart")
+ def restart(self, msg="", premium=False):
+ if not msg:
+ msg = _("Simple restart") if premium else _("Fallback to free download")
- if nopremium:
+ if not premium:
if self.premium:
- self.retry_free = True
+ self.rst_free = True
else:
- self.fail("%s | %s" % (reason, _("Download was already free")))
+ self.fail("%s | %s" % (msg, _("Download was already free")))
- raise Retry(encode(reason)) #@TODO: Remove `encode` in 0.4.10
-
-
- def fixurl(self, url):
- url = _fixurl(url)
-
- if not urlparse.urlparse(url).scheme:
- url_p = urlparse.urlparse(self.pyfile.url)
- baseurl = "%s://%s" % (url_p.scheme, url_p.netloc)
- url = urlparse.urljoin(baseurl, url)
-
- return url
+ raise Retry(encode(msg)) #@TODO: Remove `encode` in 0.4.10
+ @check_abort
def download(self, url, get={}, post={}, ref=True, cookies=True, disposition=True):
"""
Downloads the content at url to download folder
@@ -361,23 +120,18 @@ class Hoster(Plugin):
the filename will be changed if needed
:return: The location where the file was saved
"""
- if self.pyfile.abort:
- self.abort()
-
- url = self.fixurl(url)
-
- if not url or not isinstance(url, basestring):
- self.fail(_("No url given"))
-
if self.pyload.debug:
self.log_debug("DOWNLOAD URL " + url,
- *["%s=%s" % (key, val) for key, val in locals().items() if key not in ("self", "url")])
+ *["%s=%s" % (key, val) for key, val in locals().items() if key not in ("self", "url", "_[1]")])
+
+ url = self.fixurl(url)
- name = _fixurl(self.pyfile.name)
- self.pyfile.name = urlparse.urlparse(name).path.split('/')[-1] or name
+ self.pyfile.name = parse_name(self.pyfile.name) #: Safe check
self.captcha.correct()
- self.check_for_same_files()
+
+ if self.pyload.config.get("download", "skip_existing"):
+ self.check_filedupe()
self.pyfile.setStatus("downloading")
@@ -387,6 +141,7 @@ class Hoster(Plugin):
if not exists(download_location):
try:
os.makedirs(download_location)
+
except Exception, e:
self.fail(e)
@@ -397,8 +152,7 @@ class Hoster(Plugin):
self.pyload.hookManager.dispatchEvent("download_start", self.pyfile, url, filename)
- if self.pyfile.abort:
- self.abort()
+ self.check_abort()
try:
newname = self.req.httpDownload(url, filename, get=get, post=post, ref=ref, cookies=cookies,
@@ -409,30 +163,60 @@ class Hoster(Plugin):
#@TODO: Recheck in 0.4.10
if disposition and newname:
- finalname = urlparse.urlparse(newname).path.split('/')[-1].split(' filename*=')[0]
+ finalname = parse_name(newname).split(' filename*=')[0]
- if finalname != newname != self.pyfile.name:
+ if finalname != newname:
try:
- os.rename(fs_join(location, newname), fs_join(location, finalname))
+ oldname_enc = fs_join(download_location, newname)
+ newname_enc = fs_join(download_location, finalname)
+ os.rename(oldname_enc, newname_enc)
except OSError, e:
self.log_warning(_("Error renaming `%s` to `%s`") % (newname, finalname), e)
finalname = newname
self.log_info(_("`%s` saved as `%s`") % (self.pyfile.name, finalname))
- self.pyfile.name = finalname
- filename = os.path.join(location, finalname)
+
+ self.pyfile.name = finalname
+ filename = os.path.join(location, finalname)
self.set_permissions(fs_encode(filename))
self.last_download = filename
- return self.last_download
+ return filename
+
+
+ def check_filesize(self, file_size, size_tolerance=1024):
+ """
+ Checks the file size of the last downloaded file
+
+ :param file_size: expected file size
+ :param size_tolerance: size check tolerance
+ """
+ if not self.last_download:
+ return
+
+ download_location = fs_encode(self.last_download)
+ download_size = os.stat(download_location).st_size
+
+ if download_size < 1:
+ self.fail(_("Empty file"))
+
+ elif file_size > 0:
+ diff = abs(file_size - download_size)
+
+ if diff > size_tolerance:
+ self.fail(_("File size mismatch | Expected file size: %s | Downloaded file size: %s")
+ % (file_size, download_size))
+
+ elif diff != 0:
+ self.log_warning(_("File size is not equal to expected size"))
- def check_download(self, rules, delete=False, file_size=0, size_tolerance=1024, read_size=1048576):
+ def check_file(self, rules, delete=False, read_size=1048576, file_size=0, size_tolerance=1024):
"""
- Checks the content of the last downloaded file, re match is saved to `lastCheck`
+ Checks the content of the last downloaded file, re match is saved to `last_check`
:param rules: dict with names and rules to match (compiled regexp or strings)
:param delete: delete if matched
@@ -442,29 +226,13 @@ class Hoster(Plugin):
:return: dictionary key of the first rule that matched
"""
do_delete = False
- last_download = fs_encode(self.last_download)
+ last_download = fs_encode(self.last_download) #@TODO: Recheck in 0.4.10
if not self.last_download or not exists(last_download):
- self.last_download = ""
self.fail(self.pyfile.error or _("No file downloaded"))
try:
- download_size = os.stat(last_download).st_size
-
- if download_size < 1:
- do_delete = True
- self.fail(_("Empty file"))
-
- elif file_size > 0:
- diff = abs(file_size - download_size)
-
- if diff > size_tolerance:
- do_delete = True
- self.fail(_("File size mismatch | Expected file size: %s | Downloaded file size: %s")
- % (file_size, download_size))
-
- elif diff != 0:
- self.log_warning(_("File size is not equal to expected size"))
+ self.check_filesize(file_size, size_tolerance)
with open(last_download, "rb") as f:
content = f.read(read_size)
@@ -479,7 +247,7 @@ class Hoster(Plugin):
elif hasattr(rule, "search"):
m = rule.search(content)
- if m:
+ if m is not None:
do_delete = True
self.last_check = m
return name
@@ -490,133 +258,45 @@ class Hoster(Plugin):
except OSError, e:
self.log_warning(_("Error removing: %s") % last_download, e)
- if self.pyload.debug:
- traceback.print_exc()
else:
- self.last_download = ""
- self.log_info(_("File deleted"))
-
-
- def direct_link(self, url, follow_location=None):
- link = ""
-
- if follow_location is None:
- redirect = 1
-
- elif type(follow_location) is int:
- redirect = max(follow_location, 1)
-
- else:
- redirect = self.get_config("maxredirs", 10, "UserAgentSwitcher")
+ self.log_info(_("File deleted: ") + self.last_download)
+ self.last_download = "" #: Recheck in 0.4.10
- for i in xrange(redirect):
- try:
- self.log_debug("Redirect #%d to: %s" % (i, url))
- header = self.load(url, just_header=True)
-
- except Exception: #: Bad bad bad... rewrite this part in 0.4.10
- res = self.load(url,
- just_header=True,
- req=self.pyload.requestFactory.getRequest())
-
- header = {'code': req.code}
- for line in res.splitlines():
- line = line.strip()
- if not line or ":" not in line:
- continue
-
- key, none, value = line.partition(":")
- key = key.lower().strip()
- value = value.strip()
-
- if key in header:
- if type(header[key]) is list:
- header[key].append(value)
- else:
- header[key] = [header[key], value]
- else:
- header[key] = value
-
- if 'content-disposition' in header:
- link = url
-
- elif 'location' in header and header['location']:
- location = header['location']
-
- if not urlparse.urlparse(location).scheme:
- url_p = urlparse.urlparse(url)
- baseurl = "%s://%s" % (url_p.scheme, url_p.netloc)
- location = urlparse.urljoin(baseurl, location)
-
- if 'code' in header and header['code'] == 302:
- link = location
-
- if follow_location:
- url = location
- continue
-
- else:
- extension = os.path.splitext(urlparse.urlparse(url).path.split('/')[-1])[-1]
-
- if 'content-type' in header and header['content-type']:
- mimetype = header['content-type'].split(';')[0].strip()
-
- elif extension:
- mimetype = mimetypes.guess_type(extension, False)[0] or "application/octet-stream"
- else:
- mimetype = ""
-
- if mimetype and (link or 'html' not in mimetype):
- link = url
- else:
- link = ""
+ def _check_download(self):
+ if self.captcha.task and not self.last_download:
+ self.retry_captcha()
- break
+ elif self.check_file({'Empty file': re.compile(r'\A((.|)(\2|\s)*)\Z')},
+ delete=True):
+ self.error(_("Empty file"))
- else:
- try:
- self.log_error(_("Too many redirects"))
- except Exception:
- pass
+ elif self.get_config('chk_filesize', False) and self.info.get('size'):
+ # 10485760 is 10MB, tolerance is used when comparing displayed size on the hoster website to real size
+ # For example, the displayed size can be 1.46GB while the real size is 1.4649853GB
+ self.check_filesize(self.info['size'], size_tolerance=10485760)
- return link
-
- def parse_html_form(self, attr_str="", input_names={}):
- return parse_html_form(attr_str, self.html, input_names)
-
-
- def check_traffic_left(self):
+ def check_traffic(self):
if not self.account:
return True
- traffic = self.account.get_data(self.user, True)['trafficleft']
+ traffic = self.account.get_data('trafficleft')
if traffic is None:
return False
- elif traffic == -1:
+
+ elif traffic is -1:
return True
+
else:
- size = self.pyfile.size / 1024
- self.log_info(_("Filesize: %s KiB, Traffic left for user %s: %s KiB") % (size, self.user, traffic))
+ size = self.pyfile.size / 1024 #@TODO: Remove in 0.4.10
+ self.log_info(_("Filesize: %s KiB, Traffic left for user %s: %s KiB") % (size, self.account.user, traffic)) #@TODO: Rewrite in 0.4.10
return size <= traffic
- def get_password(self):
- """
- Get the password the user provided in the package
- """
- return self.pyfile.package().password or ""
-
-
- #: Deprecated method, use `check_for_same_files` instead (Remove in 0.4.10)
- def checkForSameFiles(self, *args, **kwargs):
- return self.check_for_same_files(*args, **kwargs)
-
-
- def check_for_same_files(self, starting=False):
+ def check_filedupe(self):
"""
Checks if same file was/is downloaded within same package
@@ -626,23 +306,32 @@ class Hoster(Plugin):
pack = self.pyfile.package()
for pyfile in self.pyload.files.cache.values():
- if pyfile != self.pyfile and pyfile.name is self.pyfile.name and pyfile.package().folder is pack.folder:
- if pyfile.status in (0, 12): #: Finished or downloading
- self.skip(pyfile.pluginname)
- elif pyfile.status in (5, 7) and starting: #: A download is waiting/starting and was appenrently started before
- self.skip(pyfile.pluginname)
+ if pyfile is self.pyfile:
+ continue
- download_folder = self.pyload.config.get("general", "download_folder")
- location = fs_join(download_folder, pack.folder, self.pyfile.name)
+ if pyfile.name != self.pyfile.name or pyfile.package().folder != pack.folder:
+ continue
- if starting and self.pyload.config.get("download", "skip_existing") and exists(location):
- size = os.stat(location).st_size
- if size >= self.pyfile.size:
- self.skip("File exists")
+ if pyfile.status in (0, 5, 7, 12): #: (finished, waiting, starting, downloading)
+ self.skip(pyfile.pluginname)
- pyfile = self.pyload.db.findDuplicates(self.pyfile.id, self.pyfile.package().folder, self.pyfile.name)
+ download_folder = self.pyload.config.get("general", "download_folder")
+ package_folder = pack.folder if self.pyload.config.get("general", "folder_per_package") else ""
+ download_location = fs_join(download_folder, package_folder, self.pyfile.name)
+
+ if not exists(download_location):
+ return
+
+ pyfile = self.pyload.db.findDuplicates(self.pyfile.id, package_folder, self.pyfile.name)
if pyfile:
- if exists(location):
- self.skip(pyfile[0])
+ self.skip(pyfile[0])
+
+ size = os.stat(download_location).st_size
+ if size >= self.pyfile.size:
+ self.skip(_("File exists"))
- self.log_debug("File %s not skipped, because it does not exists." % self.pyfile.name)
+
+ #: Deprecated method, use `check_filedupe` instead (Remove in 0.4.10)
+ def checkForSameFiles(self, *args, **kwargs):
+ if self.pyload.config.get("download", "skip_existing"):
+ return self.check_filedupe()
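
For orientation, a minimal, hypothetical sketch of how a hoster plugin could drive the reworked check_filesize/check_file helpers after a download; the ExampleHoster class, its pattern and the rule strings are invented for illustration and are not part of this changeset.

    # -*- coding: utf-8 -*-
    # Hypothetical usage sketch (not part of this commit).

    import re

    from module.plugins.internal.Hoster import Hoster


    class ExampleHoster(Hoster):
        __name__    = "ExampleHoster"
        __type__    = "hoster"
        __version__ = "0.01"
        __pattern__ = r'https?://(?:www\.)?example\.com/\w+'

        def process(self, pyfile):
            self.download(pyfile.url, disposition=True)

            #: Fail if the download deviates more than 1 KiB from the
            #: size announced by the hoster (if any was parsed into self.info)
            self.check_filesize(self.info.get('size', 0), size_tolerance=1024)

            #: check_file() returns the key of the first matching rule
            #: and stores the re match in self.last_check
            errmsg = self.check_file({'html error' : re.compile(r'<title>Error</title>'),
                                      'maintenance': "temporarily unavailable"},
                                     delete=True)
            if errmsg is not None:
                self.fail(errmsg)
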
diff --git a/module/plugins/internal/MultiHook.py b/module/plugins/internal/MultiAccount.py
index 42a1985b5..b38670ce7 100644
--- a/module/plugins/internal/MultiHook.py
+++ b/module/plugins/internal/MultiAccount.py
@@ -2,30 +2,29 @@
import re
import time
-import traceback
-from module.plugins.internal.Hook import Hook
+from module.plugins.internal.Account import Account
from module.utils import decode, remove_chars
-class MultiHook(Hook):
- __name__ = "MultiHook"
- __type__ = "hook"
- __version__ = "0.54"
+class MultiAccount(Account):
+ __name__ = "MultiAccount"
+ __type__ = "account"
+ __version__ = "0.02"
__status__ = "testing"
- __config__ = [("pluginmode" , "all;listed;unlisted", "Use for plugins" , "all"),
- ("pluginlist" , "str" , "Plugin list (comma separated)", "" ),
- ("reload" , "bool" , "Reload plugin list" , True ),
- ("reloadinterval", "int" , "Reload interval in hours" , 12 )]
+ __config__ = [("pluginmode" , "all;listed;unlisted", "Use for plugins" , "all"),
+ ("pluginlist" , "str" , "Plugin list (comma separated)", "" ),
+ ("reload" , "bool" , "Reload plugin list" , True ),
+ ("reloadinterval", "int" , "Reload interval in hours" , 12 )]
- __description__ = """Hook plugin for multi hoster/crypter"""
+ __description__ = """Multi hoster account plugin"""
__license__ = "GPLv3"
__authors__ = [("pyLoad Team" , "admin@pyload.org" ),
("Walter Purcaro", "vuolter@gmail.com")]
- MIN_RELOAD_INTERVAL = 1 * 60 * 60 #: 1 hour
+ REFRESH_INTERVAL = 1 * 60 * 60 #: 1 hour
DOMAIN_REPLACEMENTS = [(r'180upload\.com' , "hundredeightyupload.com"),
(r'bayfiles\.net' , "bayfiles.com" ),
@@ -55,6 +54,34 @@ class MultiHook(Hook):
(r'^0' , "zero" )]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
def init(self):
self.plugins = []
self.supported = []
@@ -102,7 +129,7 @@ class MultiHook(Hook):
for _i in xrange(5):
try:
- pluginset = self._plugin_set(self.get_hosters())
+ pluginset = self._plugin_set(self.grab_hosters())
break
except Exception, e:
@@ -110,7 +137,7 @@ class MultiHook(Hook):
time.sleep(60)
else:
self.log_error(_("No hoster list retrieved"))
- self.interval = self.MIN_RELOAD_INTERVAL
+ self.interval = self.REFRESH_INTERVAL
return list()
try:
@@ -144,7 +171,7 @@ class MultiHook(Hook):
return set(plugins)
- def get_hosters(self):
+ def grab_hosters(self, user, password, data):
"""
         Load the list of supported hosters
@@ -160,7 +187,7 @@ class MultiHook(Hook):
self.load_account()
if self.get_config('reload', True):
- self.interval = max(self.get_config('reloadinterval', 12) * 60 * 60, self.MIN_RELOAD_INTERVAL)
+ self.interval = max(self.get_config('reloadinterval', 12) * 60 * 60, self.REFRESH_INTERVAL)
else:
self.pyload.scheduler.removeJob(self.cb)
self.cb = None
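
As a rough illustration of the renamed entry point, a hypothetical multi-hoster account would now override grab_hosters about like this; the class, endpoint URL and response format are invented for the example.

    # -*- coding: utf-8 -*-
    # Hypothetical sketch (not part of this commit).

    from module.plugins.internal.MultiAccount import MultiAccount


    class ExampleComMulti(MultiAccount):
        __name__    = "ExampleComMulti"
        __type__    = "account"
        __version__ = "0.01"

        def grab_hosters(self, user, password, data):
            #: Return a plain list of hoster domains; the base class
            #: normalizes them and maps them onto the installed plugins
            html = self.load("https://www.example.com/api/hosters.txt")  #: invented endpoint
            return [d.strip() for d in html.replace("\n", ",").split(",") if d.strip()]
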
diff --git a/module/plugins/internal/MultiCrypter.py b/module/plugins/internal/MultiCrypter.py
index ca7b03941..9d4ac3ab9 100644
--- a/module/plugins/internal/MultiCrypter.py
+++ b/module/plugins/internal/MultiCrypter.py
@@ -6,12 +6,13 @@ from module.plugins.internal.SimpleCrypter import SimpleCrypter
class MultiCrypter(SimpleCrypter):
__name__ = "MultiCrypter"
__type__ = "hoster"
- __version__ = "0.02"
+ __version__ = "0.03"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
- __config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
- ("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
+ __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
+ ("use_subfolder" , "bool", "Save package to subfolder" , True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
__description__ = """Multi decrypter plugin"""
__license__ = "GPLv3"
@@ -19,7 +20,7 @@ class MultiCrypter(SimpleCrypter):
def init(self):
- self.CRYPTER_NAME = self.pyload.pluginManager.crypterPlugins[self.__name__]['name']
+ self.PLUGIN_NAME = self.pyload.pluginManager.crypterPlugins[self.__name__]['name']
def _log(self, level, plugintype, pluginname, messages):
diff --git a/module/plugins/internal/MultiHoster.py b/module/plugins/internal/MultiHoster.py
index c0c928a45..d7d3c5ccd 100644
--- a/module/plugins/internal/MultiHoster.py
+++ b/module/plugins/internal/MultiHoster.py
@@ -9,33 +9,35 @@ from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, r
class MultiHoster(SimpleHoster):
__name__ = "MultiHoster"
__type__ = "hoster"
- __version__ = "0.50"
+ __version__ = "0.52"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
- __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
- ("revertfailed", "bool", "Revert to standard download if fails", True)]
+ __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
+ ("fallback_premium", "bool", "Fallback to free download if premium fails", True),
+ ("chk_filesize" , "bool", "Check file size" , True),
+ ("revertfailed" , "bool", "Revert to standard download if fails" , True)]
__description__ = """Multi hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- HOSTER_NAME = None
+ PLUGIN_NAME = None
LEECH_HOSTER = False
LOGIN_ACCOUNT = True
def init(self):
- self.HOSTER_NAME = self.pyload.pluginManager.hosterPlugins[self.__name__]['name']
+ self.PLUGIN_NAME = self.pyload.pluginManager.hosterPlugins[self.__name__]['name']
def _log(self, level, plugintype, pluginname, messages):
return super(MultiHoster, self)._log(level,
plugintype,
pluginname,
- (self.HOSTER_NAME,) + messages)
+ (self.PLUGIN_NAME,) + messages)
def setup(self):
@@ -83,11 +85,11 @@ class MultiHoster(SimpleHoster):
self.check_errors()
self.check_status(getinfo=False)
- if self.premium and (not self.CHECK_TRAFFIC or self.check_traffic_left()):
+ if self.premium and (not self.CHECK_TRAFFIC or self.check_traffic()):
self.log_info(_("Processing as premium download..."))
self.handle_premium(pyfile)
- elif not self.LOGIN_ACCOUNT or (not self.CHECK_TRAFFIC or self.check_traffic_left()):
+ elif not self.LOGIN_ACCOUNT or (not self.CHECK_TRAFFIC or self.check_traffic()):
self.log_info(_("Processing as free download..."))
self.handle_free(pyfile)
@@ -95,12 +97,12 @@ class MultiHoster(SimpleHoster):
self.log_info(_("Downloading file..."))
self.download(self.link, disposition=self.DISPOSITION)
- self.check_file()
+ self.check_download()
except Fail, e: #@TODO: Move to PluginThread in 0.4.10
if self.premium:
self.log_warning(_("Premium download failed"))
- self.restart(nopremium=True)
+ self.restart()
elif self.get_config("revertfailed", True) \
and "new_module" in self.pyload.pluginManager.hosterPlugins[self.__name__]:
@@ -116,7 +118,7 @@ class MultiHoster(SimpleHoster):
hdict['new_module'] = tmp_module
hdict['new_name'] = tmp_name
- self.restart(_("Revert to original hoster plugin"))
+ self.restart(_("Revert to original hoster plugin"), premium=True)
else:
raise Fail(encode(e)) #@TODO: Remove `encode` in 0.4.10
diff --git a/module/plugins/internal/OCR.py b/module/plugins/internal/OCR.py
index b24b3058b..884639b6b 100644
--- a/module/plugins/internal/OCR.py
+++ b/module/plugins/internal/OCR.py
@@ -12,7 +12,6 @@ import logging
import os
import subprocess
# import tempfile
-import traceback
from module.plugins.internal.Plugin import Plugin
from module.utils import save_join as fs_join
@@ -95,7 +94,7 @@ class OCR(Plugin):
self.pyload.log_debug("Saving tiff...")
self.image.save(tmpTif.name, 'TIFF')
- if os.name == "nt":
+ if os.name is "nt":
tessparams = [os.path.join(pypath, "tesseract", "tesseract.exe")]
else:
tessparams = ["tesseract"]
@@ -128,6 +127,7 @@ class OCR(Plugin):
try:
with open(tmpTxt.name, 'r') as f:
self.result_captcha = f.read().replace("\n", "")
+
except Exception:
self.result_captcha = ""
@@ -137,10 +137,9 @@ class OCR(Plugin):
os.remove(tmpTxt.name)
if subset and (digits or lowercase or uppercase):
os.remove(tmpSub.name)
+
except OSError, e:
self.log_warning(e)
- if self.pyload.debug:
- traceback.print_exc()
def recognize(self, name):
@@ -194,6 +193,7 @@ class OCR(Plugin):
count += 1
if pixels[x, y - 1] != 255:
count += 1
+
except Exception:
pass
diff --git a/module/plugins/internal/Plugin.py b/module/plugins/internal/Plugin.py
index 7b45c40a8..6d94ca1e4 100644
--- a/module/plugins/internal/Plugin.py
+++ b/module/plugins/internal/Plugin.py
@@ -6,14 +6,17 @@ import datetime
import inspect
import os
import re
+import time
+import traceback
import urllib
+import urlparse
-if os.name != "nt":
+if os.name is not "nt":
import grp
import pwd
from module.plugins.Plugin import Abort, Fail, Reconnect, Retry, SkipDownload as Skip #@TODO: Remove in 0.4.10
-from module.utils import fs_encode, fs_decode, html_unescape, save_join as fs_join
+from module.utils import fs_encode, fs_decode, html_unescape, parseFileSize as parse_size, save_join as fs_join
#@TODO: Move to utils in 0.4.10
@@ -22,7 +25,12 @@ def decode(string, encoding='utf8'):
if type(string) is str:
return string.decode(encoding, "replace")
else:
- return string
+ return unicode(string)
+
+
+#@TODO: Remove in 0.4.10
+def _decode(*args, **kwargs):
+ return decode(*args, **kwargs)
#@TODO: Move to utils in 0.4.10
@@ -31,14 +39,14 @@ def encode(string, encoding='utf8'):
if type(string) is unicode:
return string.encode(encoding, "replace")
else:
- return string
+ return str(string)
#@TODO: Move to utils in 0.4.10
def exists(path):
if os.path.exists(path):
- if os.name == "nt":
- dir, name = os.path.split(path)
+ if os.name is "nt":
+ dir, name = os.path.split(path.rstrip(os.sep))
return name in os.listdir(dir)
else:
return True
@@ -46,9 +54,69 @@ def exists(path):
return False
+def fixurl(url, unquote=None):
+ newurl = urllib.unquote(url)
+
+ if unquote is None:
+ unquote = newurl == url
+
+ newurl = html_unescape(newurl.decode('unicode-escape'))
+ newurl = re.sub(r'[^:]/{2,}', '/', newurl).strip().lstrip('.')
+
+ if not unquote:
+ newurl = urllib.quote(newurl)
+
+ return newurl
+
+
+def parse_name(string):
+ path = fixurl(decode(string), unquote=False)
+ url_p = urlparse.urlparse(path.rstrip('/'))
+ name = (url_p.path.split('/')[-1] or
+ url_p.query.split('=', 1)[::-1][0].split('&', 1)[0] or
+ url_p.netloc.split('.', 1)[0])
+
+ return urllib.unquote(name)
+
+
#@TODO: Move to utils in 0.4.10
-def fixurl(url):
- return html_unescape(urllib.unquote(url.decode('unicode-escape'))).strip().rstrip('/')
+def str2int(string):
+ try:
+ return int(string)
+ except:
+ pass
+
+ ones = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight",
+ "nine", "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen",
+ "sixteen", "seventeen", "eighteen", "nineteen"]
+ tens = ["", "", "twenty", "thirty", "forty", "fifty", "sixty", "seventy",
+ "eighty", "ninety"]
+
+ o_tuple = [(w, i) for i, w in enumerate(ones)]
+ t_tuple = [(w, i * 10) for i, w in enumerate(tens)]
+
+ numwords = dict(o_tuple + t_tuple)
+ tokens = re.split(r"[\s-]+", string.lower())
+
+ try:
+ return sum(numwords[word] for word in tokens)
+ except:
+ return 0
+
+
+def parse_time(string):
+ if re.search("da(il)?y|today", string):
+ time = seconds_to_midnight()
+
+ else:
+ this = re.compile("this", re.I)
+ regex = re.compile(r'(\d+|\w+)\s*(hr|hour|min|sec|)', re.I)
+
+ time = sum(1 if this.match(v) else str2int(v) *
+ {'hr': 3600, 'hour': 3600, 'min': 60, 'sec': 1, '': 1}[u.lower()]
+ for v, u in regex.findall(string))
+
+ return time
#@TODO: Move to utils in 0.4.10
@@ -56,22 +124,35 @@ def timestamp():
return int(time.time() * 1000)
-def seconds_to_midnight(gmt=0):
- now = datetime.datetime.utcnow() + datetime.timedelta(hours=gmt)
-
- if now.hour == 0 and now.minute < 10:
- midnight = now
+#@TODO: Move to utils in 0.4.10
+def which(program):
+ """
+ Works exactly like the unix command which
+ Courtesy of http://stackoverflow.com/a/377028/675646
+ """
+ isExe = lambda x: os.path.isfile(x) and os.access(x, os.X_OK)
+
+ fpath, fname = os.path.split(program)
+
+ if fpath:
+ if isExe(program):
+ return program
else:
- midnight = now + datetime.timedelta(days=1)
+ for path in os.environ['PATH'].split(os.pathsep):
+ exe_file = os.path.join(path.strip('"'), program)
+ if isExe(exe_file):
+ return exe_file
+
- td = midnight.replace(hour=0, minute=10, second=0, microsecond=0) - now
+def seconds_to_midnight(utc=None):
+ if utc is None:
+ now = datetime.datetime.today()
+ else:
+ now = datetime.datetime.utcnow() + datetime.timedelta(hours=utc)
- if hasattr(td, 'total_seconds'):
- res = td.total_seconds()
- else: #@NOTE: work-around for python 2.5 and 2.6 missing datetime.timedelta.total_seconds
- res = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
+ midnight = now.replace(hour=0, minute=1, second=0, microsecond=0) + datetime.timedelta(days=1)
- return int(res)
+ return (midnight - now).seconds
def replace_patterns(string, ruleslist):
@@ -112,7 +193,10 @@ def parse_html_form(attr_str, html, input_names={}):
else:
inputs[name] = value
- if input_names:
+ if not input_names:
+ #: No attribute check
+ return action, inputs
+ else:
#: Check input attributes
for key, val in input_names.items():
if key in inputs:
@@ -122,14 +206,12 @@ def parse_html_form(attr_str, html, input_names={}):
continue
elif hasattr(val, "search") and re.match(val, inputs[key]):
continue
- break #: Attibute value does not match
+ else:
+ break #: Attribute value does not match
else:
 break #: Attribute name does not match
else:
return action, inputs #: Passed attribute check
- else:
- #: No attribute check
- return action, inputs
return {}, None #: No matching form found
@@ -145,8 +227,8 @@ def chunks(iterable, size):
class Plugin(object):
__name__ = "Plugin"
- __type__ = "hoster"
- __version__ = "0.30"
+ __type__ = "plugin"
+ __version__ = "0.44"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
@@ -154,10 +236,7 @@ class Plugin(object):
__description__ = """Base plugin"""
__license__ = "GPLv3"
- __authors__ = [("RaNaN" , "RaNaN@pyload.org" ),
- ("spoob" , "spoob@pyload.org" ),
- ("mkaay" , "mkaay@mkaay.de" ),
- ("Walter Purcaro", "vuolter@gmail.com")]
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
def __init__(self, core):
@@ -165,10 +244,16 @@ class Plugin(object):
self.init()
+ def __repr__(self):
+ return "<%(type)s %(name)s>" % {'type': self.__type__.capitalize(),
+ 'name': self.__name__}
+
+
def _init(self, core):
- self.pyload = core
- self.info = {} #: Provide information in dict here
- self.req = None
+ self.pyload = core
+ self.info = {} #: Provide information in dict here
+ self.req = None #: Browser instance, see `network.Browser`
+ self.last_html = None
def init(self):
@@ -180,33 +265,37 @@ class Plugin(object):
def _log(self, level, plugintype, pluginname, messages):
log = getattr(self.pyload.log, level)
- msg = encode(" | ".join((a if isinstance(a, basestring) else str(a)).strip() for a in messages if a))
- log("%(plugintype)s %(pluginname)s%(id)s: %(msg)s"
+ msg = u" | ".join(decode(a).strip() for a in messages if a)
+ log("%(plugintype)s %(pluginname)s: %(msg)s"
% {'plugintype': plugintype.upper(),
'pluginname': pluginname,
- 'id' : ("[%s]" % self.pyfile.id) if hasattr(self, 'pyfile') else "",
'msg' : msg})
def log_debug(self, *args):
- if self.pyload.debug:
- return self._log("debug", self.__type__, self.__name__, args)
+ if not self.pyload.debug:
+ return
+ self._log("debug", self.__type__, self.__name__, args)
def log_info(self, *args):
- return self._log("info", self.__type__, self.__name__, args)
+ self._log("info", self.__type__, self.__name__, args)
def log_warning(self, *args):
- return self._log("warning", self.__type__, self.__name__, args)
+ self._log("warning", self.__type__, self.__name__, args)
def log_error(self, *args):
- return self._log("error", self.__type__, self.__name__, args)
+ self._log("error", self.__type__, self.__name__, args)
+ if self.pyload.debug:
+ traceback.print_exc()
def log_critical(self, *args):
return self._log("critical", self.__type__, self.__name__, args)
+ if self.pyload.debug:
+ traceback.print_exc()
def set_permissions(self, path):
@@ -225,7 +314,7 @@ class Plugin(object):
self.log_warning(_("Setting path mode failed"), e)
try:
- if os.name != "nt" and self.pyload.config.get("permission", "change_dl"):
+ if os.name is not "nt" and self.pyload.config.get("permission", "change_dl"):
uid = pwd.getpwnam(self.pyload.config.get("permission", "user"))[2]
gid = grp.getgrnam(self.pyload.config.get("permission", "group"))[2]
os.chown(path, uid, gid)
@@ -240,7 +329,7 @@ class Plugin(object):
return min(self.pyload.config.get("download", "chunks"), self.chunk_limit)
- def set_config(self, option, value):
+ def set_config(self, option, value, plugin=None):
"""
Set config value for current plugin
@@ -248,7 +337,7 @@ class Plugin(object):
:param value:
:return:
"""
- self.pyload.config.setPlugin(self.__name__, option, value)
+ self.pyload.config.setPlugin(plugin or self.__name__, option, value)
def get_config(self, option, default="", plugin=None):
@@ -287,21 +376,10 @@ class Plugin(object):
self.pyload.db.delStorage(self.__name__, key)
- def fail(self, reason):
+ def fail(self, msg):
"""
- Fail and give reason
+ Fail with the given msg
"""
- raise Fail(encode(reason)) #@TODO: Remove `encode` in 0.4.10
-
-
- def error(self, reason="", type=_("Parse")):
- if not reason:
- type = _("Unknown")
-
- msg = _("%s error") % type.strip().capitalize() if type else _("Error")
- msg += (": %s" % reason.strip()) if reason else ""
- msg += _(" | Plugin may be out of date")
-
raise Fail(encode(msg)) #@TODO: Remove `encode` in 0.4.10
@@ -318,17 +396,11 @@ class Plugin(object):
 :param decode: Whether to decode the output according to the http header; should be True in most cases
:return: Loaded content
"""
- if hasattr(self, 'pyfile') and self.pyfile.abort:
- self.abort()
-
- url = fixurl(url)
-
- if not url or not isinstance(url, basestring):
- self.fail(_("No url given"))
-
if self.pyload.debug:
self.log_debug("LOAD URL " + url,
- *["%s=%s" % (key, val) for key, val in locals().items() if key not in ("self", "url")])
+ *["%s=%s" % (key, val) for key, val in locals().items() if key not in ("self", "url", "_[1]")])
+
+ url = fixurl(url) #: Recheck in 0.4.10
if req is None:
req = self.req or self.pyload.requestFactory.getRequest(self.__name__)
@@ -337,15 +409,17 @@ class Plugin(object):
if isinstance(cookies, list):
set_cookies(req.cj, cookies)
- res = req.load(url, get, post, ref, bool(cookies), just_header, multipart, decode is True) #@TODO: Fix network multipart in 0.4.10
+ html = req.load(url, get, post, ref, bool(cookies), just_header, multipart, decode is True) #@TODO: Fix network multipart in 0.4.10
#@TODO: Move to network in 0.4.10
if decode:
- res = html_unescape(res)
+ html = html_unescape(html)
#@TODO: Move to network in 0.4.10
if isinstance(decode, basestring):
- res = decode(res, decode)
+ html = _decode(html, decode) #@NOTE: Use `utils.decode()` in 0.4.10
+
+ self.last_html = html
if self.pyload.debug:
frame = inspect.currentframe()
@@ -356,15 +430,18 @@ class Plugin(object):
with open(framefile, "wb") as f:
del frame #: Delete the frame or it wont be cleaned
- f.write(encode(res))
+ f.write(encode(html))
except IOError, e:
self.log_error(e)
- if just_header:
- #: Parse header
+ if not just_header:
+ return html
+
+ else:
+ #@TODO: Move to network in 0.4.10
header = {'code': req.code}
- for line in res.splitlines():
+ for line in html.splitlines():
line = line.strip()
if not line or ":" not in line:
continue
@@ -380,20 +457,20 @@ class Plugin(object):
header[key] = [header[key], value]
else:
header[key] = value
- res = header
- return res
+ return header
def clean(self):
"""
- Clean everything and remove references
+ Remove references
"""
try:
+ self.req.clearCookies()
self.req.close()
+
except Exception:
pass
- for a in ("pyfile", "thread", "html", "req"):
- if hasattr(self, a):
- setattr(self, a, None)
+ else:
+ self.req = None
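
The helpers that Plugin.py now exposes at module level are plain functions, so they can be exercised directly; a hedged sketch (values follow from the implementations in the hunks above, run inside a pyload checkout):

    # -*- coding: utf-8 -*-
    # Hedged usage sketch (not part of this commit).

    from module.plugins.internal.Plugin import parse_name, parse_time, str2int, which

    #: parse_name() extracts a sensible file name from an url or raw string
    print parse_name("http://example.com/folder/My%20File.zip")  #: -> "My File.zip"

    #: str2int() also understands spelled-out english numbers
    print str2int("42")            #: -> 42
    print str2int("twenty three")  #: -> 23

    #: parse_time() sums up every time unit it recognizes in a wait message
    print parse_time("Please wait 2 hours 15 min")  #: -> 8100

    #: which() resolves an executable on PATH, or returns None
    print which("unrar")
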
diff --git a/module/plugins/internal/SevenZip.py b/module/plugins/internal/SevenZip.py
index 5811c28de..f73e935e8 100644
--- a/module/plugins/internal/SevenZip.py
+++ b/module/plugins/internal/SevenZip.py
@@ -5,18 +5,18 @@ import re
import subprocess
from module.plugins.internal.UnRar import ArchiveError, CRCError, PasswordError, UnRar, renice
-from module.utils import fs_encode, save_join as fs_join
+from module.utils import save_join as fs_join
class SevenZip(UnRar):
__name__ = "SevenZip"
- __version__ = "0.14"
+ __version__ = "0.16"
__status__ = "testing"
__description__ = """7-Zip extractor plugin"""
__license__ = "GPLv3"
- __authors__ = [("Michael Nowak" , "" ),
- ("Walter Purcaro", "vuolter@gmail.com")]
+ __authors__ = [("Walter Purcaro", "vuolter@gmail.com"),
+ ("Michael Nowak" , None )]
CMD = "7z"
@@ -38,7 +38,7 @@ class SevenZip(UnRar):
@classmethod
def find(cls):
try:
- if os.name == "nt":
+ if os.name is "nt":
cls.CMD = os.path.join(pypath, "7z.exe")
p = subprocess.Popen([cls.CMD], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -55,42 +55,28 @@ class SevenZip(UnRar):
return True
- def verify(self, password):
+ def verify(self, password=None):
#: 7z can't distinguish crc and pw error in test
- p = self.call_cmd("l", "-slt", fs_encode(self.filename))
+ p = self.call_cmd("l", "-slt", self.target)
out, err = p.communicate()
if self.re_wrongpwd.search(out):
raise PasswordError
- if self.re_wrongpwd.search(err):
+ elif self.re_wrongpwd.search(err):
raise PasswordError
- if self.re_wrongcrc.search(err):
- raise CRCError(err)
-
-
-
- def check(self, password):
- p = self.call_cmd("l", "-slt", fs_encode(self.filename))
- out, err = p.communicate()
-
- #: Check if output or error macthes the 'wrong password'-Regexp
- if self.re_wrongpwd.search(out):
- raise PasswordError
-
- if self.re_wrongcrc.search(out):
+ elif self.re_wrongcrc.search(out):
raise CRCError(_("Header protected"))
-
- def repair(self):
- return False
+ elif self.re_wrongcrc.search(err):
+ raise CRCError(err)
def extract(self, password=None):
command = "x" if self.fullpath else "e"
- p = self.call_cmd(command, '-o' + self.out, fs_encode(self.filename), password=password)
+ p = self.call_cmd(command, '-o' + self.out, self.target, password=password)
renice(p.pid, self.renice)
@@ -117,7 +103,7 @@ class SevenZip(UnRar):
def list(self, password=None):
command = "l" if self.fullpath else "l"
- p = self.call_cmd(command, fs_encode(self.filename), password=password)
+ p = self.call_cmd(command, self.target, password=password)
out, err = p.communicate()
if "Can not open" in err:
@@ -142,7 +128,7 @@ class SevenZip(UnRar):
args.append("-y")
#: Set a password
- if "password" in kwargs and kwargs['password']:
+ if kwargs.get('password'):
args.append("-p%s" % kwargs['password'])
else:
args.append("-p-")
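
For clarity, the way the password switch ends up on the 7z command line (mirroring call_cmd above) can be reproduced standalone; the helper name and archive path below are placeholders:

    # Hedged sketch (not part of this commit).

    def sevenzip_args(command, target, password=None, overwrite=True):
        args = ["7z", command]
        if overwrite:
            args.append("-y")               #: assume Yes on all queries
        if password:
            args.append("-p%s" % password)  #: pass the archive password
        else:
            args.append("-p-")              #: explicitly disable the password prompt
        args.append(target)
        return args

    # sevenzip_args("x", "archive.7z", password="secret")
    # -> ['7z', 'x', '-y', '-psecret', 'archive.7z']
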
diff --git a/module/plugins/internal/SimpleCrypter.py b/module/plugins/internal/SimpleCrypter.py
index 6a3f91a5b..20e91ac1e 100644
--- a/module/plugins/internal/SimpleCrypter.py
+++ b/module/plugins/internal/SimpleCrypter.py
@@ -3,19 +3,20 @@
import re
from module.plugins.internal.Crypter import Crypter
-from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, replace_patterns, set_cookie, set_cookies
-from module.utils import fixup, html_unescape
+from module.plugins.internal.Plugin import replace_patterns, set_cookie, set_cookies
+from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class SimpleCrypter(Crypter, SimpleHoster):
__name__ = "SimpleCrypter"
__type__ = "crypter"
- __version__ = "0.60"
+ __version__ = "0.64"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
- __config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True), #: Overrides pyload.config['general']['folder_per_package']
- ("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
+ __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
+ ("use_subfolder" , "bool", "Save package to subfolder" , True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
__description__ = """Simple decrypter plugin"""
__license__ = "GPLv3"
@@ -47,13 +48,35 @@ class SimpleCrypter(Crypter, SimpleHoster):
and its loadPage method:
-
def load_page(self, page_n):
return the html of the page number page_n
"""
- DIRECT_LINK = True
- LEECH_HOSTER = False
+ NAME_REPLACEMENTS = []
+ URL_REPLACEMENTS = []
+
+ COOKIES = True #: or False or list of tuples [(domain, name, value)]
+ DIRECT_LINK = True #: Set to True to look for a direct link (as defined in handle_direct method); set to None to do so only if self.account is set
+ LOGIN_ACCOUNT = False #: Set to True to require account login
+ LOGIN_PREMIUM = False #: Set to True to require premium account login
+ # LEECH_HOSTER = False #: Set to True to leech other hoster link (as defined in handle_multi method)
+ TEXT_ENCODING = True #: Set to encoding name if encoding value in http header is not correct
+ PAGES_PATTERN = None
+
+ LINK_PATTERN = None
+
+ NAME_PATTERN = None
+ HASHSUM_PATTERN = None
+ OFFLINE_PATTERN = None
+ TEMP_OFFLINE_PATTERN = None
+
+ WAIT_PATTERN = None
+ PREMIUM_ONLY_PATTERN = None
+ HAPPY_HOUR_PATTERN = None
+ IP_BLOCKED_PATTERN = None
+ DL_LIMIT_PATTERN = None
+ SIZE_LIMIT_PATTERN = None
+ ERROR_PATTERN = None
#@TODO: Remove in 0.4.10
@@ -82,7 +105,7 @@ class SimpleCrypter(Crypter, SimpleHoster):
self.log_debug("Redirect #%d to: %s" % (i, redirect))
header = self.load(redirect, just_header=True)
- if 'location' in header and header['location']:
+ if header.get('location'):
self.link = header['location']
else:
break
@@ -90,6 +113,11 @@ class SimpleCrypter(Crypter, SimpleHoster):
self.log_error(_("Too many redirects"))
+ def prepare(self):
+ self.links = []
+ return super(SimpleCrypter, self).prepare()
+
+
def decrypt(self, pyfile):
self.prepare()
self.check_info() #@TODO: Remove in 0.4.10
@@ -108,7 +136,7 @@ class SimpleCrypter(Crypter, SimpleHoster):
self.links = self.get_links() or list()
- if hasattr(self, 'PAGES_PATTERN') and hasattr(self, 'loadPage'):
+ if self.PAGES_PATTERN:
self.handle_pages(pyfile)
self.log_debug("Package has %d links" % len(self.links))
@@ -129,9 +157,14 @@ class SimpleCrypter(Crypter, SimpleHoster):
return re.findall(self.LINK_PATTERN, self.html)
+ def load_page(self, number):
+ raise NotImplementedError
+
+
def handle_pages(self, pyfile):
try:
pages = int(re.search(self.PAGES_PATTERN, self.html).group(1))
+
except Exception:
pages = 1
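
To see the new class-level attributes in context, a minimal, hypothetical decrypter built on SimpleCrypter could look like this; the domain and regexes are invented:

    # -*- coding: utf-8 -*-
    # Hypothetical sketch (not part of this commit).

    from module.plugins.internal.SimpleCrypter import SimpleCrypter


    class ExampleComFolder(SimpleCrypter):
        __name__    = "ExampleComFolder"
        __type__    = "crypter"
        __version__ = "0.01"
        __pattern__ = r'https?://(?:www\.)?example\.com/folder/\w+'

        NAME_PATTERN  = r'<h1>(?P<N>.+?)</h1>'
        LINK_PATTERN  = r'<a class="file" href="(.+?)"'
        PAGES_PATTERN = r'Page 1 of (\d+)'

        def load_page(self, number):
            #: Called by handle_pages() for every additional result page;
            #: must return the html of page `number`
            return self.load(self.pyfile.url, get={'page': number})
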
diff --git a/module/plugins/internal/SimpleHoster.py b/module/plugins/internal/SimpleHoster.py
index 9c310ca27..e5526a7bd 100644
--- a/module/plugins/internal/SimpleHoster.py
+++ b/module/plugins/internal/SimpleHoster.py
@@ -2,33 +2,27 @@
from __future__ import with_statement
-import mimetypes
import os
import re
import time
-import urlparse
-from module.PyFile import statusMap as _statusMap
from module.network.HTTPRequest import BadHeader
from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.Hoster import Hoster, create_getInfo, parse_fileInfo
-from module.plugins.internal.Plugin import Fail, encode, fixurl, replace_patterns, seconds_to_midnight, set_cookie, set_cookies
+from module.plugins.internal.Plugin import Fail, encode, parse_name, parse_time, replace_patterns, seconds_to_midnight, set_cookie, set_cookies
from module.utils import fixup, fs_encode, parseFileSize as parse_size
-#@TODO: Adapt and move to PyFile in 0.4.10
-statusMap = dict((v, k) for k, v in _statusMap.items())
-
-
class SimpleHoster(Hoster):
__name__ = "SimpleHoster"
__type__ = "hoster"
- __version__ = "1.80"
+ __version__ = "1.93"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
- __config__ = [("use_premium", "bool", "Use premium account if available" , True),
- ("fallback" , "bool", "Fallback to free download if premium fails", True)]
+ __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
+ ("fallback_premium", "bool", "Fallback to free download if premium fails", True),
+ ("chk_filesize" , "bool", "Check file size" , True)]
__description__ = """Simple hoster plugin"""
__license__ = "GPLv3"
@@ -91,25 +85,43 @@ class SimpleHoster(Hoster):
LINK_PREMIUM_PATTERN: (optional) group(1) should be the direct link for premium download
example: LINK_PREMIUM_PATTERN = r'<div class="link"><a href="(.+?)"'
"""
- NAME_REPLACEMENTS = [("&#?\w+;", fixup)]
- SIZE_REPLACEMENTS = []
- URL_REPLACEMENTS = []
-
- FILE_ERRORS = [('Html error' , r'\A(?:\s*<.+>)?((?:[\w\s]*(?:[Ee]rror|ERROR)\s*\:?)?\s*\d{3})(?:\Z|\s+)'),
- ('Request error', r'([Aa]n error occured while processing your request)' ),
- ('Html file' , r'\A\s*<!DOCTYPE html' )]
- CHECK_FILE = True #: Set to False to not check the last downloaded file with declared error patterns
- CHECK_TRAFFIC = False #: Set to True to reload checking traffic left for premium account
- COOKIES = True #: or False or list of tuples [(domain, name, value)]
- DIRECT_LINK = None #: Set to True to looking for direct link (as defined in handle_direct method), set to None to do it if self.account is True else False
- DISPOSITION = True #: Set to True to use any content-disposition value in http header as file name
- LOGIN_ACCOUNT = False #: Set to True to require account login
- LOGIN_PREMIUM = False #: Set to True to require premium account login
- LEECH_HOSTER = False #: Set to True to leech other hoster link (as defined in handle_multi method)
- TEXT_ENCODING = True #: Set to encoding name if encoding value in http header is not correct
-
- LINK_PATTERN = None
+ NAME_REPLACEMENTS = []
+ SIZE_REPLACEMENTS = []
+ URL_REPLACEMENTS = []
+
+ CHECK_FILE = True #: Set to False to not check the last downloaded file with declared error patterns
+ CHECK_TRAFFIC = False #: Set to True to reload checking traffic left for premium account
+ COOKIES = True #: or False or list of tuples [(domain, name, value)]
+ DIRECT_LINK = None #: Set to True to look for a direct link (as defined in handle_direct method); set to None to do so only if self.account is set
+ DISPOSITION = True #: Set to True to use any content-disposition value in http header as file name
+ LOGIN_ACCOUNT = False #: Set to True to require account login
+ LOGIN_PREMIUM = False #: Set to True to require premium account login
+ LEECH_HOSTER = False #: Set to True to leech other hoster link (as defined in handle_multi method)
+ TEXT_ENCODING = True #: Set to encoding name if encoding value in http header is not correct
+
+ LINK_PATTERN = None
+ LINK_FREE_PATTERN = None
+ LINK_PREMIUM_PATTERN = None
+
+ INFO_PATTERN = None
+ NAME_PATTERN = None
+ SIZE_PATTERN = None
+ HASHSUM_PATTERN = None
+ OFFLINE_PATTERN = None
+ TEMP_OFFLINE_PATTERN = None
+
+ WAIT_PATTERN = None
+ PREMIUM_ONLY_PATTERN = None
+ HAPPY_HOUR_PATTERN = None
+ IP_BLOCKED_PATTERN = None
+ DL_LIMIT_PATTERN = None
+ SIZE_LIMIT_PATTERN = None
+ ERROR_PATTERN = None
+
+ FILE_ERRORS = [('Html error' , r'\A(?:\s*<.+>)?((?:[\w\s]*(?:[Ee]rror|ERROR)\s*\:?)?\s*\d{3})(?:\Z|\s+)'),
+ ('Request error', r'([Aa]n error occured while processing your request)' ),
+ ('Html file' , r'\A\s*<!DOCTYPE html' )]
@classmethod
@@ -150,10 +162,10 @@ class SimpleHoster(Hoster):
pass
if html:
- if hasattr(cls, "OFFLINE_PATTERN") and re.search(cls.OFFLINE_PATTERN, html):
+ if cls.OFFLINE_PATTERN and re.search(cls.OFFLINE_PATTERN, html):
info['status'] = 1
- elif hasattr(cls, "TEMP_OFFLINE_PATTERN") and re.search(cls.TEMP_OFFLINE_PATTERN, html):
+ elif cls.TEMP_OFFLINE_PATTERN and re.search(cls.TEMP_OFFLINE_PATTERN, html):
info['status'] = 6
else:
@@ -165,7 +177,7 @@ class SimpleHoster(Hoster):
if all(True for k in pdict if k not in info['pattern']):
info['pattern'].update(pdict)
- except AttributeError:
+ except Exception:
continue
else:
@@ -175,8 +187,8 @@ class SimpleHoster(Hoster):
info['status'] = 2
if 'N' in info['pattern']:
- info['name'] = replace_patterns(fixurl(info['pattern']['N']),
- cls.NAME_REPLACEMENTS)
+ name = replace_patterns(info['pattern']['N'], cls.NAME_REPLACEMENTS)
+ info['name'] = parse_name(name)
if 'S' in info['pattern']:
size = replace_patterns(info['pattern']['S'] + info['pattern']['U'] if 'U' in info['pattern'] else info['pattern']['S'],
@@ -202,19 +214,12 @@ class SimpleHoster(Hoster):
def prepare(self):
- self.pyfile.error = "" #@TODO: Remove in 0.4.10
- self.html = "" #@TODO: Recheck in 0.4.10
- self.link = "" #@TODO: Recheck in 0.4.10
- self.last_download = ""
- self.direct_dl = False
- self.leech_dl = False
-
- if not self.get_config('use_premium', True):
- self.restart(nopremium=True)
+ self.link = ""
+ self.direct_dl = False
+ self.leech_dl = False
if self.LOGIN_PREMIUM and not self.premium:
self.fail(_("Required premium account not found"))
- self.LOGIN_ACCOUNT = True
if self.LOGIN_ACCOUNT and not self.account:
self.fail(_("Required account not found"))
@@ -222,10 +227,10 @@ class SimpleHoster(Hoster):
self.req.setOption("timeout", 120)
if self.LINK_PATTERN:
- if not hasattr(self, 'LINK_FREE_PATTERN'):
+ if self.LINK_FREE_PATTERN is None:
self.LINK_FREE_PATTERN = self.LINK_PATTERN
- if not hasattr(self, 'LINK_PREMIUM_PATTERN'):
+ if self.LINK_PREMIUM_PATTERN is None:
self.LINK_PREMIUM_PATTERN = self.LINK_PATTERN
if (self.LEECH_HOSTER
@@ -253,95 +258,72 @@ class SimpleHoster(Hoster):
def process(self, pyfile):
- try:
- self.prepare()
- self.check_info() #@TODO: Remove in 0.4.10
-
- if self.leech_dl:
- self.log_info(_("Processing as debrid download..."))
- self.handle_multi(pyfile)
-
- if not self.link and not was_downloaded():
- self.log_info(_("Failed to leech url"))
+ self.prepare()
+ self.check_info() #@TODO: Remove in 0.4.10
- else:
- if not self.link and self.direct_dl and not self.last_download:
- self.log_info(_("Looking for direct download link..."))
- self.handle_direct(pyfile)
+ if self.leech_dl:
+ self.log_info(_("Processing as debrid download..."))
+ self.handle_multi(pyfile)
- if self.link or self.last_download:
- self.log_info(_("Direct download link detected"))
- else:
- self.log_info(_("Direct download link not found"))
+ if not self.link and not was_downloaded():
+ self.log_info(_("Failed to leech url"))
- if not self.link and not self.last_download:
- self.preload()
+ else:
+ if not self.link and self.direct_dl and not self.last_download:
+ self.log_info(_("Looking for direct download link..."))
+ self.handle_direct(pyfile)
- if 'status' not in self.info or self.info['status'] is 3: #@TODO: Recheck in 0.4.10
- self.check_info()
+ if self.link or self.last_download:
+ self.log_info(_("Direct download link detected"))
+ else:
+ self.log_info(_("Direct download link not found"))
- if self.premium and (not self.CHECK_TRAFFIC or self.check_traffic_left()):
- self.log_info(_("Processing as premium download..."))
- self.handle_premium(pyfile)
+ if not self.link and not self.last_download:
+ self.preload()
- elif not self.LOGIN_ACCOUNT or (not self.CHECK_TRAFFIC or self.check_traffic_left()):
- self.log_info(_("Processing as free download..."))
- self.handle_free(pyfile)
+ if 'status' not in self.info or self.info['status'] is 3: #@TODO: Recheck in 0.4.10
+ self.check_info()
- if not self.last_download:
- self.log_info(_("Downloading file..."))
- self.download(self.link, disposition=self.DISPOSITION)
+ if self.premium and (not self.CHECK_TRAFFIC or self.check_traffic()):
+ self.log_info(_("Processing as premium download..."))
+ self.handle_premium(pyfile)
- self.check_file()
+ elif not self.LOGIN_ACCOUNT or (not self.CHECK_TRAFFIC or self.check_traffic()):
+ self.log_info(_("Processing as free download..."))
+ self.handle_free(pyfile)
- except Fail, e: #@TODO: Move to PluginThread in 0.4.10
- if self.get_config('fallback', True) and self.premium:
- self.log_warning(_("Premium download failed"), e)
- self.restart(nopremium=True)
+ if not self.last_download:
+ self.log_info(_("Downloading file..."))
+ self.download(self.link, disposition=self.DISPOSITION)
- else:
- raise Fail(encode(e)) #@TODO: Remove `encode` in 0.4.10
+ self.check_download()
- def check_file(self):
- self.log_info(_("Checking file..."))
+ def check_download(self):
+ self.log_info(_("Checking downloaded file..."))
+ self.log_debug("Using default check rules...")
+ for r, p in self.FILE_ERRORS:
+ errmsg = self.check_file({r: re.compile(p)})
+ if errmsg is not None:
+ errmsg = errmsg.strip().capitalize()
- if self.captcha.task and not self.last_download:
- self.captcha.invalid()
- self.retry(10, reason=_("Wrong captcha"))
+ try:
+ errmsg += " | " + self.last_check.group(1).strip()
- # 10485760 is 10MB, tolerance is used when comparing displayed size on the hoster website to real size
- # For example displayed size can be 1.46GB for example, but real size can be 1.4649853GB
- elif self.check_download({'Empty file': re.compile(r'\A((.|)(\2|\s)*)\Z')},
- file_size=self.info['size'] if 'size' in self.info else 0,
- size_tolerance=10485760,
- delete=True):
- self.error(_("Empty file"))
+ except Exception:
+ pass
+ self.log_warning(_("Check result: ") + errmsg, _("Waiting 1 minute and retry"))
+ self.wait(60, reconnect=True)
+ self.restart(errmsg, premium=True)
else:
- self.log_debug("Using default check rules...")
- for r, p in self.FILE_ERRORS:
- errmsg = self.check_download({r: re.compile(p)})
- if errmsg is not None:
- errmsg = errmsg.strip().capitalize()
-
- try:
- errmsg += " | " + self.last_check.group(1).strip()
- except Exception:
- pass
-
- self.log_warning(_("Check result: ") + errmsg, _("Waiting 1 minute and retry"))
- self.wantReconnect = True
- self.retry(wait_time=60, reason=errmsg)
- else:
- if self.CHECK_FILE:
- self.log_debug("Using custom check rules...")
- with open(fs_encode(self.last_download), "rb") as f:
- self.html = f.read(1048576) #@TODO: Recheck in 0.4.10
- self.check_errors()
+ if self.CHECK_FILE:
+ self.log_debug("Using custom check rules...")
+ with open(fs_encode(self.last_download), "rb") as f:
+ self.html = f.read(1048576) #@TODO: Recheck in 0.4.10
+ self.check_errors()
self.log_info(_("No errors found"))
- self.pyfile.error = ""
def check_errors(self):
@@ -349,65 +331,56 @@ class SimpleHoster(Hoster):
self.log_warning(_("No html code to check"))
return
- if hasattr(self, 'IP_BLOCKED_PATTERN') and re.search(self.IP_BLOCKED_PATTERN, self.html):
+ if self.IP_BLOCKED_PATTERN and re.search(self.IP_BLOCKED_PATTERN, self.html):
self.fail(_("Connection from your current IP address is not allowed"))
elif not self.premium:
- if hasattr(self, 'PREMIUM_ONLY_PATTERN') and re.search(self.PREMIUM_ONLY_PATTERN, self.html):
+ if self.PREMIUM_ONLY_PATTERN and re.search(self.PREMIUM_ONLY_PATTERN, self.html):
self.fail(_("File can be downloaded by premium users only"))
- elif hasattr(self, 'SIZE_LIMIT_PATTERN') and re.search(self.SIZE_LIMIT_PATTERN, self.html):
+ elif self.SIZE_LIMIT_PATTERN and re.search(self.SIZE_LIMIT_PATTERN, self.html):
self.fail(_("File too large for free download"))
- elif hasattr(self, 'DL_LIMIT_PATTERN') and re.search(self.DL_LIMIT_PATTERN, self.html):
+ elif self.DL_LIMIT_PATTERN and re.search(self.DL_LIMIT_PATTERN, self.html):
m = re.search(self.DL_LIMIT_PATTERN, self.html)
try:
errmsg = m.group(1).strip()
- except Exception:
+
+ except (AttributeError, IndexError):
errmsg = m.group(0).strip()
self.info['error'] = re.sub(r'<.*?>', " ", errmsg)
self.log_warning(self.info['error'])
- if re.search('da(il)?y|today', errmsg, re.I):
- wait_time = seconds_to_midnight(gmt=2)
- else:
- wait_time = sum(int(v) * {'hr': 3600, 'hour': 3600, 'min': 60, 'sec': 1, "": 1}[u.lower()] for v, u in
- re.findall(r'(\d+)\s*(hr|hour|min|sec|)', errmsg, re.I))
+ wait_time = parse_time(errmsg)
+ self.wait(wait_time, reconnect=wait_time > 300)
+ self.restart(_("Download limit exceeded"), premium=True)
- self.wantReconnect = wait_time > 300
- self.retry(1, wait_time, _("Download limit exceeded"))
-
- if hasattr(self, 'HAPPY_HOUR_PATTERN') and re.search(self.HAPPY_HOUR_PATTERN, self.html):
+ if self.HAPPY_HOUR_PATTERN and re.search(self.HAPPY_HOUR_PATTERN, self.html):
self.multiDL = True
- if hasattr(self, 'ERROR_PATTERN'):
+ if self.ERROR_PATTERN:
m = re.search(self.ERROR_PATTERN, self.html)
- if m:
+ if m is not None:
try:
errmsg = m.group(1).strip()
- except Exception:
+
+ except (AttributeError, IndexError):
errmsg = m.group(0).strip()
self.info['error'] = re.sub(r'<.*?>', " ", errmsg)
self.log_warning(self.info['error'])
if re.search('limit|wait|slot', errmsg, re.I):
- if re.search("da(il)?y|today", errmsg):
- wait_time = seconds_to_midnight(gmt=2)
- else:
- wait_time = sum(int(v) * {'hr': 3600, 'hour': 3600, 'min': 60, 'sec': 1, "": 1}[u.lower()] for v, u in
- re.findall(r'(\d+)\s*(hr|hour|min|sec|)', errmsg, re.I))
-
- self.wantReconnect = wait_time > 300
- self.retry(1, wait_time, _("Download limit exceeded"))
+ wait_time = parse_time(errmsg)
+ self.wait(wait_time, reconnect=wait_time > 300)
+ self.restart(_("Download limit exceeded"), premium=True)
elif re.search('country|ip|region|nation', errmsg, re.I):
self.fail(_("Connection from your current IP address is not allowed"))
elif re.search('captcha|code', errmsg, re.I):
- self.captcha.invalid()
- self.retry(10, reason=_("Wrong captcha"))
+ self.retry_captcha()
elif re.search('countdown|expired', errmsg, re.I):
self.retry(10, 60, _("Link expired"))
@@ -422,28 +395,26 @@ class SimpleHoster(Hoster):
self.offline()
elif re.search('filename', errmsg, re.I):
- url_p = urlparse.urlparse(self.pyfile.url)
- self.pyfile.url = "%s://%s/%s" % (url_p.scheme, url_p.netloc, url_p.path.split('/')[0])
- self.retry(1, reason=_("Wrong url"))
+ self.fail(_("Invalid url"))
elif re.search('premium', errmsg, re.I):
self.fail(_("File can be downloaded by premium users only"))
else:
- self.wantReconnect = True
- self.retry(wait_time=60, reason=errmsg)
+ self.wait(60, reconnect=True)
+ self.restart(errmsg, premium=True)
- elif hasattr(self, 'WAIT_PATTERN'):
+ elif self.WAIT_PATTERN:
m = re.search(self.WAIT_PATTERN, self.html)
- if m:
+ if m is not None:
try:
waitmsg = m.group(1).strip()
- except Exception:
+
+ except (AttributeError, IndexError):
waitmsg = m.group(0).strip()
- wait_time = sum(int(v) * {'hr': 3600, 'hour': 3600, 'min': 60, 'sec': 1, "": 1}[u.lower()] for v, u in
- re.findall(r'(\d+)\s*(hr|hour|min|sec|)', waitmsg, re.I))
- self.wait(wait_time, wait_time > 300)
+ wait_time = parse_time(waitmsg)
+ self.wait(wait_time, reconnect=wait_time > 300)
self.info.pop('error', None)
@@ -457,22 +428,19 @@ class SimpleHoster(Hoster):
self.log_debug("Previous file info: %s" % old_info)
try:
- status = self.info['status'] or None
+ status = self.info['status'] or 14
- if status == 1:
+ if status is 1:
self.offline()
- elif status == 6:
+ elif status is 6:
self.temp_offline()
- elif status == 8:
- if 'error' in self.info:
- self.fail(self.info['error'])
- else:
- self.fail(_("File status: " + statusMap[status]))
+ elif status is 8:
+ self.fail()
finally:
- self.log_info(_("File status: ") + (statusMap[status] if status else _("Unknown")))
+ self.log_info(_("File status: ") + self.pyfile.getStatusName())
def check_name_size(self, getinfo=True):
@@ -484,8 +452,8 @@ class SimpleHoster(Hoster):
self.log_debug("Previous file info: %s" % old_info)
try:
- url = self.info['url'].strip()
- name = self.info['name'].strip()
+ url = self.info['url']
+ name = self.info['name']
except KeyError:
pass
@@ -494,7 +462,7 @@ class SimpleHoster(Hoster):
if name and name is not url:
self.pyfile.name = name
- if 'size' in self.info and self.info['size'] > 0:
+ if self.info.get('size') > 0:
self.pyfile.size = int(self.info['size']) #@TODO: Fix int conversion in 0.4.10
# self.pyfile.sync()
@@ -535,7 +503,7 @@ class SimpleHoster(Hoster):
def handle_free(self, pyfile):
- if not hasattr(self, 'LINK_FREE_PATTERN'):
+ if not self.LINK_FREE_PATTERN:
self.log_error(_("Free download not implemented"))
m = re.search(self.LINK_FREE_PATTERN, self.html)
@@ -546,10 +514,9 @@ class SimpleHoster(Hoster):
def handle_premium(self, pyfile):
- if not hasattr(self, 'LINK_PREMIUM_PATTERN'):
+ if not self.LINK_PREMIUM_PATTERN:
self.log_error(_("Premium download not implemented"))
- self.log_info(_("Processing as free download..."))
- self.handle_free(pyfile)
+ self.restart()
m = re.search(self.LINK_PREMIUM_PATTERN, self.html)
if m is None:
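
Since prepare() and check_errors() now read these attributes directly instead of probing with hasattr(), a typical hoster reduces to a set of patterns; a hypothetical example (domain and regexes invented):

    # -*- coding: utf-8 -*-
    # Hypothetical sketch (not part of this commit).

    from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo


    class ExampleCom(SimpleHoster):
        __name__    = "ExampleCom"
        __type__    = "hoster"
        __version__ = "0.01"
        __pattern__ = r'https?://(?:www\.)?example\.com/file/\w+'

        NAME_PATTERN      = r'<h2 class="filename">(?P<N>.+?)</h2>'
        SIZE_PATTERN      = r'Size: (?P<S>[\d.,]+) (?P<U>[\w^_]+)'
        OFFLINE_PATTERN   = r'>File not found<'
        WAIT_PATTERN      = r'You have to wait (.+?) before'
        LINK_FREE_PATTERN = r'<a id="download" href="(.+?)"'


    getInfo = create_getInfo(ExampleCom)
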
diff --git a/module/plugins/internal/UnRar.py b/module/plugins/internal/UnRar.py
index 0386991d9..908689109 100644
--- a/module/plugins/internal/UnRar.py
+++ b/module/plugins/internal/UnRar.py
@@ -8,11 +8,11 @@ from glob import glob
from string import digits
from module.plugins.internal.Extractor import Extractor, ArchiveError, CRCError, PasswordError
-from module.utils import fs_decode, fs_encode, save_join as fs_join
+from module.utils import fs_decode, save_join as fs_join
def renice(pid, value):
- if value and os.name != "nt":
+ if value and os.name is not "nt":
try:
subprocess.Popen(["renice", str(value), str(pid)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=-1)
@@ -22,7 +22,7 @@ def renice(pid, value):
class UnRar(Extractor):
__name__ = "UnRar"
- __version__ = "1.25"
+ __version__ = "1.27"
__status__ = "testing"
__description__ = """Rar extractor plugin"""
@@ -49,7 +49,7 @@ class UnRar(Extractor):
@classmethod
def find(cls):
try:
- if os.name == "nt":
+ if os.name is "nt":
cls.CMD = os.path.join(pypath, "RAR.exe")
else:
cls.CMD = "rar"
@@ -61,7 +61,7 @@ class UnRar(Extractor):
except OSError:
try:
- if os.name == "nt":
+ if os.name is "nt":
cls.CMD = os.path.join(pypath, "UnRAR.exe")
else:
cls.CMD = "unrar"
@@ -84,20 +84,8 @@ class UnRar(Extractor):
return True if cls.re_multipart.search(filename) else False
- def verify(self, password):
- p = self.call_cmd("t", "-v", fs_encode(self.filename), password=password)
- self._progress(p)
- err = p.stderr.read().strip()
-
- if self.re_wrongpwd.search(err):
- raise PasswordError
-
- if self.re_wrongcrc.search(err):
- raise CRCError(err)
-
-
- def check(self, password):
- p = self.call_cmd("l", "-v", fs_encode(self.filename), password=password)
+ def verify(self, password=None):
+ p = self.call_cmd("l", "-v", self.target, password=password)
out, err = p.communicate()
if self.re_wrongpwd.search(err):
@@ -113,13 +101,28 @@ class UnRar(Extractor):
def repair(self):
- p = self.call_cmd("rc", fs_encode(self.filename))
+ p = self.call_cmd("rc", self.target)
#: Communicate and retrieve stderr
self._progress(p)
err = p.stderr.read().strip()
+
if err or p.returncode:
- return False
+ p = self.call_cmd("r", self.target)
+
+ # communicate and retrieve stderr
+ self._progress(p)
+ err = p.stderr.read().strip()
+
+ if err or p.returncode:
+ return False
+
+ else:
+ dir = os.path.dirname(filename)
+ name = re_filefixed.search(out).group(1)
+
+ self.filename = os.path.join(dir, name)
+
return True
@@ -145,7 +148,7 @@ class UnRar(Extractor):
def extract(self, password=None):
command = "x" if self.fullpath else "e"
- p = self.call_cmd(command, fs_encode(self.filename), self.out, password=password)
+ p = self.call_cmd(command, self.target, self.out, password=password)
renice(p.pid, self.renice)
@@ -169,7 +172,7 @@ class UnRar(Extractor):
self.files = self.list(password)
- def get_delete_files(self):
+ def items(self):
dir, name = os.path.split(self.filename)
#: Actually extracted file
@@ -185,7 +188,7 @@ class UnRar(Extractor):
def list(self, password=None):
command = "vb" if self.fullpath else "lb"
- p = self.call_cmd(command, "-v", fs_encode(self.filename), password=password)
+ p = self.call_cmd(command, "-v", self.target, password=password)
out, err = p.communicate()
if "Cannot open" in err:
@@ -226,7 +229,7 @@ class UnRar(Extractor):
args.append("-y")
#: Set a password
- if "password" in kwargs and kwargs['password']:
+ if kwargs.get('password'):
args.append("-p%s" % kwargs['password'])
else:
args.append("-p-")
diff --git a/module/plugins/internal/UnZip.py b/module/plugins/internal/UnZip.py
index 9a01611bf..87cbd568a 100644
--- a/module/plugins/internal/UnZip.py
+++ b/module/plugins/internal/UnZip.py
@@ -7,12 +7,11 @@ import sys
import zipfile
from module.plugins.internal.Extractor import Extractor, ArchiveError, CRCError, PasswordError
-from module.utils import fs_encode
class UnZip(Extractor):
__name__ = "UnZip"
- __version__ = "1.15"
+ __version__ = "1.16"
__status__ = "testing"
__description__ = """Zip extractor plugin"""
@@ -30,17 +29,13 @@ class UnZip(Extractor):
def list(self, password=None):
- with zipfile.ZipFile(fs_encode(self.filename), 'r', allowZip64=True) as z:
+ with zipfile.ZipFile(self.target, 'r', allowZip64=True) as z:
z.setpassword(password)
return z.namelist()
- def check(self, password):
- pass
-
-
- def verify(self):
- with zipfile.ZipFile(fs_encode(self.filename), 'r', allowZip64=True) as z:
+ def verify(self, password=None):
+ with zipfile.ZipFile(self.target, 'r', allowZip64=True) as z:
badfile = z.testzip()
if badfile:
@@ -51,7 +46,7 @@ class UnZip(Extractor):
def extract(self, password=None):
try:
- with zipfile.ZipFile(fs_encode(self.filename), 'r', allowZip64=True) as z:
+ with zipfile.ZipFile(self.target, 'r', allowZip64=True) as z:
z.setpassword(password)
badfile = z.testzip()
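
The UnZip changes boil down to handing self.target straight to the standard library; the underlying zipfile calls behave as sketched below (archive name and password are placeholders):

    # -*- coding: utf-8 -*-
    # Hedged sketch (not part of this commit): plain stdlib zipfile usage.

    import zipfile

    with zipfile.ZipFile("archive.zip", 'r', allowZip64=True) as z:
        z.setpassword("secret")    #: only consulted for encrypted members
        badfile = z.testzip()      #: name of the first corrupt member, or None
        if badfile:
            raise zipfile.BadZipfile("CRC error on %s" % badfile)

        print z.namelist()         #: the listing UnZip.list() returns
        z.extractall("out")        #: extraction of all members
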
diff --git a/module/plugins/internal/XFSAccount.py b/module/plugins/internal/XFSAccount.py
index e0f6b1ee8..7b9410222 100644
--- a/module/plugins/internal/XFSAccount.py
+++ b/module/plugins/internal/XFSAccount.py
@@ -4,14 +4,16 @@ import re
import time
import urlparse
+from module.common.json_layer import json_loads
from module.plugins.internal.Account import Account
+# from module.plugins.internal.MultiAccount import MultiAccount
from module.plugins.internal.Plugin import parse_html_form, set_cookie
class XFSAccount(Account):
__name__ = "XFSAccount"
__type__ = "account"
- __version__ = "0.42"
+ __version__ = "0.49"
__status__ = "testing"
__description__ = """XFileSharing account plugin"""
@@ -20,8 +22,8 @@ class XFSAccount(Account):
("Walter Purcaro", "vuolter@gmail.com" )]
- HOSTER_DOMAIN = None
- HOSTER_URL = None
+ PLUGIN_DOMAIN = None
+ PLUGIN_URL = None
LOGIN_URL = None
COOKIES = True
@@ -37,28 +39,39 @@ class XFSAccount(Account):
LEECH_TRAFFIC_UNIT = "MB" #: Used only if no group <U> was found
LOGIN_FAIL_PATTERN = r'Incorrect Login or Password|account was banned|Error<'
+ LOGIN_SKIP_PATTERN = r'op=logout'
- def parse_info(self, user, password, data, req):
+ def set_xfs_cookie(self):
+ if not self.COOKIES:
+ return
+
+ if isinstance(self.COOKIES, list) and (self.PLUGIN_DOMAIN, "lang", "english") not in self.COOKIES:
+ self.COOKIES.insert((self.PLUGIN_DOMAIN, "lang", "english"))
+ else:
+ set_cookie(self.req.cj, self.PLUGIN_DOMAIN, "lang", "english")
+
+
+ def grab_info(self, user, password, data):
validuntil = None
trafficleft = None
leechtraffic = None
premium = None
- if not self.HOSTER_URL: #@TODO: Remove in 0.4.10
+ if not self.PLUGIN_URL: #@TODO: Remove in 0.4.10
return {'validuntil' : validuntil,
'trafficleft' : trafficleft,
'leechtraffic': leechtraffic,
'premium' : premium}
- html = self.load(self.HOSTER_URL,
+ html = self.load(self.PLUGIN_URL,
get={'op': "my_account"},
cookies=self.COOKIES)
premium = True if re.search(self.PREMIUM_PATTERN, html) else False
m = re.search(self.VALID_UNTIL_PATTERN, html)
- if m:
+ if m is not None:
expiredate = m.group(1).strip()
self.log_debug("Expire date: " + expiredate)
@@ -81,7 +94,7 @@ class XFSAccount(Account):
self.log_debug("VALID_UNTIL_PATTERN not found")
m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
- if m:
+ if m is not None:
try:
traffic = m.groupdict()
size = traffic['S']
@@ -138,29 +151,30 @@ class XFSAccount(Account):
'premium' : premium}
- def login(self, user, password, data, req):
- if self.HOSTER_DOMAIN:
- if not self.HOSTER_URL:
- self.HOSTER_URL = "http://www.%s/" % self.HOSTER_DOMAIN
+ def signin(self, user, password, data):
+ if self.PLUGIN_DOMAIN:
+ if not self.PLUGIN_URL:
+ self.PLUGIN_URL = "http://www.%s/" % self.PLUGIN_DOMAIN
- if self.COOKIES:
- if isinstance(self.COOKIES, list) and not self.COOKIES.count((self.HOSTER_DOMAIN, "lang", "english")):
- self.COOKIES.insert((self.HOSTER_DOMAIN, "lang", "english"))
- else:
- set_cookie(self.req.cj, self.HOSTER_DOMAIN, "lang", "english")
+ self.set_xfs_cookie()
- if not self.HOSTER_URL:
- self.login_fail(_("Missing HOSTER_URL"))
+ if not self.PLUGIN_URL:
+ self.fail_login(_("Missing PLUGIN_URL"))
+ else:
+ self.PLUGIN_URL = self.PLUGIN_URL.rstrip('/') + "/"
if not self.LOGIN_URL:
- self.LOGIN_URL = urlparse.urljoin(self.HOSTER_URL, "login.html")
+ self.LOGIN_URL = urlparse.urljoin(self.PLUGIN_URL, "login.html")
html = self.load(self.LOGIN_URL, cookies=self.COOKIES)
+ if re.search(self.LOGIN_SKIP_PATTERN, html):
+ self.skip_login()
+
action, inputs = parse_html_form('name="FL"', html)
if not inputs:
inputs = {'op' : "login",
- 'redirect': self.HOSTER_URL}
+ 'redirect': self.PLUGIN_URL}
inputs.update({'login' : user,
'password': password})
@@ -168,9 +182,17 @@ class XFSAccount(Account):
if action:
url = urlparse.urljoin("http://", action)
else:
- url = self.HOSTER_URL
+ url = self.PLUGIN_URL
html = self.load(url, post=inputs, cookies=self.COOKIES)
- if re.search(self.LOGIN_FAIL_PATTERN, html):
- self.login_fail()
+ try:
+ json = json_loads(html)
+
+ except ValueError:
+ if re.search(self.LOGIN_FAIL_PATTERN, html):
+ self.fail_login()
+
+ else:
+ if not 'success' in json or not json['success']:
+ self.fail_login()
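
The dual check introduced in signin() covers the fact that newer XFS sites answer the login POST with JSON while older ones return an HTML page: the response is tried as JSON first and the LOGIN_FAIL_PATTERN regex is only used as a fallback. A minimal sketch of that decision, returning True when the login looks successful (the pattern literal is the one defined above; the function name is illustrative only):

    import json
    import re

    LOGIN_FAIL_PATTERN = r'Incorrect Login or Password|account was banned|Error<'

    def login_succeeded(body):
        try:
            data = json.loads(body)
        except ValueError:
            #: Not JSON: fall back to scanning the HTML for a failure marker
            return re.search(LOGIN_FAIL_PATTERN, body) is None
        else:
            return bool(data.get('success'))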
diff --git a/module/plugins/internal/XFSCrypter.py b/module/plugins/internal/XFSCrypter.py
index 4c059d647..876575874 100644
--- a/module/plugins/internal/XFSCrypter.py
+++ b/module/plugins/internal/XFSCrypter.py
@@ -7,17 +7,20 @@ from module.plugins.internal.SimpleCrypter import SimpleCrypter, create_getInfo
class XFSCrypter(SimpleCrypter):
__name__ = "XFSCrypter"
__type__ = "crypter"
- __version__ = "0.13"
+ __version__ = "0.16"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
+ __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
+ ("use_subfolder" , "bool", "Save package to subfolder" , True),
+ ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
__description__ = """XFileSharing decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- HOSTER_DOMAIN = None
+ PLUGIN_DOMAIN = None
URL_REPLACEMENTS = [(r'&?per_page=\d+', ""), (r'[?/&]+$', ""), (r'(.+/[^?]+)$', r'\1?'), (r'$', r'&per_page=10000')]
@@ -28,23 +31,29 @@ class XFSCrypter(SimpleCrypter):
TEMP_OFFLINE_PATTERN = r'>\s*\w+ server (is in )?(maintenance|maintainance)'
+ def set_xfs_cookie(self):
+ if not self.COOKIES:
+ return
+
+ if isinstance(self.COOKIES, list) and (self.PLUGIN_DOMAIN, "lang", "english") not in self.COOKIES:
+ self.COOKIES.insert((self.PLUGIN_DOMAIN, "lang", "english"))
+ else:
+ set_cookie(self.req.cj, self.PLUGIN_DOMAIN, "lang", "english")
+
+
def prepare(self):
- if not self.HOSTER_DOMAIN:
+ if not self.PLUGIN_DOMAIN:
if self.account:
account = self.account
else:
account_name = (self.__name__ + ".py").replace("Folder.py", "").replace(".py", "")
account = self.pyload.accountManager.getAccountPlugin(account_name)
- if account and hasattr(account, "HOSTER_DOMAIN") and account.HOSTER_DOMAIN:
- self.HOSTER_DOMAIN = account.HOSTER_DOMAIN
+ if account and hasattr(account, "PLUGIN_DOMAIN") and account.PLUGIN_DOMAIN:
+ self.PLUGIN_DOMAIN = account.PLUGIN_DOMAIN
else:
- self.fail(_("Missing HOSTER_DOMAIN"))
+ self.fail(_("Missing PLUGIN_DOMAIN"))
- if self.COOKIES:
- if isinstance(self.COOKIES, list) and not self.COOKIES.count((self.HOSTER_DOMAIN, "lang", "english")):
- self.COOKIES.insert((self.HOSTER_DOMAIN, "lang", "english"))
- else:
- set_cookie(self.req.cj, self.HOSTER_DOMAIN, "lang", "english")
+ self.set_xfs_cookie()
return super(XFSCrypter, self).prepare()
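
The set_xfs_cookie() helper added here (and duplicated verbatim in XFSAccount and XFSHoster) either records the "lang=english" cookie in a COOKIES list of (domain, name, value) tuples or writes it straight to the cookie jar. A rough standalone equivalent; set_cookie is passed in to stand for the helper imported from module.plugins.internal.Plugin, and note that on a plain list list.insert() needs an index, so this sketch uses append() for that branch:

    def set_lang_cookie(plugin, set_cookie, lang="english"):
        """
        plugin     -- any XFS plugin instance exposing COOKIES, PLUGIN_DOMAIN and req.cj
        set_cookie -- cookie-jar helper, e.g. the one from Plugin.py
        """
        if not plugin.COOKIES:
            return

        entry = (plugin.PLUGIN_DOMAIN, "lang", lang)

        if isinstance(plugin.COOKIES, list):
            if entry not in plugin.COOKIES:
                plugin.COOKIES.append(entry)  #: list.insert() would require an index here
        else:
            set_cookie(plugin.req.cj, *entry)

Since the helper now exists three times with identical bodies, a shared mixin or a method on a common XFS base class would be the obvious next consolidation step.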
diff --git a/module/plugins/internal/XFSHoster.py b/module/plugins/internal/XFSHoster.py
index 5e0830dc6..b59f34122 100644
--- a/module/plugins/internal/XFSHoster.py
+++ b/module/plugins/internal/XFSHoster.py
@@ -14,10 +14,13 @@ from module.utils import html_unescape
class XFSHoster(SimpleHoster):
__name__ = "XFSHoster"
__type__ = "hoster"
- __version__ = "0.57"
+ __version__ = "0.63"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
+ __config__ = [("use_premium" , "bool", "Use premium account if available" , True),
+ ("fallback_premium", "bool", "Fallback to free download if premium fails", True),
+ ("chk_filesize" , "bool", "Check file size" , True)]
__description__ = """XFileSharing hoster plugin"""
__license__ = "GPLv3"
@@ -26,7 +29,7 @@ class XFSHoster(SimpleHoster):
("Walter Purcaro", "vuolter@gmail.com" )]
- HOSTER_DOMAIN = None
+ PLUGIN_DOMAIN = None
LEECH_HOSTER = True #@NOTE: Should be default to False for safe, but I'm lazy...
@@ -57,30 +60,36 @@ class XFSHoster(SimpleHoster):
self.resume_download = self.multiDL = self.premium
+ def set_xfs_cookie(self):
+ if not self.COOKIES:
+ return
+
+ if isinstance(self.COOKIES, list) and (self.PLUGIN_DOMAIN, "lang", "english") not in self.COOKIES:
+ self.COOKIES.insert((self.PLUGIN_DOMAIN, "lang", "english"))
+ else:
+ set_cookie(self.req.cj, self.PLUGIN_DOMAIN, "lang", "english")
+
+
def prepare(self):
"""
Initialize important variables
"""
- if not self.HOSTER_DOMAIN:
+ if not self.PLUGIN_DOMAIN:
if self.account:
account = self.account
else:
account = self.pyload.accountManager.getAccountPlugin(self.__name__)
- if account and hasattr(account, "HOSTER_DOMAIN") and account.HOSTER_DOMAIN:
- self.HOSTER_DOMAIN = account.HOSTER_DOMAIN
+ if account and hasattr(account, "PLUGIN_DOMAIN") and account.PLUGIN_DOMAIN:
+ self.PLUGIN_DOMAIN = account.PLUGIN_DOMAIN
else:
- self.fail(_("Missing HOSTER_DOMAIN"))
+ self.fail(_("Missing PLUGIN_DOMAIN"))
- if self.COOKIES:
- if isinstance(self.COOKIES, list) and not self.COOKIES.count((self.HOSTER_DOMAIN, "lang", "english")):
- self.COOKIES.insert((self.HOSTER_DOMAIN, "lang", "english"))
- else:
- set_cookie(self.req.cj, self.HOSTER_DOMAIN, "lang", "english")
+ self.set_xfs_cookie()
if not self.LINK_PATTERN:
pattern = r'(?:file: "(.+?)"|(https?://(?:www\.)?([^/]*?%s|\d+\.\d+\.\d+\.\d+)(\:\d+)?(/d/|(/files)?/\d+/\w+/).+?)["\'<])'
- self.LINK_PATTERN = pattern % self.HOSTER_DOMAIN.replace('.', '\.')
+ self.LINK_PATTERN = pattern % self.PLUGIN_DOMAIN.replace('.', '\.')
super(XFSHoster, self).prepare()
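
The default LINK_PATTERN above is a template that gets specialised per hoster by substituting the escaped PLUGIN_DOMAIN into the %s placeholder. A small illustration with a placeholder domain (example.com is not a real XFS site; re.escape is used here as the general form of the .replace('.', '\.') call in the hunk):

    import re

    LINK_TEMPLATE = r'(?:file: "(.+?)"|(https?://(?:www\.)?([^/]*?%s|\d+\.\d+\.\d+\.\d+)(\:\d+)?(/d/|(/files)?/\d+/\w+/).+?)["\'<])'

    plugin_domain = "example.com"
    link_pattern = LINK_TEMPLATE % re.escape(plugin_domain)

    html = '<a href="http://www.example.com/files/1/abcdef/file.zip">'
    m = re.search(link_pattern, html, re.S)
    print(m.group(2) if m else None)  #: -> http://www.example.com/files/1/abcdef/file.zip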
@@ -95,7 +104,7 @@ class XFSHoster(SimpleHoster):
self.check_errors()
m = re.search(self.LINK_PATTERN, self.html, re.S)
- if m:
+ if m is not None:
break
data = self.get_post_parameters()
@@ -111,7 +120,7 @@ class XFSHoster(SimpleHoster):
break
m = re.search(self.LINK_PATTERN, self.html, re.S)
- if m:
+ if m is not None:
break
else:
if 'op' in data:
@@ -129,7 +138,7 @@ class XFSHoster(SimpleHoster):
self.fail(_("Only registered or premium users can use url leech feature"))
#: Only tested with easybytez.com
- self.html = self.load("http://www.%s/" % self.HOSTER_DOMAIN)
+ self.html = self.load("http://www.%s/" % self.PLUGIN_DOMAIN)
action, inputs = self.parse_html_form()
@@ -150,7 +159,7 @@ class XFSHoster(SimpleHoster):
action, inputs = self.parse_html_form('F1')
if not inputs:
- self.retry(reason=self.info['error'] if 'error' in self.info else _("TEXTAREA F1 not found"))
+ self.retry(msg=self.info['error'] if 'error' in self.info else _("TEXTAREA F1 not found"))
self.log_debug(inputs)
@@ -163,7 +172,7 @@ class XFSHoster(SimpleHoster):
self.retry(20, 3 * 60, _("Can not leech file"))
elif 'today' in stmsg:
- self.retry(wait_time=seconds_to_midnight(gmt=2), reason=_("You've used all Leech traffic today"))
+ self.retry(wait=seconds_to_midnight(), msg=_("You've used all Leech traffic today"))
else:
self.fail(stmsg)
@@ -188,7 +197,7 @@ class XFSHoster(SimpleHoster):
if not inputs:
action, inputs = self.parse_html_form('F1')
if not inputs:
- self.retry(reason=self.info['error'] if 'error' in self.info else _("TEXTAREA F1 not found"))
+ self.retry(msg=self.info['error'] if 'error' in self.info else _("TEXTAREA F1 not found"))
self.log_debug(inputs)
@@ -202,7 +211,7 @@ class XFSHoster(SimpleHoster):
if not self.premium:
m = re.search(self.WAIT_PATTERN, self.html)
- if m:
+ if m is not None:
wait_time = int(m.group(1))
self.set_wait(wait_time, False)
@@ -223,13 +232,13 @@ class XFSHoster(SimpleHoster):
def handle_captcha(self, inputs):
m = re.search(self.CAPTCHA_PATTERN, self.html)
- if m:
+ if m is not None:
captcha_url = m.group(1)
inputs['code'] = self.captcha.decrypt(captcha_url)
return
m = re.search(self.CAPTCHA_BLOCK_PATTERN, self.html, re.S)
- if m:
+ if m is not None:
captcha_div = m.group(1)
numerals = re.findall(r'<span.*?padding-left\s*:\s*(\d+).*?>(\d)</span>', html_unescape(captcha_div))
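
The block captcha handled here renders one digit per <span>, positioned via CSS padding-left, so the findall above yields (offset, digit) pairs; reading the code in visual order presumably means sorting by that offset before joining, which is how the sketch below consumes the list (the HTML snippet is made up for illustration, not taken from a real site):

    import re

    captcha_div = (
        '<span style="padding-left:22px">4</span>'
        '<span style="padding-left:2px">1</span>'
        '<span style="padding-left:42px">7</span>'
        '<span style="padding-left:12px">9</span>'
    )

    numerals = re.findall(r'<span.*?padding-left\s*:\s*(\d+).*?>(\d)</span>', captcha_div)
    code = "".join(digit for _, digit in sorted(numerals, key=lambda n: int(n[0])))
    print(code)  #: -> "1947"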