Diffstat (limited to 'pyload/plugins/internal')
-rw-r--r-- | pyload/plugins/internal/AbstractExtractor.py | 109
-rw-r--r-- | pyload/plugins/internal/DeadCrypter.py | 20
-rw-r--r-- | pyload/plugins/internal/DeadHoster.py | 28
-rw-r--r-- | pyload/plugins/internal/MultiHoster.py | 205
-rw-r--r-- | pyload/plugins/internal/SimpleCrypter.py | 172
-rw-r--r-- | pyload/plugins/internal/SimpleHoster.py | 422
-rw-r--r-- | pyload/plugins/internal/UnRar.py | 221
-rw-r--r-- | pyload/plugins/internal/UnZip.py | 41
-rw-r--r-- | pyload/plugins/internal/UpdateManager.py | 304
-rw-r--r-- | pyload/plugins/internal/XFSPAccount.py | 96
-rw-r--r-- | pyload/plugins/internal/XFSPHoster.py | 54
-rw-r--r-- | pyload/plugins/internal/__init__.py | 0
12 files changed, 1672 insertions, 0 deletions
diff --git a/pyload/plugins/internal/AbstractExtractor.py b/pyload/plugins/internal/AbstractExtractor.py
new file mode 100644
index 000000000..54ea9b348
--- /dev/null
+++ b/pyload/plugins/internal/AbstractExtractor.py
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+
+class ArchiveError(Exception):
+    pass
+
+
+class CRCError(Exception):
+    pass
+
+
+class WrongPassword(Exception):
+    pass
+
+
+class AbtractExtractor:  #@NOTE: misspelled class name kept, other plugins import it under this name
+    __name__ = "AbtractExtractor"
+    __version__ = "0.1"
+
+    __description__ = """Abstract extractor plugin"""
+    __license__ = "GPLv3"
+    __authors__ = [("pyLoad Team", "admin@pyload.org")]
+
+
+    @staticmethod
+    def checkDeps():
+        """ Check if the system satisfies the dependencies
+        :return: boolean
+        """
+        return True
+
+
+    @staticmethod
+    def getTargets(files_ids):
+        """ Filter suitable targets from a list of (filepath, id) tuples
+        :param files_ids: List of (filepath, id) tuples
+        :return: List of (filepath, id) tuples
+        """
+        raise NotImplementedError
+
+
+    def __init__(self, m, file, out, fullpath, overwrite, excludefiles, renice):
+        """Initialize extractor for a specific file
+
+        :param m: ExtractArchive Addon plugin
+        :param file: Absolute filepath
+        :param out: Absolute path to destination directory
+        :param fullpath: Extract with full paths
+        :param overwrite: Overwrite existing archives
+        :param renice: Renice value
+        """
+        self.m = m
+        self.file = file
+        self.out = out
+        self.fullpath = fullpath
+        self.overwrite = overwrite
+        self.excludefiles = excludefiles
+        self.renice = renice
+        self.files = []  #: Store extracted files here
+
+
+    def init(self):
+        """ Initialize additional data structures """
+        pass
+
+
+    def checkArchive(self):
+        """Check if a password is needed. Raise ArchiveError if integrity is
+        questionable.
+
+        :return: boolean
+        :raises ArchiveError
+        """
+        return False
+
+
+    def checkPassword(self, password):
+        """ Check if the given password is/might be correct.
+        If it cannot be decided at this point, return True.
+
+        :param password:
+        :return: boolean
+        """
+        return True
+
+
+    def extract(self, progress, password=None):
+        """Extract the archive. Raise specific errors in case of failure.
+
+        :param progress: Progress function, call this to update status
+        :param password: Password to use
+        :raises WrongPassword
+        :raises CRCError
+        :raises ArchiveError
+        :return:
+        """
+        raise NotImplementedError
+
+
+    def getDeleteFiles(self):
+        """Return list of files to delete, do *not* delete them here.
+
+        :return: List with paths of files to delete
+        """
+        raise NotImplementedError
+
+
+    def getExtractedFiles(self):
+        """Populate self.files at some point while extracting"""
+        return self.files
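The raising methods above (getTargets, extract, getDeleteFiles) are the minimal surface a concrete extractor has to cover; the rest falls back to the defaults. A hypothetical sketch using only the standard-library tarfile module (the UnTar plugin itself is invented for illustration, not part of this commit):

    # -*- coding: utf-8 -*-
    import tarfile

    from pyload.plugins.internal.AbstractExtractor import AbtractExtractor, ArchiveError
    from pyload.utils import safe_join


    class UnTar(AbtractExtractor):
        __name__ = "UnTar"
        __version__ = "0.1"

        @staticmethod
        def getTargets(files_ids):
            # keep only (filepath, id) tuples that look like tar archives
            return [(f, id) for f, id in files_ids if f.endswith(".tar")]

        def extract(self, progress, password=None):
            progress(0)
            try:
                t = tarfile.open(self.file)
                t.extractall(self.out)
                self.files = [safe_join(self.out, n) for n in t.getnames()]
            except tarfile.TarError, e:
                raise ArchiveError(str(e))
            progress(100)

        def getDeleteFiles(self):
            return [self.file]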
diff --git a/pyload/plugins/internal/DeadCrypter.py b/pyload/plugins/internal/DeadCrypter.py
new file mode 100644
index 000000000..83aed6d43
--- /dev/null
+++ b/pyload/plugins/internal/DeadCrypter.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugins.base.Crypter import Crypter as _Crypter
+
+
+class DeadCrypter(_Crypter):
+    __name__ = "DeadCrypter"
+    __type__ = "crypter"
+    __version__ = "0.02"
+
+    __pattern__ = r'^unmatchable$'
+
+    __description__ = """Crypter is no longer available"""
+    __license__ = "GPLv3"
+    __authors__ = [("stickell", "l.stickell@yahoo.it")]
+
+
+    def setup(self):
+        self.pyfile.error = "Crypter is no longer available"
+        self.offline()  #@TODO: self.offline("Crypter is no longer available")
diff --git a/pyload/plugins/internal/DeadHoster.py b/pyload/plugins/internal/DeadHoster.py
new file mode 100644
index 000000000..76a7b5d80
--- /dev/null
+++ b/pyload/plugins/internal/DeadHoster.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+
+from pyload.plugins.base.Hoster import Hoster as _Hoster
+
+
+def create_getInfo(plugin):
+
+    def getInfo(urls):
+        yield map(lambda url: ('#N/A: ' + url, 0, 1, url), urls)
+
+    return getInfo
+
+
+class DeadHoster(_Hoster):
+    __name__ = "DeadHoster"
+    __type__ = "hoster"
+    __version__ = "0.12"
+
+    __pattern__ = r'^unmatchable$'
+
+    __description__ = """Hoster is no longer available"""
+    __license__ = "GPLv3"
+    __authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
+
+
+    def setup(self):
+        self.pyfile.error = "Hoster is no longer available"
+        self.offline()  #@TODO: self.offline("Hoster is no longer available")
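For dead plugins, the generated getInfo yields one list for the whole batch, marking every url as offline (status code 1 in pyLoad's (name, size, status, url) tuples). A quick sketch (urls invented):

    getInfo = create_getInfo(DeadHoster)
    for batch in getInfo(["http://example.com/f1", "http://example.com/f2"]):
        print batch
    # [('#N/A: http://example.com/f1', 0, 1, 'http://example.com/f1'),
    #  ('#N/A: http://example.com/f2', 0, 1, 'http://example.com/f2')]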
diff --git a/pyload/plugins/internal/MultiHoster.py b/pyload/plugins/internal/MultiHoster.py
new file mode 100644
index 000000000..97cbb4591
--- /dev/null
+++ b/pyload/plugins/internal/MultiHoster.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from pyload.plugins.base.Addon import Addon
+from pyload.utils import remove_chars
+
+
+class MultiHoster(Addon):
+    __name__ = "MultiHoster"
+    __type__ = "addon"
+    __version__ = "0.20"
+
+    __description__ = """Generic MultiHoster plugin"""
+    __license__ = "GPLv3"
+    __authors__ = [("pyLoad Team", "admin@pyload.org")]
+
+
+    interval = 24 * 60 * 60  #: reload hosters daily
+
+    HOSTER_REPLACEMENTS = [("2shared.com", "twoshared.com"), ("4shared.com", "fourshared.com"), ("cloudnator.com", "shragle.com"),
+                           ("ifile.it", "filecloud.io"), ("easy-share.com", "crocko.com"), ("freakshare.net", "freakshare.com"),
+                           ("hellshare.com", "hellshare.cz"), ("share-rapid.cz", "sharerapid.com"), ("sharerapid.cz", "sharerapid.com"),
+                           ("ul.to", "uploaded.to"), ("uploaded.net", "uploaded.to"), ("1fichier.com", "onefichier.com")]
+    HOSTER_EXCLUDED = []
+
+
+    def setup(self):
+        self.hosters = []
+        self.supported = []
+        self.new_supported = []
+
+
+    def getConfig(self, option, default=''):
+        """getConfig with default value - a subclass may not implement all config options"""
+        try:
+            # Fixed loop due to getConf deprecation in 0.4.10
+            return super(MultiHoster, self).getConfig(option)
+        except KeyError:
+            return default
+
+
+    def getHosterCached(self):
+        if not self.hosters:
+            try:
+                hosterSet = self.toHosterSet(self.getHoster()) - set(self.HOSTER_EXCLUDED)
+            except Exception, e:
+                self.logError(e)
+                return []
+
+            try:
+                configMode = self.getConfig('hosterListMode', 'all')
+                if configMode in ("listed", "unlisted"):
+                    configSet = self.toHosterSet(self.getConfig('hosterList', '').replace('|', ',').replace(';', ',').split(','))
+
+                    if configMode == "listed":
+                        hosterSet &= configSet
+                    else:
+                        hosterSet -= configSet
+
+            except Exception, e:
+                self.logError(e)
+
+            self.hosters = list(hosterSet)
+
+        return self.hosters
+
+
+    def toHosterSet(self, hosters):
+        hosters = set((str(x).strip().lower() for x in hosters))
+
+        for rep in self.HOSTER_REPLACEMENTS:
+            if rep[0] in hosters:
+                hosters.remove(rep[0])
+                hosters.add(rep[1])
+
+        hosters.discard('')
+        return hosters
+
+
+    def getHoster(self):
+        """Load the list of supported hosters
+
+        :return: List of domain names
+        """
+        raise NotImplementedError
+
+
+    def coreReady(self):
+        if self.cb:
+            self.core.scheduler.removeJob(self.cb)
+
+        self.setConfig("activated", True)  #: config not in sync after plugin reload
+
+        cfg_interval = self.getConfig("interval", None)  #: reload interval in hours
+        if cfg_interval is not None:
+            self.interval = cfg_interval * 60 * 60
+
+        if self.interval:
+            self._periodical()
+        else:
+            self.periodical()
+
+
+    def initPeriodical(self):
+        pass
+
+
+    def periodical(self):
+        """reload hoster list periodically"""
+        self.logInfo(_("Reloading supported hoster list"))
+
+        old_supported = self.supported
+        self.supported, self.new_supported, self.hosters = [], [], []
+
+        self.overridePlugins()
+
+        old_supported = [hoster for hoster in old_supported if hoster not in self.supported]
+        if old_supported:
+            self.logDebug("UNLOAD", ", ".join(old_supported))
+            for hoster in old_supported:
+                self.unloadHoster(hoster)
+
+
+    def overridePlugins(self):
+        pluginMap = {}
+        for name in self.core.pluginManager.hosterPlugins.keys():
+            pluginMap[name.lower()] = name
+
+        accountList = [name.lower() for name, data in self.core.accountManager.accounts.iteritems() if data]
+        excludedList = []
+
+        for hoster in self.getHosterCached():
+            name = remove_chars(hoster.lower(), "-.")
+
+            if name in accountList:
+                excludedList.append(hoster)
+            else:
+                if name in pluginMap:
+                    self.supported.append(pluginMap[name])
+                else:
+                    self.new_supported.append(hoster)
+
+        if not self.supported and not self.new_supported:
+            self.logError(_("No Hoster loaded"))
+            return
+
+        module = self.core.pluginManager.getPlugin(self.__name__)
+        klass = getattr(module, self.__name__)
+
+        # inject the multihoster module into all supported hoster plugins
+        self.logDebug("Overwritten Hosters", ", ".join(sorted(self.supported)))
+        for hoster in self.supported:
+            hdict = self.core.pluginManager.hosterPlugins[hoster]
+            hdict['new_module'] = module
+            hdict['new_name'] = self.__name__
+
+        if excludedList:
+            self.logInfo(_("The following hosters were not overwritten - account exists"), ", ".join(sorted(excludedList)))
+
+        if self.new_supported:
+            self.logDebug("New Hosters", ", ".join(sorted(self.new_supported)))
+
+            # create new regexp
+            regexp = r'.*(%s).*' % "|".join([x.replace(".", "\\.") for x in self.new_supported])
+            if hasattr(klass, "__pattern__") and isinstance(klass.__pattern__, basestring) and '://' in klass.__pattern__:
+                regexp = r'%s|%s' % (klass.__pattern__, regexp)
+
+            self.logDebug("Regexp", regexp)
+
+            hdict = self.core.pluginManager.hosterPlugins[self.__name__]
+            hdict['pattern'] = regexp
+            hdict['re'] = re.compile(regexp)
+
+
+    def unloadHoster(self, hoster):
+        hdict = self.core.pluginManager.hosterPlugins[hoster]
+        if "module" in hdict:
+            del hdict['module']
+
+        if "new_module" in hdict:
+            del hdict['new_module']
+            del hdict['new_name']
+
+
+    def unload(self):
+        """Remove override for all hosters. Scheduler job is removed by AddonManager"""
+        for hoster in self.supported:
+            self.unloadHoster(hoster)
+
+        # reset pattern
+        klass = getattr(self.core.pluginManager.getPlugin(self.__name__), self.__name__)
+        hdict = self.core.pluginManager.hosterPlugins[self.__name__]
+        hdict['pattern'] = getattr(klass, "__pattern__", r'^unmatchable$')
+        hdict['re'] = re.compile(hdict['pattern'])
+
+
+    def downloadFailed(self, pyfile):
+        """remove plugin override if download fails, but not if the file is offline/temp.offline"""
+        if pyfile.hasStatus("failed") and self.getConfig("unloadFailing", True):
+            hdict = self.core.pluginManager.hosterPlugins[pyfile.pluginname]
+            if "new_name" in hdict and hdict['new_name'] == self.__name__:
+                self.logDebug("Unload MultiHoster", pyfile.pluginname, hdict)
+                self.unloadHoster(pyfile.pluginname)
+                pyfile.setStatus("queued")
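toHosterSet() lower-cases, strips and de-aliases the domain names, so differently spelled entries collapse before the set arithmetic in getHosterCached(). A quick sketch, assuming mh is any MultiHoster instance:

    >>> mh.toHosterSet([" UL.TO ", "uploaded.net", "example.com", ""])
    set(['uploaded.to', 'example.com'])

Both aliases map onto "uploaded.to" via HOSTER_REPLACEMENTS, the empty entry is discarded, and set ordering is arbitrary.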
diff --git a/pyload/plugins/internal/SimpleCrypter.py b/pyload/plugins/internal/SimpleCrypter.py
new file mode 100644
index 000000000..634ec5f12
--- /dev/null
+++ b/pyload/plugins/internal/SimpleCrypter.py
@@ -0,0 +1,172 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urlparse
+
+from pyload.plugins.base.Crypter import Crypter
+from pyload.plugins.base.Plugin import Fail
+from pyload.plugins.internal.SimpleHoster import _error, _wait, parseFileInfo, replace_patterns, set_cookies
+from pyload.utils import fixup, html_unescape
+
+
+class SimpleCrypter(Crypter):
+    __name__ = "SimpleCrypter"
+    __type__ = "crypter"
+    __version__ = "0.28"
+
+    __pattern__ = r'^unmatchable$'
+    __config__ = [("use_subfolder", "bool", "Save package to subfolder", True),  #: Overrides core.config['general']['folder_per_package']
+                  ("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
+
+    __description__ = """Simple decrypter plugin"""
+    __license__ = "GPLv3"
+    __authors__ = [("stickell", "l.stickell@yahoo.it"),
+                   ("zoidberg", "zoidberg@mujmail.cz"),
+                   ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+    """
+    Following patterns should be defined by each crypter:
+
+      LINK_PATTERN: group(1) must be a download link or a regex to catch more links
+        example: LINK_PATTERN = r'<div class="link"><a href="(.+?)"'
+
+      NAME_PATTERN: (optional) folder name or webpage title
+        example: NAME_PATTERN = r'<title>Files of: (?P<N>[^<]+) folder</title>'
+
+      OFFLINE_PATTERN: (optional) Checks if the content is no longer available online
+        example: OFFLINE_PATTERN = r'File (deleted|not found)'
+
+      TEMP_OFFLINE_PATTERN: (optional) Checks if the content is temporarily offline
+        example: TEMP_OFFLINE_PATTERN = r'Server maintainance'
+
+
+    You can override the getLinks method if you need a more sophisticated way to extract the links.
+
+
+    If the links are split over multiple pages you can define the PAGES_PATTERN regex:
+
+      PAGES_PATTERN: (optional) group(1) should be the number of overall pages containing the links
+        example: PAGES_PATTERN = r'Pages: (\d+)'
+
+    and implement the loadPage method:
+
+      def loadPage(self, page_n):
+          return the html of the page number page_n
+    """
+
+    LINK_PATTERN = None
+
+    NAME_REPLACEMENTS = [("&#?\w+;", fixup)]
+    URL_REPLACEMENTS = []
+
+    TEXT_ENCODING = False  #: Set to True or encoding name if encoding in http header is not correct
+    COOKIES = True  #: or False or list of tuples [(domain, name, value)]
+
+    LOGIN_ACCOUNT = False
+    LOGIN_PREMIUM = False
+
+
+    #@TODO: remove in 0.4.10
+    def init(self):
+        self.info = {}
+
+        account_name = (self.__name__ + ".py").replace("Folder.py", "").replace(".py", "")
+        account = self.core.accountManager.getAccountPlugin(account_name)
+
+        if account and account.canUse():
+            self.user, data = account.selectAccount()
+            self.req = account.getAccountRequest(self.user)
+            self.premium = account.isPremium(self.user)
+
+            self.account = account
+
+
+    def prepare(self):
+        if self.LOGIN_ACCOUNT and not self.account:
+            self.fail(_("Required account not found"))
+
+        if self.LOGIN_PREMIUM and not self.premium:
+            self.fail(_("Required premium account not found"))
+
+        if isinstance(self.COOKIES, list):
+            set_cookies(self.req.cj, self.COOKIES)
+
+        self.pyfile.url = replace_patterns(self.pyfile.url, self.URL_REPLACEMENTS)
+
+        if self.html is None:
+            self.html = self.load(self.pyfile.url, decode=not self.TEXT_ENCODING, cookies=bool(self.COOKIES))
+
+        if isinstance(self.TEXT_ENCODING, basestring):
+            self.html = unicode(self.html, self.TEXT_ENCODING)
+
+
+    def decrypt(self, pyfile):
+        self.prepare()
+
+        if self.html is None:
+            self.fail(_("No html retrieved"))
+
+        if not self.info:
+            self.getFileInfo()
+
+        self.links = self.getLinks()
+
+        if hasattr(self, 'PAGES_PATTERN') and hasattr(self, 'loadPage'):
+            self.handleMultiPages()
+
+        self.logDebug("Package has %d links" % len(self.links))
+
+        if self.links:
+            self.packages = [(self.info['name'], self.links, self.info['folder'])]
+
+
+    def getFileInfo(self):
+        name, size, status, url = parseFileInfo(self)
+
+        if name and name != url:
+            self.pyfile.name = name
+        else:
+            self.pyfile.name = self.info['name'] = urlparse(html_unescape(name)).path.split("/")[-1]
+
+        if status == 1:
+            self.offline()
+
+        elif status == 6:
+            self.tempOffline()
+
+        self.info['folder'] = self.pyfile.name
+
+        self.logDebug("FILE NAME: %s" % self.pyfile.name)
+        return self.info
+
+
+    def getLinks(self):
+        """
+        Returns the links extracted from self.html
+        You should override this only if it's impossible to extract links using only the LINK_PATTERN.
+        """
+        return re.findall(self.LINK_PATTERN, self.html)
+
+
+    def handleMultiPages(self):
+        try:
+            m = re.search(self.PAGES_PATTERN, self.html)
+            pages = int(m.group(1))
+        except:
+            pages = 1
+
+        for p in xrange(2, pages + 1):
+            self.html = self.loadPage(p)
+            self.links += self.getLinks()
+
+
+    #@TODO: Remove in 0.4.10
+    def wait(self, seconds=0, reconnect=None):
+        return _wait(self, seconds, reconnect)
+
+
+    def error(self, reason="", type="parse"):
+        return _error(self, reason, type)
diff --git a/pyload/plugins/internal/SimpleHoster.py b/pyload/plugins/internal/SimpleHoster.py
new file mode 100644
index 000000000..b192b45b2
--- /dev/null
+++ b/pyload/plugins/internal/SimpleHoster.py
@@ -0,0 +1,422 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from time import time
+from urlparse import urlparse
+
+from pycurl import FOLLOWLOCATION
+
+from pyload.network.CookieJar import CookieJar
+from pyload.network.RequestFactory import getURL
+from pyload.plugins.base.Hoster import Hoster
+from pyload.plugins.base.Plugin import Fail, Plugin
+from pyload.utils import fixup, html_unescape, parseFileSize
+
+
+#@TODO: Remove in 0.4.10 and redirect to self.error instead
+def _error(self, reason, type):
+    if not reason and not type:
+        type = "unknown"
+
+    msg = _("%s error") % type.strip().capitalize() if type else _("Error")
+    msg += ": " + reason.strip() if reason else ""
+    msg += _(" | Plugin may be out of date")
+
+    raise Fail(msg)
+
+
+#@TODO: Remove in 0.4.10
+def _wait(self, seconds, reconnect):
+    if seconds:
+        self.setWait(seconds, reconnect)
+    Plugin.wait(self)  #: call the common base class directly, self may also be a SimpleCrypter
+
+
+def replace_patterns(string, ruleslist):
+    for r in ruleslist:
+        rf, rt = r
+        string = re.sub(rf, rt, string)
+    return string
+
+
+def set_cookies(cj, cookies):
+    for cookie in cookies:
+        if isinstance(cookie, tuple) and len(cookie) == 3:
+            domain, name, value = cookie
+            cj.setCookie(domain, name, value)
+
+
+def parseHtmlTagAttrValue(attr_name, tag):
+    m = re.search(r"%s\s*=\s*([\"']?)((?<=\")[^\"]+|(?<=')[^']+|[^>\s\"'][^>\s]*)\1" % attr_name, tag, re.I)
+    return m.group(2) if m else None
+
+
+def parseHtmlForm(attr_str, html, input_names=None):
+    for form in re.finditer(r"(?P<tag><form[^>]*%s[^>]*>)(?P<content>.*?)</?(form|body|html)[^>]*>" % attr_str,
+                            html, re.S | re.I):
+        inputs = {}
+        action = parseHtmlTagAttrValue("action", form.group('tag'))
+        for inputtag in re.finditer(r'(<(input|textarea)[^>]*>)([^<]*(?=</\2)|)', form.group('content'), re.S | re.I):
+            name = parseHtmlTagAttrValue("name", inputtag.group(1))
+            if name:
+                value = parseHtmlTagAttrValue("value", inputtag.group(1))
+                if not value:
+                    inputs[name] = inputtag.group(3) or ''
+                else:
+                    inputs[name] = value
+
+        if isinstance(input_names, dict):
+            # check input attributes
+            for key, val in input_names.iteritems():
+                if key in inputs:
+                    if isinstance(val, basestring) and inputs[key] == val:
+                        continue
+                    elif isinstance(val, tuple) and inputs[key] in val:
+                        continue
+                    elif hasattr(val, "search") and re.match(val, inputs[key]):
+                        continue
+                    break  # attribute value does not match
+                else:
+                    break  # attribute name does not match
+            else:
+                return action, inputs  # passed attribute check
+        else:
+            # no attribute check
+            return action, inputs
+
+    return {}, None  # no matching form found
+
+
+def parseFileInfo(self, url="", html=""):
+    if not url and hasattr(self, "pyfile"):
+        url = self.pyfile.url
+
+    info = {'name': url, 'size': 0, 'status': 3}
+
+    if not html:
+        if url:
+            return create_getInfo(self)([url]).next()
+
+        elif hasattr(self, "req") and self.req.http.code == 404:  #: the response code is numeric
+            info['status'] = 1
+
+        elif hasattr(self, "html"):
+            html = self.html
+
+    if html:
+        if hasattr(self, "OFFLINE_PATTERN") and re.search(self.OFFLINE_PATTERN, html):
+            info['status'] = 1
+
+        elif hasattr(self, "FILE_OFFLINE_PATTERN") and re.search(self.FILE_OFFLINE_PATTERN, html):  #@TODO: Remove in 0.4.10
+            info['status'] = 1
+
+        elif hasattr(self, "TEMP_OFFLINE_PATTERN") and re.search(self.TEMP_OFFLINE_PATTERN, html):
+            info['status'] = 6
+
+        else:
+            online = False
+            try:
+                info.update(re.match(self.__pattern__, url).groupdict())
+            except:
+                pass
+
+            for pattern in ("INFO_PATTERN", "NAME_PATTERN", "SIZE_PATTERN",
+                            "FILE_INFO_PATTERN", "FILE_NAME_PATTERN", "FILE_SIZE_PATTERN"):  #@TODO: Remove in 0.4.10
+                try:
+                    info.update(re.search(getattr(self, pattern), html).groupdict())
+                    online = True
+                except AttributeError:
+                    continue
+
+            if online:
+                # File online, return name and size
+                info['status'] = 2
+
+                if 'N' in info:
+                    info['name'] = replace_patterns(info['N'].strip(),
+                                                    self.FILE_NAME_REPLACEMENTS if hasattr(self, "FILE_NAME_REPLACEMENTS") else self.NAME_REPLACEMENTS)  #@TODO: Remove FILE_NAME_REPLACEMENTS check in 0.4.10
+
+                if 'S' in info:
+                    size = replace_patterns((info['S'] + info['U']) if 'U' in info else info['S'],
+                                            self.FILE_SIZE_REPLACEMENTS if hasattr(self, "FILE_SIZE_REPLACEMENTS") else self.SIZE_REPLACEMENTS)  #@TODO: Remove FILE_SIZE_REPLACEMENTS check in 0.4.10
+                    info['size'] = parseFileSize(size)
+
+                elif isinstance(info['size'], basestring):
+                    unit = info['units'] if 'units' in info else None
+                    info['size'] = parseFileSize(info['size'], unit)
+
+    if hasattr(self, "html") and self.html is None:
+        self.html = html
+
+    if hasattr(self, "info"):
+        try:
+            self.logDebug(_("File info (before update): %s") % self.info)
+        except:
+            pass
+
+        self.info.update(info)
+
+        try:
+            self.logDebug(_("File info (after update): %s") % self.info)
+        except:
+            pass
+
+    return info['name'], info['size'], info['status'], url
+
+
+def create_getInfo(plugin):
+
+    def getInfo(urls):
+        for url in urls:
+            if hasattr(plugin, "COOKIES") and isinstance(plugin.COOKIES, list):
+                cj = CookieJar(plugin.__name__)
+                set_cookies(cj, plugin.COOKIES)
+            else:
+                cj = None
+
+            if hasattr(plugin, "URL_REPLACEMENTS"):
+                url = replace_patterns(url, plugin.URL_REPLACEMENTS)
+
+            elif hasattr(plugin, "FILE_URL_REPLACEMENTS"):  #@TODO: Remove in 0.4.10
+                url = replace_patterns(url, plugin.FILE_URL_REPLACEMENTS)
+
+            if hasattr(plugin, "TEXT_ENCODING"):
+                html = getURL(url, cookies=bool(cj), decode=not plugin.TEXT_ENCODING)
+                if isinstance(plugin.TEXT_ENCODING, basestring):
+                    html = unicode(html, plugin.TEXT_ENCODING)
+            else:
+                html = getURL(url, cookies=bool(cj), decode=True)
+
+            yield parseFileInfo(plugin, url, html)
+
+    return getInfo
+
+
+def timestamp():
+    return int(time() * 1000)
+
+
+class SimpleHoster(Hoster):
+    __name__ = "SimpleHoster"
+    __type__ = "hoster"
+    __version__ = "0.53"
+
+    __pattern__ = r'^unmatchable$'
+
+    __description__ = """Simple hoster plugin"""
+    __license__ = "GPLv3"
+    __authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
+                   ("stickell", "l.stickell@yahoo.it"),
+                   ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+    """
+    Following patterns should be defined by each hoster:
+
+      INFO_PATTERN: (optional) Name and size of the file
+        example: INFO_PATTERN = r'(?P<N>file_name) (?P<S>file_size) (?P<U>size_unit)'
+      or
+        NAME_PATTERN: (optional) Name that will be set for the file
+          example: NAME_PATTERN = r'(?P<N>file_name)'
+        SIZE_PATTERN: (optional) Size that will be checked for the file
+          example: SIZE_PATTERN = r'(?P<S>file_size) (?P<U>size_unit)'
+
+      OFFLINE_PATTERN: (optional) Checks if the file is no longer available online
+        example: OFFLINE_PATTERN = r'File (deleted|not found)'
+
+      TEMP_OFFLINE_PATTERN: (optional) Checks if the file is temporarily offline
+        example: TEMP_OFFLINE_PATTERN = r'Server (maintenance|maintainance)'
+
+      PREMIUM_ONLY_PATTERN: (optional) Checks if the file can be downloaded only with a premium account
+        example: PREMIUM_ONLY_PATTERN = r'Premium account required'
+
+
+    Instead of overriding the handleFree and handlePremium methods, you can define patterns for direct download:
+
+      LINK_FREE_PATTERN: (optional) group(1) should be the direct link for free download
+        example: LINK_FREE_PATTERN = r'<div class="link"><a href="(.+?)"'
+
+      LINK_PREMIUM_PATTERN: (optional) group(1) should be the direct link for premium download
+        example: LINK_PREMIUM_PATTERN = r'<div class="link"><a href="(.+?)"'
+    """
+
+    NAME_REPLACEMENTS = [("&#?\w+;", fixup)]
+    SIZE_REPLACEMENTS = []
+    URL_REPLACEMENTS = []
+
+    TEXT_ENCODING = False  #: Set to True or encoding name if encoding in http header is not correct
+    COOKIES = True  #: or False or list of tuples [(domain, name, value)]
+    FORCE_CHECK_TRAFFIC = False  #: Set to True to force checking traffic left for premium account
+
+
+    def init(self):
+        self.info = {}
+
+
+    def setup(self):
+        self.resumeDownload = self.multiDL = self.premium
+
+
+    def prepare(self):
+        if isinstance(self.COOKIES, list):
+            set_cookies(self.req.cj, self.COOKIES)
+
+        self.req.setOption("timeout", 120)
+
+        self.pyfile.url = replace_patterns(self.pyfile.url,
+                                           self.FILE_URL_REPLACEMENTS if hasattr(self, "FILE_URL_REPLACEMENTS") else self.URL_REPLACEMENTS)  #@TODO: Remove FILE_URL_REPLACEMENTS check in 0.4.10
+
+        if self.premium:
+            self.logDebug(_("Looking for direct download link..."))
+            direct_link = self.getDirectLink(self.pyfile.url)
+            if direct_link:
+                return direct_link
+            else:
+                self.logDebug(_("No direct download link found"))
+                self.html = None
+                self.info = {}
+
+        if self.html is None:
+            self.html = self.load(self.pyfile.url, decode=not self.TEXT_ENCODING, cookies=bool(self.COOKIES))
+
+        if isinstance(self.TEXT_ENCODING, basestring):
+            self.html = unicode(self.html, self.TEXT_ENCODING)
+
+
+    def process(self, pyfile):
+        direct_link = self.prepare()
+
+        if isinstance(direct_link, basestring):
+            self.logInfo(_("Direct download link detected"))
+            self.download(direct_link, ref=True, cookies=True, disposition=True)
+
+        elif self.html is None:
+            self.fail(_("No html retrieved"))
+
+        else:
+            premium_only = hasattr(self, 'PREMIUM_ONLY_PATTERN') and re.search(self.PREMIUM_ONLY_PATTERN, self.html)
+            if not premium_only and not self.info:  #: premium-only pages usually don't show any file information
+                self.getFileInfo()
+
+            if self.premium and (not self.FORCE_CHECK_TRAFFIC or self.checkTrafficLeft()):
+                self.logDebug("Handle as premium download")
+                self.handlePremium()
+            elif premium_only:
+                self.fail(_("Link requires a premium account to be handled"))
+            else:
+                self.logDebug("Handle as free download")
+                self.handleFree()
+
+
+    def getDirectLink(self, url):
+        self.req.http.c.setopt(FOLLOWLOCATION, 0)
+
+        html = self.load(url, ref=True, decode=True)
+
+        self.req.http.c.setopt(FOLLOWLOCATION, 1)
+
+        if parseFileInfo(self, url, html)[2] != 2:
+            try:
+                return re.search(r'Location\s*:\s*(.+)', self.req.http.header, re.I).group(1).rstrip()  #@TODO: Remove .rstrip() in 0.4.10
+            except:
+                pass
+
+
+    def getFileInfo(self):
+        name, size, status, url = parseFileInfo(self)
+
+        if name and name != url:
+            self.pyfile.name = name
+        else:
+            self.pyfile.name = self.info['name'] = urlparse(html_unescape(name)).path.split("/")[-1]
+
+        if status == 1:
+            self.offline()
+
+        elif status == 6:
+            self.tempOffline()
+
+        elif status != 2:
+            self.error(_("File info: %s") % self.info)
+
+        if size:
+            self.pyfile.size = size
+        else:
+            self.logError(_("File size not parsed"))
+
+        self.logDebug("FILE NAME: %s" % self.pyfile.name, "FILE SIZE: %s" % (self.pyfile.size or _("Unknown")))
+        return self.info
+
+
+    def handleFree(self):
+        if not hasattr(self, 'LINK_FREE_PATTERN'):
+            self.fail(_("Free download not implemented"))
+
+        try:
+            m = re.search(self.LINK_FREE_PATTERN, self.html)
+            if m is None:
+                self.error(_("Free download link not found"))
+
+            link = m.group(1)
+        except Exception, e:
+            self.fail(str(e))
+        else:
+            self.download(link, ref=True, cookies=True, disposition=True)
+
+
+    def handlePremium(self):
+        if not hasattr(self, 'LINK_PREMIUM_PATTERN'):
+            self.fail(_("Premium download not implemented"))
+
+        try:
+            m = re.search(self.LINK_PREMIUM_PATTERN, self.html)
+            if m is None:
+                self.error(_("Premium download link not found"))
+
+            link = m.group(1)
+        except Exception, e:
+            self.fail(str(e))
+        else:
+            self.download(link, ref=True, cookies=True, disposition=True)
+
+
+    def longWait(self, wait_time=None, max_tries=3):
+        if wait_time and isinstance(wait_time, (int, long, float)):
+            time_str = "%dh %dm" % divmod(wait_time / 60, 60)
+        else:
+            wait_time = 900
+            time_str = _("(unknown time)")
+            max_tries = 100
+
+        self.logInfo(_("Download limit reached, reconnect or wait %s") % time_str)
+
+        self.setWait(wait_time, True)
+        self.wait()
+        self.retry(max_tries=max_tries, reason=_("Download limit reached"))
+
+
+    def parseHtmlForm(self, attr_str='', input_names=None):
+        return parseHtmlForm(attr_str, self.html, input_names)
+
+
+    def checkTrafficLeft(self):
+        traffic = self.account.getAccountInfo(self.user, True)['trafficleft']
+
+        if traffic is None:
+            return False
+        elif traffic == -1:
+            return True
+        else:
+            size = self.pyfile.size / 1024
+            self.logInfo(_("Filesize: %i KiB, Traffic left for user %s: %i KiB") % (size, self.user, traffic))
+            return size <= traffic
+
+
+    #@TODO: Remove in 0.4.10
+    def wait(self, seconds=0, reconnect=None):
+        return _wait(self, seconds, reconnect)
+
+
+    def error(self, reason="", type="parse"):
+        return _error(self, reason, type)
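The same declaration style covers hosters; with INFO_PATTERN and LINK_FREE_PATTERN defined, process() needs no overriding at all. A minimal sketch against a made-up site:

    from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo


    class ExampleCom(SimpleHoster):
        __name__ = "ExampleCom"
        __type__ = "hoster"
        __version__ = "0.01"
        __pattern__ = r'https?://(?:www\.)?example\.com/file/\w+'

        INFO_PATTERN = r'<h1>(?P<N>.+?)</h1>\s*\((?P<S>[\d.]+) (?P<U>[kKMG]i?B)\)'
        OFFLINE_PATTERN = r'File (deleted|not found)'
        LINK_FREE_PATTERN = r'<a id="download" href="(.+?)"'


    getInfo = create_getInfo(ExampleCom)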
diff --git a/pyload/plugins/internal/UnRar.py b/pyload/plugins/internal/UnRar.py
new file mode 100644
index 000000000..31a0d7642
--- /dev/null
+++ b/pyload/plugins/internal/UnRar.py
@@ -0,0 +1,221 @@
+# -*- coding: utf-8 -*-
+
+import os
+import re
+
+from glob import glob
+from os.path import basename, join
+from string import digits
+from subprocess import Popen, PIPE
+
+from pyload.plugins.internal.AbstractExtractor import AbtractExtractor, WrongPassword, ArchiveError, CRCError
+from pyload.utils import safe_join, decode
+
+
+def renice(pid, value):
+    if os.name != "nt" and value:
+        try:
+            Popen(["renice", str(value), str(pid)], stdout=PIPE, stderr=PIPE, bufsize=-1)
+        except:
+            print "Renice failed"
+
+
+class UnRar(AbtractExtractor):
+    __name__ = "UnRar"
+    __version__ = "0.18"
+
+    __description__ = """Rar extractor plugin"""
+    __license__ = "GPLv3"
+    __authors__ = [("RaNaN", "RaNaN@pyload.org")]
+
+
+    CMD = "unrar"
+
+    # there are some more uncovered rar formats
+    re_version = re.compile(r"(UNRAR 5[\d.]+(.*?)freeware)")
+    re_splitfile = re.compile(r"(.*)\.part(\d+)\.rar$", re.I)
+    re_partfiles = re.compile(r".*\.(rar|r\d+)", re.I)
+    re_filelist = re.compile(r"(.+)\s+(\d+)\s+(\d+)\s+")
+    re_filelist5 = re.compile(r"(.+)\s+(\d+)\s+\d\d-\d\d-\d\d\s+\d\d:\d\d\s+(.+)")
+    re_wrongpwd = re.compile("(Corrupt file or wrong password|password incorrect)", re.I)
+
+
+    @staticmethod
+    def checkDeps():
+        if os.name == "nt":
+            UnRar.CMD = join(pypath, "UnRAR.exe")
+            p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
+            p.communicate()
+        else:
+            try:
+                p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
+                p.communicate()
+            except OSError:
+
+                # fallback to rar
+                UnRar.CMD = "rar"
+                p = Popen([UnRar.CMD], stdout=PIPE, stderr=PIPE)
+                p.communicate()
+
+        return True
+
+
+    @staticmethod
+    def getTargets(files_ids):
+        result = []
+
+        for file, id in files_ids:
+            if not file.endswith(".rar"):
+                continue
+
+            match = UnRar.re_splitfile.findall(file)
+            if match:
+                # only add first parts
+                if int(match[0][1]) == 1:
+                    result.append((file, id))
+            else:
+                result.append((file, id))
+
+        return result
+
+
+    def init(self):
+        self.passwordProtected = False
+        self.headerProtected = False  #: list files will not work without password
+        self.smallestFile = None  #: small file to test passwords
+        self.password = ""  #: save the correct password
+
+
+    def checkArchive(self):
+        p = self.call_unrar("l", "-v", self.file)
+        out, err = p.communicate()
+        if self.re_wrongpwd.search(err):
+            self.passwordProtected = True
+            self.headerProtected = True
+            return True
+
+        # output only used to check if passworded files are present
+        if self.re_version.search(out):
+            for attr, size, name in self.re_filelist5.findall(out):
+                if attr.startswith("*"):
+                    self.passwordProtected = True
+                    return True
+        else:
+            for name, size, packed in self.re_filelist.findall(out):
+                if name.startswith("*"):
+                    self.passwordProtected = True
+                    return True
+
+        self.listContent()
+        if not self.files:
+            raise ArchiveError("Empty Archive")
+
+        return False
+
+
+    def checkPassword(self, password):
+        # at this point we can only verify header protected files
+        if self.headerProtected:
+            p = self.call_unrar("l", "-v", self.file, password=password)
+            out, err = p.communicate()
+            if self.re_wrongpwd.search(err):
+                return False
+
+        return True
+
+
+    def extract(self, progress, password=None):
+        command = "x" if self.fullpath else "e"
+
+        p = self.call_unrar(command, self.file, self.out, password=password)
+        renice(p.pid, self.renice)
+
+        progress(0)
+        progressstring = ""
+        while True:
+            c = p.stdout.read(1)
+            # quit loop on eof
+            if not c:
+                break
+            # reading a percentage sign -> set progress and restart
+            if c == '%':
+                progress(int(progressstring))
+                progressstring = ""
+            # not reading a digit -> therefore restart
+            elif c not in digits:
+                progressstring = ""
+            # add digit to progressstring
+            else:
+                progressstring = progressstring + c
+        progress(100)
+
+        # retrieve stderr
+        err = p.stderr.read()
+
+        if "CRC failed" in err and not password and not self.passwordProtected:
+            raise CRCError
+        elif "CRC failed" in err:
+            raise WrongPassword
+        if err.strip():  #: raise error if anything is on stderr
+            raise ArchiveError(err.strip())
+        if p.returncode:
+            raise ArchiveError("Process terminated")
+
+        if not self.files:
+            self.password = password
+            self.listContent()
+
+
+    def getDeleteFiles(self):
+        if ".part" in basename(self.file):
+            return glob(re.compile(r"(?<=\.part)([01]+)", re.I).sub("*", self.file))  #: re.I was being passed as re.sub's count argument
+        # get files which match .r* and filter unsuited files out
+        parts = glob(re.compile(r"(?<=\.r)ar$", re.I).sub("*", self.file))
+        return filter(lambda x: self.re_partfiles.match(x), parts)
+
+
+    def listContent(self):
+        command = "vb" if self.fullpath else "lb"
+        p = self.call_unrar(command, "-v", self.file, password=self.password)
+        out, err = p.communicate()
+
+        if "Cannot open" in err:
+            raise ArchiveError("Cannot open file")
+
+        if err.strip():  #: only log error at this point
+            self.m.logError(err.strip())
+
+        result = set()
+
+        for f in decode(out).splitlines():
+            f = f.strip()
+            result.add(safe_join(self.out, f))
+
+        self.files = result
+
+
+    def call_unrar(self, command, *xargs, **kwargs):
+        args = []
+        # overwrite flag
+        args.append("-o+" if self.overwrite else "-o-")
+
+        if self.excludefiles:
+            for word in self.excludefiles.split(';'):
+                args.append("-x%s" % word)
+
+        # assume yes on all queries
+        args.append("-y")
+
+        # set a password
+        if "password" in kwargs and kwargs['password']:
+            args.append("-p%s" % kwargs['password'])
+        else:
+            args.append("-p-")
+
+        # NOTE: return codes are not reliable, some kind of threading, cleanup whatever issue
+        call = [self.CMD, command] + args + list(xargs)
+        self.m.logDebug(" ".join(call))
+
+        p = Popen(call, stdout=PIPE, stderr=PIPE)
+
+        return p
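The extract() loop above parses unrar's progress output one byte at a time, so it also copes with the control characters unrar emits to rewrite the percentage in place. A stripped-down rewrite of the same state machine (the sample byte stream is invented; like the original, it assumes a digit run precedes every '%'):

    import sys
    from string import digits

    def feed(stream, progress):
        buf = ""
        for c in stream:
            if c == '%':
                progress(int(buf))  # a percent sign flushes the digit buffer
                buf = ""
            elif c not in digits:
                buf = ""            # any other byte resets the buffer
            else:
                buf += c

    feed("  5%\b\b\b 12%", lambda p: sys.stdout.write("%d%%\n" % p))  # prints 5% then 12%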
"vb" if self.fullpath else "lb" + p = self.call_unrar(command, "-v", self.file, password=self.password) + out, err = p.communicate() + + if "Cannot open" in err: + raise ArchiveError("Cannot open file") + + if err.strip(): #: only log error at this point + self.m.logError(err.strip()) + + result = set() + + for f in decode(out).splitlines(): + f = f.strip() + result.add(safe_join(self.out, f)) + + self.files = result + + + def call_unrar(self, command, *xargs, **kwargs): + args = [] + # overwrite flag + args.append("-o+") if self.overwrite else args.append("-o-") + + if self.excludefiles: + for word in self.excludefiles.split(';'): + args.append("-x%s" % word) + + # assume yes on all queries + args.append("-y") + + # set a password + if "password" in kwargs and kwargs['password']: + args.append("-p%s" % kwargs['password']) + else: + args.append("-p-") + + # NOTE: return codes are not reliable, some kind of threading, cleanup whatever issue + call = [self.CMD, command] + args + list(xargs) + self.m.logDebug(" ".join(call)) + + p = Popen(call, stdout=PIPE, stderr=PIPE) + + return p diff --git a/pyload/plugins/internal/UnZip.py b/pyload/plugins/internal/UnZip.py new file mode 100644 index 000000000..413c0699e --- /dev/null +++ b/pyload/plugins/internal/UnZip.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- + +import sys +import zipfile + +from pyload.plugins.internal.AbstractExtractor import AbtractExtractor + + +class UnZip(AbtractExtractor): + __name__ = "UnZip" + __version__ = "0.1" + + __description__ = """Zip extractor plugin""" + __license__ = "GPLv3" + __authors__ = [("RaNaN", "RaNaN@pyload.org")] + + + @staticmethod + def checkDeps(): + return sys.version_info[:2] >= (2, 6) + + + @staticmethod + def getTargets(files_ids): + result = [] + + for file, id in files_ids: + if file.endswith(".zip"): + result.append((file, id)) + + return result + + + def extract(self, progress, password=None): + z = zipfile.ZipFile(self.file) + self.files = z.namelist() + z.extractall(self.out) + + + def getDeleteFiles(self): + return [self.file] diff --git a/pyload/plugins/internal/UpdateManager.py b/pyload/plugins/internal/UpdateManager.py new file mode 100644 index 000000000..3f5b34c45 --- /dev/null +++ b/pyload/plugins/internal/UpdateManager.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- + +import re +import sys + +from operator import itemgetter +from os import path, remove, stat + +from pyload.network.RequestFactory import getURL +from pyload.plugins.base.Addon import Expose, Addon, threaded +from pyload.utils import safe_join + + +class UpdateManager(Addon): + __name__ = "UpdateManager" + __type__ = "addon" + __version__ = "0.39" + + __config__ = [("activated", "bool", "Activated", True), + ("mode", "pyLoad + plugins;plugins only", "Check updates for", "pyLoad + plugins"), + ("interval", "int", "Check interval in hours", 8), + ("reloadplugins", "bool", "Monitor plugins for code changes (debug mode only)", True), + ("nodebugupdate", "bool", "Don't check for updates in debug mode", True)] + + __description__ = """Check for updates""" + __license__ = "GPLv3" + __authors__ = [("Walter Purcaro", "vuolter@gmail.com")] + + + # event_list = ["pluginConfigChanged"] + + SERVER_URL = "http://updatemanager.pyload.org" + MIN_INTERVAL = 6 * 60 * 60 #: 6h minimum check interval (value is in seconds) + + + def pluginConfigChanged(self, plugin, name, value): + if name == "interval": + interval = value * 60 * 60 + if self.MIN_INTERVAL <= interval != self.interval: + self.core.scheduler.removeJob(self.cb) + self.interval = 
diff --git a/pyload/plugins/internal/UpdateManager.py b/pyload/plugins/internal/UpdateManager.py
new file mode 100644
index 000000000..3f5b34c45
--- /dev/null
+++ b/pyload/plugins/internal/UpdateManager.py
@@ -0,0 +1,304 @@
+# -*- coding: utf-8 -*-
+
+import re
+import sys
+
+from operator import itemgetter
+from os import path, remove, stat
+
+from pyload.network.RequestFactory import getURL
+from pyload.plugins.base.Addon import Expose, Addon, threaded
+from pyload.utils import safe_join
+
+
+class UpdateManager(Addon):
+    __name__ = "UpdateManager"
+    __type__ = "addon"
+    __version__ = "0.39"
+
+    __config__ = [("activated", "bool", "Activated", True),
+                  ("mode", "pyLoad + plugins;plugins only", "Check updates for", "pyLoad + plugins"),
+                  ("interval", "int", "Check interval in hours", 8),
+                  ("reloadplugins", "bool", "Monitor plugins for code changes (debug mode only)", True),
+                  ("nodebugupdate", "bool", "Don't check for updates in debug mode", True)]
+
+    __description__ = """Check for updates"""
+    __license__ = "GPLv3"
+    __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+
+
+    # event_list = ["pluginConfigChanged"]
+
+    SERVER_URL = "http://updatemanager.pyload.org"
+    MIN_INTERVAL = 6 * 60 * 60  #: 6h minimum check interval (value is in seconds)
+
+
+    def pluginConfigChanged(self, plugin, name, value):
+        if name == "interval":
+            interval = value * 60 * 60
+            if self.MIN_INTERVAL <= interval != self.interval:
+                self.core.scheduler.removeJob(self.cb)
+                self.interval = interval
+                self.initPeriodical()
+            else:
+                self.logDebug("Invalid interval value, kept current")
+
+        elif name == "reloadplugins":
+            if self.cb2:
+                self.core.scheduler.removeJob(self.cb2)
+            if value is True and self.core.debug:
+                self.periodical2()
+
+
+    def coreReady(self):
+        self.pluginConfigChanged(self.__name__, "interval", self.getConfig("interval"))
+        x = lambda: self.pluginConfigChanged(self.__name__, "reloadplugins", self.getConfig("reloadplugins"))
+        self.core.scheduler.addJob(10, x, threaded=False)
+
+
+    def unload(self):
+        self.pluginConfigChanged(self.__name__, "reloadplugins", False)
+
+
+    def setup(self):
+        self.cb2 = None
+        self.interval = self.MIN_INTERVAL
+        self.updating = False
+        self.info = {'pyload': False, 'version': None, 'plugins': False}
+        self.mtimes = {}  #: store modification time for each plugin
+
+
+    def periodical2(self):
+        if not self.updating:
+            self.autoreloadPlugins()
+
+        self.cb2 = self.core.scheduler.addJob(4, self.periodical2, threaded=False)
+
+
+    @Expose
+    def autoreloadPlugins(self):
+        """ reload and reindex all modified plugins """
+        modules = filter(
+            lambda m: m and (m.__name__.startswith("pyload.plugins.") or
+                             m.__name__.startswith("userplugins.")) and
+                      m.__name__.count(".") >= 2, sys.modules.itervalues()
+        )
+
+        reloads = []
+
+        for m in modules:
+            root, type, name = m.__name__.rsplit(".", 2)
+            id = (type, name)
+            if type in self.core.pluginManager.plugins:
+                f = m.__file__.replace(".pyc", ".py")
+                if not path.isfile(f):
+                    continue
+
+                mtime = stat(f).st_mtime
+
+                if id not in self.mtimes:
+                    self.mtimes[id] = mtime
+                elif self.mtimes[id] < mtime:
+                    reloads.append(id)
+                    self.mtimes[id] = mtime
+
+        return bool(self.core.pluginManager.reloadPlugins(reloads))
+
+
+    def periodical(self):
+        if not self.info['pyload'] and not (self.getConfig("nodebugupdate") and self.core.debug):
+            self.updateThread()
+
+
+    def server_request(self):
+        try:
+            return getURL(self.SERVER_URL, get={'v': self.core.api.getServerVersion()}).splitlines()
+        except:
+            self.logWarning(_("Unable to contact server to get updates"))
+
+
+    @threaded
+    def updateThread(self):
+        self.updating = True
+
+        status = self.update(onlyplugin=self.getConfig("mode") == "plugins only")
+
+        if status == 2:
+            self.core.api.restart()
+        else:
+            self.updating = False
+
+
+    @Expose
+    def updatePlugins(self):
+        """ simple wrapper for calling plugin update quickly """
+        return self.update(onlyplugin=True)
+
+
+    @Expose
+    def update(self, onlyplugin=False):
+        """ check for updates """
+        data = self.server_request()
+
+        if not data:
+            exitcode = 0
+
+        elif data[0] == "None":
+            self.logInfo(_("No new pyLoad version available"))
+            updates = data[1:]
+            exitcode = self._updatePlugins(updates)
+
+        elif onlyplugin:
+            exitcode = 0
+
+        else:
+            newversion = data[0]
+            self.logInfo(_("*** New pyLoad Version %s available ***") % newversion)
+            self.logInfo(_("*** Get it here: https://github.com/pyload/pyload/releases ***"))
+            exitcode = 3
+            self.info['pyload'] = True
+            self.info['version'] = newversion
+
+        return exitcode  #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required; 3 = No plugins updated, new pyLoad version available
+
+
+    def _updatePlugins(self, updates):
+        """ check for plugin updates """
+
+        if self.info['plugins']:
+            return False  #: plugins were already updated
+
+        exitcode = 0
+        updated = []
+
+        vre = re.compile(r'__version__.*=.*("|\')([\d.]+)')
+        url = updates[0]
+        schema = updates[1].split('|')
+
+        if "BLACKLIST" in updates:
+            blacklist = updates[updates.index('BLACKLIST') + 1:]
+            updates = updates[2:updates.index('BLACKLIST')]
+        else:
+            blacklist = None
+            updates = updates[2:]
+
+        upgradable = sorted(map(lambda x: dict(zip(schema, x.split('|'))), updates),
+                            key=itemgetter("type", "name"))
+
+        for plugin in upgradable:
+            filename = plugin['name']
+            prefix = plugin['type']
+            version = plugin['version']
+
+            if filename.endswith(".pyc"):
+                name = filename[:filename.find("_")]
+            else:
+                name = filename.replace(".py", "")
+
+            #@TODO: obsolete after 0.4.10
+            if prefix.endswith("s"):
+                type = prefix[:-1]
+            else:
+                type = prefix
+
+            plugins = getattr(self.core.pluginManager, "%sPlugins" % type)
+
+            oldver = float(plugins[name]['v']) if name in plugins else None
+            newver = float(version)
+
+            if not oldver:
+                msg = "New plugin: [%(type)s] %(name)s (v%(newver).2f)"
+            elif newver > oldver:
+                msg = "New version of plugin: [%(type)s] %(name)s (v%(oldver).2f -> v%(newver).2f)"
+            else:
+                continue
+
+            self.logInfo(_(msg) % {'type'  : type,
+                                   'name'  : name,
+                                   'oldver': oldver,
+                                   'newver': newver})
+            try:
+                content = getURL(url % plugin)
+                m = vre.search(content)
+
+                if m and m.group(2) == version:
+                    f = open(safe_join("userplugins", prefix, filename), "wb")
+                    f.write(content)
+                    f.close()
+                    updated.append((prefix, name))
+                else:
+                    raise Exception, _("Version mismatch")
+
+            except Exception, e:
+                self.logError(_("Error updating plugin %s") % filename, e)
+
+        if blacklist:
+            blacklisted = map(lambda x: (x.split('|')[0], x.split('|')[1].rsplit('.', 1)[0]), blacklist)
+
+            # Always protect base and internal plugins from removal
+            blacklisted = [(t, n) for t, n in blacklisted
+                           if t not in ("base", "internal")]  #: the previous in-place deletion loop was broken
+
+            blacklisted = sorted(blacklisted)
+            removed = self.removePlugins(blacklisted)
+            for t, n in removed:
+                self.logInfo(_("Removed blacklisted plugin [%(type)s] %(name)s") % {
+                    'type': t,
+                    'name': n,
+                })
+
+        if updated:
+            reloaded = self.core.pluginManager.reloadPlugins(updated)
+            if reloaded:
+                self.logInfo(_("Plugins updated and reloaded"))
+                exitcode = 1
+            else:
+                self.logInfo(_("*** Plugins have been updated, but need a pyLoad restart to be reloaded ***"))
+                self.info['plugins'] = True
+                exitcode = 2
+        else:
+            self.logInfo(_("No plugin updates available"))
+
+        return exitcode  #: 0 = No plugins updated; 1 = Plugins updated; 2 = Plugins updated, but restart required
+
+
+    @Expose
+    def removePlugins(self, type_plugins):
+        """ delete plugins from disk """
+
+        if not type_plugins:
+            return
+
+        self.logDebug("Requested deletion of plugins: %s" % type_plugins)
+
+        removed = []
+
+        for type, name in type_plugins:
+            err = False
+            file = name + ".py"
+
+            for root in ("userplugins", path.join(pypath, "pyload", "plugins")):
+
+                filename = safe_join(root, type, file)
+                try:
+                    remove(filename)
+                except Exception, e:
+                    self.logDebug("Error deleting: %s" % path.basename(filename), e)
+                    err = True
+
+                filename += "c"
+                if path.isfile(filename):
+                    try:
+                        if type == "addon":
+                            self.manager.deactivateAddon(name)
+                        remove(filename)
+                    except Exception, e:
+                        self.logDebug("Error deleting: %s" % path.basename(filename), e)
+                        err = True
+
+            if not err:
+                id = (type, name)
+                removed.append(id)
+
+        return removed  #: return a list of the plugins successfully removed
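update() and _updatePlugins() read the server reply as plain lines: first either "None" or a new pyLoad version, then a URL template, a '|'-separated schema, one record per plugin, and an optional BLACKLIST tail. A fabricated reply illustrating the shape _updatePlugins() expects (the real endpoint's output may differ):

    data = ["None",                                                        # no new pyLoad version
            "http://updatemanager.pyload.org/plugins/%(type)s/%(name)s",  # url template, filled per record
            "type|name|version",                                          # schema
            "hoster|ExampleCom.py|0.02",                                  # one record per plugin
            "BLACKLIST",
            "hoster|DeadSite.py"]                                         # records after BLACKLIST get removed

Each record becomes a dict such as {'type': 'hoster', 'name': 'ExampleCom.py', 'version': '0.02'} via dict(zip(schema, line.split('|'))).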
diff --git a/pyload/plugins/internal/XFSPAccount.py b/pyload/plugins/internal/XFSPAccount.py
new file mode 100644
index 000000000..edf6ad3cc
--- /dev/null
+++ b/pyload/plugins/internal/XFSPAccount.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from urlparse import urljoin
+from time import gmtime, mktime, strptime
+
+from pyload.plugins.base.Account import Account
+from pyload.plugins.internal.SimpleHoster import parseHtmlForm, set_cookies
+from pyload.utils import parseFileSize
+
+
+class XFSPAccount(Account):
+    __name__ = "XFSPAccount"
+    __type__ = "account"
+    __version__ = "0.09"
+
+    __description__ = """XFileSharingPro base account plugin"""
+    __authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
+                   ("Walter Purcaro", "vuolter@gmail.com")]
+
+
+    HOSTER_URL = None
+
+    COOKIES = None  #: or list of tuples [(domain, name, value)]
+
+    VALID_UNTIL_PATTERN = r'>Premium.[Aa]ccount expire:.*?<b>(.+?)</b>'
+    TRAFFIC_LEFT_PATTERN = r'>Traffic available today:.*?<b>(?P<S>.+?)</b>'
+    LOGIN_FAIL_PATTERN = r'>(Incorrect Login or Password|Error<)'
+    # PREMIUM_PATTERN = r'>Renew premium<'
+
+
+    def loadAccountInfo(self, user, req):
+        html = req.load(self.HOSTER_URL, get={'op': "my_account"}, decode=True)
+
+        validuntil = None
+        trafficleft = None
+        premium = None
+
+        if hasattr(self, "PREMIUM_PATTERN"):
+            premium = True if re.search(self.PREMIUM_PATTERN, html) else False
+
+        m = re.search(self.VALID_UNTIL_PATTERN, html)
+        if m:
+            expiredate = m.group(1)
+            self.logDebug("Expire date: " + expiredate)
+
+            try:
+                validuntil = mktime(strptime(expiredate, "%d %B %Y"))
+            except Exception, e:
+                self.logError(e)
+            else:
+                if validuntil > mktime(gmtime()):
+                    premium = True
+                    trafficleft = -1
+                else:
+                    if premium is False:  #: registered account type (not premium)
+                        validuntil = -1
+                    premium = False
+
+        try:
+            traffic = re.search(self.TRAFFIC_LEFT_PATTERN, html).groupdict()
+            trafficsize = (traffic['S'] + traffic['U']) if 'U' in traffic else traffic['S']
+            if "Unlimited" in trafficsize:
+                trafficleft = -1
+                if premium is None:
+                    premium = True
+            else:
+                trafficleft = parseFileSize(trafficsize)
+        except:
+            pass
+
+        if premium is None:
+            premium = False
+
+        return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium}
+
+
+    def login(self, user, data, req):
+        set_cookies(req.cj, self.COOKIES or [])  #: COOKIES defaults to None, set_cookies expects an iterable
+
+        url = urljoin(self.HOSTER_URL, "login.html")
+        html = req.load(url, decode=True)
+
+        action, inputs = parseHtmlForm('name="FL"', html)
+        if not inputs:
+            inputs = {'op': "login",
+                      'redirect': self.HOSTER_URL}
+
+        inputs.update({'login': user,
+                       'password': data['password']})
+
+        html = req.load(self.HOSTER_URL, post=inputs, decode=True)
+
+        if re.search(self.LOGIN_FAIL_PATTERN, html):
+            self.wrongPassword()
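VALID_UNTIL_PATTERN's capture is fed to strptime with the "%d %B %Y" format, so the account page must spell the expiry like "25 December 2014" for the premium check to work:

    >>> from time import strptime
    >>> strptime("25 December 2014", "%d %B %Y")[:3]
    (2014, 12, 25)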
self.HOSTER_NAME = "".join([str.capitalize() for str in self.HOSTER_DOMAIN.split('.')]) + + account = self.core.accountManager.getAccountPlugin(self.HOSTER_NAME) + + if account and account.canUse(): + self.account = account + elif self.account: + self.account.HOSTER_DOMAIN = self.HOSTER_DOMAIN + else: + return + + self.user, data = self.account.selectAccount() + self.req = self.account.getAccountRequest(self.user) + self.premium = self.account.isPremium(self.user) + + + def setup(self): + self.chunkLimit = 1 + self.resumeDownload = self.premium + self.multiDL = True + + +getInfo = create_getInfo(XFileSharingPro) diff --git a/pyload/plugins/internal/__init__.py b/pyload/plugins/internal/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/pyload/plugins/internal/__init__.py |