Diffstat (limited to 'pyload/plugins/internal')
 pyload/plugins/internal/CaptchaService.py | 16
 pyload/plugins/internal/DeadCrypter.py    |  7
 pyload/plugins/internal/DeadHoster.py     |  9
 pyload/plugins/internal/MultiHoster.py    | 16
 pyload/plugins/internal/SimpleCrypter.py  | 48
 pyload/plugins/internal/SimpleHoster.py   | 71
 pyload/plugins/internal/UnRar.py          | 31
7 files changed, 122 insertions, 76 deletions
diff --git a/pyload/plugins/internal/CaptchaService.py b/pyload/plugins/internal/CaptchaService.py
index b247ba654..26482379d 100644
--- a/pyload/plugins/internal/CaptchaService.py
+++ b/pyload/plugins/internal/CaptchaService.py
@@ -7,7 +7,7 @@ from random import random
class CaptchaService:
__name__ = "CaptchaService"
- __version__ = "0.05"
+ __version__ = "0.06"
__description__ = """Captcha service plugin"""
__author_name__ = "pyLoad Team"
@@ -28,6 +28,7 @@ class ReCaptcha:
def __init__(self, plugin):
self.plugin = plugin
+
def detect_key(self, html):
m = re.search(self.RECAPTCHA_KEY_PATTERN, html)
if m is None:
@@ -38,11 +39,13 @@ class ReCaptcha:
else:
return None
+
def challenge(self, key=None):
- if key is None and self.recaptcha_key:
- key = self.recaptcha_key
- else:
- raise TypeError("ReCaptcha key not found")
+ if not key:
+ if self.recaptcha_key:
+ key = self.recaptcha_key
+ else:
+ raise TypeError("ReCaptcha key not found")
js = self.plugin.req.load("http://www.google.com/recaptcha/api/challenge", get={"k": key}, cookies=True)
@@ -55,6 +58,7 @@ class ReCaptcha:
return challenge, result
+
def result(self, server, challenge):
return self.plugin.decryptCaptcha("%simage" % server, get={"c": challenge},
cookies=True, forceUser=True, imgtype="jpg")
@@ -74,6 +78,7 @@ class AdsCaptcha(CaptchaService):
return challenge, result
+
def result(self, server, challenge):
return self.plugin.decryptCaptcha("%sChallenge.aspx" % server, get={"cid": challenge, "dummy": random()},
cookies=True, imgtype="jpg")
@@ -92,5 +97,6 @@ class SolveMedia(CaptchaService):
return challenge, result
+
def result(self, challenge):
return self.plugin.decryptCaptcha("http://api.solvemedia.com/papi/media?c=%s" % challenge, imgtype="gif")
diff --git a/pyload/plugins/internal/DeadCrypter.py b/pyload/plugins/internal/DeadCrypter.py
index ea9c414cb..1c484274b 100644
--- a/pyload/plugins/internal/DeadCrypter.py
+++ b/pyload/plugins/internal/DeadCrypter.py
@@ -6,14 +6,15 @@ from pyload.plugins.Crypter import Crypter as _Crypter
class DeadCrypter(_Crypter):
__name__ = "DeadCrypter"
__type__ = "crypter"
- __version__ = "0.01"
+ __version__ = "0.02"
__pattern__ = None
- __description__ = """Crypter is no longer available"""
+ __description__ = """ Crypter is no longer available """
__author_name__ = "stickell"
__author_mail__ = "l.stickell@yahoo.it"
def setup(self):
- self.fail("Crypter is no longer available")
+ self.pyfile.error = "Crypter is no longer available"
+ self.offline() #@TODO: self.offline("Crypter is no longer available")
diff --git a/pyload/plugins/internal/DeadHoster.py b/pyload/plugins/internal/DeadHoster.py
index 0b2398020..fc7e1a6ad 100644
--- a/pyload/plugins/internal/DeadHoster.py
+++ b/pyload/plugins/internal/DeadHoster.py
@@ -6,7 +6,7 @@ from pyload.plugins.Hoster import Hoster as _Hoster
def create_getInfo(plugin):
def getInfo(urls):
- yield [('#N/A: ' + url, 0, 1, url) for url in urls]
+ yield map(lambda url: ('#N/A: ' + url, 0, 1, url), urls)
return getInfo
@@ -14,14 +14,15 @@ def create_getInfo(plugin):
class DeadHoster(_Hoster):
__name__ = "DeadHoster"
__type__ = "hoster"
- __version__ = "0.11"
+ __version__ = "0.12"
__pattern__ = None
- __description__ = """Hoster is no longer available"""
+ __description__ = """ Hoster is no longer available """
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
def setup(self):
- self.fail("Hoster is no longer available")
+ self.pyfile.error = "Hoster is no longer available"
+ self.offline() #@TODO: self.offline("Hoster is no longer available")
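With the new setup() in both DeadCrypter and DeadHoster, a retired plugin marks its files offline (keeping the reason in pyfile.error) instead of failing outright. Declaring one stays a few lines; a hypothetical example with an invented hoster name and pattern:

    from pyload.plugins.internal.DeadHoster import DeadHoster, create_getInfo

    class ExampleShareCom(DeadHoster):
        __name__ = "ExampleShareCom"
        __type__ = "hoster"
        __version__ = "0.01"
        __pattern__ = r'http://(?:www\.)?exampleshare\.com/\w+'
        __description__ = """ExampleShare.com hoster plugin"""

    # yields ('#N/A: <url>', 0, 1, url) per link, i.e. offline status
    getInfo = create_getInfo(ExampleShareCom)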
diff --git a/pyload/plugins/internal/MultiHoster.py b/pyload/plugins/internal/MultiHoster.py
index d99ae6ff9..fdaccdd5b 100644
--- a/pyload/plugins/internal/MultiHoster.py
+++ b/pyload/plugins/internal/MultiHoster.py
@@ -41,7 +41,7 @@ class MultiHoster(Hook):
try:
hosterSet = self.toHosterSet(self.getHoster()) - set(self.ignored)
except Exception, e:
- self.logError("%s" % str(e))
+ self.logError(e)
return []
try:
@@ -55,7 +55,7 @@ class MultiHoster(Hook):
hosterSet -= configSet
except Exception, e:
- self.logError("%s" % str(e))
+ self.logError(e)
self.hosters = list(hosterSet)
@@ -99,7 +99,7 @@ class MultiHoster(Hook):
def periodical(self):
"""reload hoster list periodically"""
- self.logInfo("Reloading supported hoster list")
+ self.logInfo(_("Reloading supported hoster list"))
old_supported = self.supported
self.supported, self.new_supported, self.hosters = [], [], []
@@ -108,7 +108,7 @@ class MultiHoster(Hook):
old_supported = [hoster for hoster in old_supported if hoster not in self.supported]
if old_supported:
- self.logDebug("UNLOAD: %s" % ", ".join(old_supported))
+ self.logDebug("UNLOAD", ", ".join(old_supported))
for hoster in old_supported:
self.unloadHoster(hoster)
@@ -139,24 +139,24 @@ class MultiHoster(Hook):
klass = getattr(module, self.__name__)
# inject plugin plugin
- self.logDebug("Overwritten Hosters: %s" % ", ".join(sorted(self.supported)))
+ self.logDebug("Overwritten Hosters", ", ".join(sorted(self.supported)))
for hoster in self.supported:
dict = self.core.pluginManager.hosterPlugins[hoster]
dict['new_module'] = module
dict['new_name'] = self.__name__
if excludedList:
- self.logInfo("The following hosters were not overwritten - account exists: %s" % ", ".join(sorted(excludedList)))
+ self.logInfo(_("The following hosters were not overwritten - account exists"), ", ".join(sorted(excludedList)))
if self.new_supported:
- self.logDebug("New Hosters: %s" % ", ".join(sorted(self.new_supported)))
+ self.logDebug("New Hosters", ", ".join(sorted(self.new_supported)))
# create new regexp
regexp = r".*(%s).*" % "|".join([x.replace(".", "\\.") for x in self.new_supported])
if hasattr(klass, "__pattern__") and isinstance(klass.__pattern__, basestring) and '://' in klass.__pattern__:
regexp = r"%s|%s" % (klass.__pattern__, regexp)
- self.logDebug("Regexp: %s" % regexp)
+ self.logDebug("Regexp", regexp)
dict = self.core.pluginManager.hosterPlugins[self.__name__]
dict['pattern'] = regexp
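Two conventions change throughout this hook: user-facing messages are wrapped in gettext's _() so they can be translated, and log values are passed to log* methods as separate arguments instead of being pre-formatted with %. A standalone sketch of the idea; the " | " separator is an assumption, pyLoad's base plugin defines the actual join:

    import gettext
    import logging

    gettext.install("pyload")  # makes _() available as a builtin
    log = logging.getLogger("MultiHoster")

    def logDebug(*args):
        log.debug(" | ".join(map(str, args)))

    logDebug("UNLOAD", ", ".join(["uploaded.net", "rapidgator.net"]))
    log.info(_("Reloading supported hoster list"))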
diff --git a/pyload/plugins/internal/SimpleCrypter.py b/pyload/plugins/internal/SimpleCrypter.py
index 6e639c946..d9982007d 100644
--- a/pyload/plugins/internal/SimpleCrypter.py
+++ b/pyload/plugins/internal/SimpleCrypter.py
@@ -10,7 +10,7 @@ from pyload.utils import html_unescape
class SimpleCrypter(Crypter):
__name__ = "SimpleCrypter"
__type__ = "crypter"
- __version__ = "0.10"
+ __version__ = "0.12"
__pattern__ = None
@@ -24,7 +24,7 @@ class SimpleCrypter(Crypter):
LINK_PATTERN: group(1) must be a download link or a regex to catch more links
example: LINK_PATTERN = r'<div class="link"><a href="(http://speedload.org/\w+)'
- TITLE_PATTERN: (optional) The group defined by 'title' should be the title
+ TITLE_PATTERN: (optional) The group defined by 'title' should be the folder name or the webpage title
example: TITLE_PATTERN = r'<title>Files of: (?P<title>[^<]+) folder</title>'
OFFLINE_PATTERN: (optional) Checks if the file is yet available online
@@ -34,32 +34,47 @@ class SimpleCrypter(Crypter):
example: TEMP_OFFLINE_PATTERN = r'Server maintainance'
- If it's impossible to extract the links using the LINK_PATTERN only you can override the getLinks method.
+ You can override the getLinks method if you need a more sophisticated way to extract the links.
- If the links are disposed on multiple pages you need to define a pattern:
- PAGES_PATTERN: The group defined by 'pages' must be the total number of pages
+ If the links are splitted on multiple pages you can define the PAGES_PATTERN regex:
+
+ PAGES_PATTERN: (optional) The group defined by 'pages' should be the number of overall pages containing the links
example: PAGES_PATTERN = r'Pages: (?P<pages>\d+)'
- and a function:
+ and its loadPage method:
- loadPage(self, page_n):
- return the html of the page number 'page_n'
+ def loadPage(self, page_n):
+ return the html of the page number page_n
"""
+
URL_REPLACEMENTS = []
- SH_COOKIES = True # or False or list of tuples [(domain, name, value)]
+ TEXT_ENCODING = False #: Set to True or encoding name if encoding in http header is not correct
+ COOKIES = True #: or False or list of tuples [(domain, name, value)]
+
+ LOGIN_ACCOUNT = False
+ LOGIN_PREMIUM = False
+
+
+ def prepare(self):
+ if self.LOGIN_ACCOUNT and not self.account:
+ self.fail('Required account not found!')
+
+ if self.LOGIN_PREMIUM and not self.premium:
+ self.fail('Required premium account not found!')
+ if isinstance(self.COOKIES, list):
+ set_cookies(self.req.cj, self.COOKIES)
- def setup(self):
- if isinstance(self.SH_COOKIES, list):
- set_cookies(self.req.cj, self.SH_COOKIES)
def decrypt(self, pyfile):
+ self.prepare()
+
pyfile.url = replace_patterns(pyfile.url, self.URL_REPLACEMENTS)
- self.html = self.load(pyfile.url, decode=True)
+ self.html = self.load(pyfile.url, decode=not self.TEXT_ENCODING)
self.checkOnline()
@@ -70,13 +85,14 @@ class SimpleCrypter(Crypter):
if hasattr(self, 'PAGES_PATTERN') and hasattr(self, 'loadPage'):
self.handleMultiPages()
- self.logDebug('Package has %d links' % len(self.package_links))
+ self.logDebug("Package has %d links" % len(self.package_links))
if self.package_links:
self.packages = [(package_name, self.package_links, folder_name)]
else:
self.fail('Could not extract any links')
+
def getLinks(self):
"""
Returns the links extracted from self.html
@@ -84,12 +100,14 @@ class SimpleCrypter(Crypter):
"""
return re.findall(self.LINK_PATTERN, self.html)
+
def checkOnline(self):
if hasattr(self, "OFFLINE_PATTERN") and re.search(self.OFFLINE_PATTERN, self.html):
self.offline()
elif hasattr(self, "TEMP_OFFLINE_PATTERN") and re.search(self.TEMP_OFFLINE_PATTERN, self.html):
self.tempOffline()
+
def getPackageNameAndFolder(self):
if hasattr(self, 'TITLE_PATTERN'):
m = re.search(self.TITLE_PATTERN, self.html)
@@ -103,6 +121,7 @@ class SimpleCrypter(Crypter):
self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
return name, folder
+
def handleMultiPages(self):
pages = re.search(self.PAGES_PATTERN, self.html)
if pages:
@@ -114,5 +133,6 @@ class SimpleCrypter(Crypter):
self.html = self.loadPage(p)
self.package_links += self.getLinks()
+
def parseError(self, msg):
raise PluginParseError(msg)
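Putting the revised SimpleCrypter API together: SH_COOKIES becomes COOKIES, TEXT_ENCODING controls decoding, LOGIN_ACCOUNT/LOGIN_PREMIUM guard the new prepare() step, and PAGES_PATTERN plus loadPage() handle paginated folders. A hypothetical subclass, with an invented site and patterns:

    from pyload.plugins.internal.SimpleCrypter import SimpleCrypter

    class ExampleFolderCom(SimpleCrypter):
        __name__ = "ExampleFolderCom"
        __type__ = "crypter"
        __version__ = "0.01"
        __pattern__ = r'http://(?:www\.)?examplefolder\.com/folder/\w+'

        LINK_PATTERN = r'<a class="file" href="(http://examplefolder\.com/\w+)"'
        TITLE_PATTERN = r'<title>Files of: (?P<title>[^<]+)</title>'
        OFFLINE_PATTERN = r'Folder not found'
        PAGES_PATTERN = r'Pages: (?P<pages>\d+)'

        COOKIES = [("examplefolder.com", "lang", "en")]  #: was SH_COOKIES
        LOGIN_PREMIUM = False  #: set True to require a premium account

        def loadPage(self, page_n):
            return self.load("%s?page=%d" % (self.pyfile.url, page_n), decode=True)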
diff --git a/pyload/plugins/internal/SimpleHoster.py b/pyload/plugins/internal/SimpleHoster.py
index ca320732f..75c6fc8e8 100644
--- a/pyload/plugins/internal/SimpleHoster.py
+++ b/pyload/plugins/internal/SimpleHoster.py
@@ -15,7 +15,6 @@ def replace_patterns(string, ruleslist):
for r in ruleslist:
rf, rt = r
string = re.sub(rf, rt, string)
- #self.logDebug(rf, rt, string)
return string
@@ -78,8 +77,8 @@ def parseFileInfo(self, url='', html=''):
else:
if not html and hasattr(self, "html"):
html = self.html
- if isinstance(self.SH_BROKEN_ENCODING, (str, unicode)):
- html = unicode(html, self.SH_BROKEN_ENCODING)
+ if isinstance(self.TEXT_ENCODING, basestring):
+ html = unicode(html, self.TEXT_ENCODING)
if hasattr(self, "html"):
self.html = html
@@ -112,7 +111,7 @@ def parseFileInfo(self, url='', html=''):
size = replace_patterns(info['S'] + info['U'] if 'U' in info else info['S'],
self.FILE_SIZE_REPLACEMENTS)
info['size'] = parseFileSize(size)
- elif isinstance(info['size'], (str, unicode)):
+ elif isinstance(info['size'], basestring):
if 'units' in info:
info['size'] += info['units']
info['size'] = parseFileSize(info['size'])
@@ -128,10 +127,10 @@ def create_getInfo(plugin):
def getInfo(urls):
for url in urls:
cj = CookieJar(plugin.__name__)
- if isinstance(plugin.SH_COOKIES, list):
- set_cookies(cj, plugin.SH_COOKIES)
+ if isinstance(plugin.COOKIES, list):
+ set_cookies(cj, plugin.COOKIES)
file_info = parseFileInfo(plugin, url, getURL(replace_patterns(url, plugin.FILE_URL_REPLACEMENTS),
- decode=not plugin.SH_BROKEN_ENCODING, cookies=cj))
+ decode=not plugin.TEXT_ENCODING, cookies=cj))
yield file_info
return getInfo
@@ -154,13 +153,13 @@ class PluginParseError(Exception):
class SimpleHoster(Hoster):
__name__ = "SimpleHoster"
__type__ = "hoster"
- __version__ = "0.35"
+ __version__ = "0.36"
__pattern__ = None
__description__ = """Simple hoster plugin"""
- __author_name__ = ("zoidberg", "stickell")
- __author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it")
+ __author_name__ = ("zoidberg", "stickell", "Walter Purcaro")
+ __author_mail__ = ("zoidberg@mujmail.cz", "l.stickell@yahoo.it", "vuolter@gmail.com")
"""
Following patterns should be defined by each hoster:
@@ -187,46 +186,49 @@ class SimpleHoster(Hoster):
FILE_SIZE_REPLACEMENTS = []
FILE_URL_REPLACEMENTS = []
- SH_BROKEN_ENCODING = False # Set to True or encoding name if encoding in http header is not correct
- SH_COOKIES = True # or False or list of tuples [(domain, name, value)]
- SH_CHECK_TRAFFIC = False # True = force check traffic left for a premium account
+ TEXT_ENCODING = False #: Set to True or encoding name if encoding in http header is not correct
+ COOKIES = True #: or False or list of tuples [(domain, name, value)]
+ FORCE_CHECK_TRAFFIC = False #: Set to True to force checking traffic left for premium account
def init(self):
self.file_info = {}
+
def setup(self):
self.resumeDownload = self.multiDL = self.premium
- if isinstance(self.SH_COOKIES, list):
- set_cookies(self.req.cj, self.SH_COOKIES)
+
+
+ def prepare(self):
+ if isinstance(self.COOKIES, list):
+ set_cookies(self.req.cj, self.COOKIES)
+ self.req.setOption("timeout", 120)
+
def process(self, pyfile):
+ self.prepare()
+
pyfile.url = replace_patterns(pyfile.url, self.FILE_URL_REPLACEMENTS)
- self.req.setOption("timeout", 120)
+
# Due to a 0.4.9 core bug self.load would keep previous cookies even if overridden by cookies parameter.
- # Workaround using getURL. Can be reverted in 0.5 as the cookies bug has been fixed.
- self.html = getURL(pyfile.url, decode=not self.SH_BROKEN_ENCODING, cookies=self.SH_COOKIES)
+ # Workaround using getURL. Can be reverted in 0.4.10 as the cookies bug has been fixed.
+ self.html = getURL(pyfile.url, decode=not self.TEXT_ENCODING, cookies=self.COOKIES)
premium_only = hasattr(self, 'PREMIUM_ONLY_PATTERN') and re.search(self.PREMIUM_ONLY_PATTERN, self.html)
if not premium_only: # Usually premium only pages doesn't show the file information
self.getFileInfo()
- if self.premium and (not self.SH_CHECK_TRAFFIC or self.checkTrafficLeft()):
+ if self.premium and (not self.FORCE_CHECK_TRAFFIC or self.checkTrafficLeft()):
self.handlePremium()
elif premium_only:
self.fail("This link require a premium account")
else:
- # This line is required due to the getURL workaround. Can be removed in 0.5
- self.html = self.load(pyfile.url, decode=not self.SH_BROKEN_ENCODING, cookies=self.SH_COOKIES)
+ # This line is required due to the getURL workaround. Can be removed in 0.4.10
+ self.html = self.load(pyfile.url, decode=not self.TEXT_ENCODING)
self.handleFree()
- def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=False):
- if type(url) == unicode:
- url = url.encode('utf8')
- return Hoster.load(self, url=url, get=get, post=post, ref=ref, cookies=cookies,
- just_header=just_header, decode=decode)
def getFileInfo(self):
- self.logDebug("URL: %s" % self.pyfile.url)
+ self.logDebug("URL", self.pyfile.url)
name, size, status = parseFileInfo(self)[:3]
@@ -246,20 +248,24 @@ class SimpleHoster(Hoster):
if size:
self.pyfile.size = size
else:
- self.logError("File size not parsed")
+ self.logError(_("File size not parsed"))
self.logDebug("FILE NAME: %s FILE SIZE: %s" % (self.pyfile.name, self.pyfile.size))
return self.file_info
+
def handleFree(self):
self.fail("Free download not implemented")
+
def handlePremium(self):
self.fail("Premium download not implemented")
+
def parseError(self, msg):
raise PluginParseError(msg)
+
def longWait(self, wait_time=None, max_tries=3):
if wait_time and isinstance(wait_time, (int, long, float)):
time_str = "%dh %dm" % divmod(wait_time / 60, 60)
@@ -268,24 +274,27 @@ class SimpleHoster(Hoster):
time_str = "(unknown time)"
max_tries = 100
- self.logInfo("Download limit reached, reconnect or wait %s" % time_str)
+ self.logInfo(_("Download limit reached, reconnect or wait %s") % time_str)
self.setWait(wait_time, True)
self.wait()
self.retry(max_tries=max_tries, reason="Download limit reached")
+
def parseHtmlForm(self, attr_str='', input_names=None):
return parseHtmlForm(attr_str, self.html, input_names)
+
def checkTrafficLeft(self):
traffic = self.account.getAccountInfo(self.user, True)['trafficleft']
if traffic == -1:
return True
size = self.pyfile.size / 1024
- self.logInfo("Filesize: %i KiB, Traffic left for user %s: %i KiB" % (size, self.user, traffic))
+ self.logInfo(_("Filesize: %i KiB, Traffic left for user %s: %i KiB") % (size, self.user, traffic))
return size <= traffic
- # TODO: Remove in 0.5
+
+ #@TODO: Remove in 0.4.10
def wait(self, seconds=False, reconnect=False):
if seconds:
self.setWait(seconds, reconnect)
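The SimpleHoster renames mirror the crypter ones (SH_BROKEN_ENCODING becomes TEXT_ENCODING, SH_COOKIES becomes COOKIES, SH_CHECK_TRAFFIC becomes FORCE_CHECK_TRAFFIC), and the cookie/timeout setup moves into prepare(). A hypothetical subclass with an invented site; the N/S/U group names follow the convention parseFileInfo reads above:

    import re

    from pyload.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo

    class ExampleHostNet(SimpleHoster):
        __name__ = "ExampleHostNet"
        __type__ = "hoster"
        __version__ = "0.01"
        __pattern__ = r'http://(?:www\.)?examplehost\.net/\w{12}'

        FILE_INFO_PATTERN = r'<h1>(?P<N>[^<]+)</h1>\s*\((?P<S>[\d.]+) (?P<U>\w+)\)'
        OFFLINE_PATTERN = r'>File not found<'

        TEXT_ENCODING = "utf8"      #: was SH_BROKEN_ENCODING
        FORCE_CHECK_TRAFFIC = True  #: was SH_CHECK_TRAFFIC

        def handleFree(self):
            m = re.search(r'href="(/dl/\w+)"', self.html)
            if m is None:
                self.parseError("Download link")
            self.download("http://examplehost.net" + m.group(1))

    getInfo = create_getInfo(ExampleHostNet)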
diff --git a/pyload/plugins/internal/UnRar.py b/pyload/plugins/internal/UnRar.py
index ed8478a3a..0f54e75b9 100644
--- a/pyload/plugins/internal/UnRar.py
+++ b/pyload/plugins/internal/UnRar.py
@@ -4,7 +4,7 @@ import os
import re
from glob import glob
-from os.path import join
+from os.path import basename, join
from string import digits
from subprocess import Popen, PIPE
@@ -12,14 +12,23 @@ from pyload.plugins.internal.AbstractExtractor import AbtractExtractor, WrongPas
from pyload.utils import safe_join, decode
+def renice(pid, value):
+ if os.name != "nt" and value:
+ try:
+ Popen(["renice", str(value), str(pid)], stdout=PIPE, stderr=PIPE, bufsize=-1)
+ except:
+ print "Renice failed"
+
+
class UnRar(AbtractExtractor):
__name__ = "UnRar"
- __version__ = "0.16"
+ __version__ = "0.18"
__description__ = """Rar extractor plugin"""
__author_name__ = "RaNaN"
__author_mail__ = "RaNaN@pyload.org"
+
CMD = "unrar"
# there are some more uncovered rar formats
@@ -50,6 +59,7 @@ class UnRar(AbtractExtractor):
return True
+
@staticmethod
def getTargets(files_ids):
result = []
@@ -68,12 +78,14 @@ class UnRar(AbtractExtractor):
return result
+
def init(self):
self.passwordProtected = False
self.headerProtected = False #: list files will not work without password
self.smallestFile = None #: small file to test passwords
self.password = "" #: save the correct password
+
def checkArchive(self):
p = self.call_unrar("l", "-v", self.file)
out, err = p.communicate()
@@ -100,6 +112,7 @@ class UnRar(AbtractExtractor):
return False
+
def checkPassword(self, password):
# at this point we can only verify header protected files
if self.headerProtected:
@@ -110,6 +123,7 @@ class UnRar(AbtractExtractor):
return True
+
def extract(self, progress, password=None):
command = "x" if self.fullpath else "e"
@@ -151,13 +165,15 @@ class UnRar(AbtractExtractor):
self.password = password
self.listContent()
+
def getDeleteFiles(self):
- if ".part" in self.file:
+ if ".part" in basename(self.file):
return glob(re.sub("(?<=\.part)([01]+)", "*", self.file, re.IGNORECASE))
# get files which matches .r* and filter unsuited files out
parts = glob(re.sub(r"(?<=\.r)ar$", "*", self.file, re.IGNORECASE))
return filter(lambda x: self.re_partfiles.match(x), parts)
+
def listContent(self):
command = "vb" if self.fullpath else "lb"
p = self.call_unrar(command, "-v", self.file, password=self.password)
@@ -177,6 +193,7 @@ class UnRar(AbtractExtractor):
self.files = result
+
def call_unrar(self, command, *xargs, **kwargs):
args = []
# overwrite flag
@@ -202,11 +219,3 @@ class UnRar(AbtractExtractor):
p = Popen(call, stdout=PIPE, stderr=PIPE)
return p
-
-
-def renice(pid, value):
- if os.name != "nt" and value:
- try:
- Popen(["renice", str(value), str(pid)], stdout=PIPE, stderr=PIPE, bufsize=-1)
- except:
- print "Renice failed"
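This commit moves renice() from the bottom of the module to the top, ahead of the class that calls it; the behaviour is unchanged. A standalone sketch of what it does on a POSIX system, lowering the priority of a spawned unrar process so extraction does not starve running downloads (the archive name and nice value are illustrative):

    import os
    from subprocess import Popen, PIPE

    def renice(pid, value):
        if os.name != "nt" and value:
            try:
                Popen(["renice", str(value), str(pid)], stdout=PIPE, stderr=PIPE, bufsize=-1)
            except OSError:
                print "Renice failed"

    p = Popen(["unrar", "l", "archive.rar"], stdout=PIPE, stderr=PIPE)
    renice(p.pid, 10)  # positive nice value = lower scheduling priority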