summaryrefslogtreecommitdiffstats
path: root/module
diff options
context:
space:
mode:
authorGravatar Walter Purcaro <vuolter@users.noreply.github.com> 2015-10-08 12:24:34 +0200
committerGravatar Walter Purcaro <vuolter@users.noreply.github.com> 2015-10-08 12:24:34 +0200
commitc59aa4057608cd47084c66e41f363b5f981f2816 (patch)
tree73d787e55826537710ab526f583c46b0623c6c85 /module
parentSpare improvements (diff)
downloadpyload-c59aa4057608cd47084c66e41f363b5f981f2816.tar.xz
Fixpack (5)
Diffstat (limited to 'module')
-rw-r--r--module/plugins/hooks/LogMarker.py4
-rw-r--r--module/plugins/hooks/UpdateManager.py4
-rw-r--r--module/plugins/hoster/DailymotionCom.py4
-rw-r--r--module/plugins/hoster/FilefactoryCom.py4
-rw-r--r--module/plugins/hoster/FilerNet.py20
-rw-r--r--module/plugins/hoster/GigapetaCom.py22
-rw-r--r--module/plugins/hoster/QuickshareCz.py13
-rw-r--r--module/plugins/hoster/StreamCz.py2
-rw-r--r--module/plugins/hoster/UnibytesCom.py10
-rw-r--r--module/plugins/hoster/UploadedTo.py2
-rw-r--r--module/plugins/internal/Base.py52
-rw-r--r--module/plugins/internal/Plugin.py41
-rw-r--r--module/plugins/internal/SimpleHoster.py53
-rw-r--r--module/plugins/internal/XFSHoster.py7
14 files changed, 119 insertions, 119 deletions
diff --git a/module/plugins/hooks/LogMarker.py b/module/plugins/hooks/LogMarker.py
index e59e94598..0efab602f 100644
--- a/module/plugins/hooks/LogMarker.py
+++ b/module/plugins/hooks/LogMarker.py
@@ -9,7 +9,7 @@ from module.plugins.internal.Plugin import seconds_to_nexthour
class LogMarker(Addon):
__name__ = "LogMarker"
__type__ = "hook"
- __version__ = "0.02"
+ __version__ = "0.03"
__status__ = "testing"
__config__ = [("activated", "bool", "Activated" , False),
@@ -21,7 +21,7 @@ class LogMarker(Addon):
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
- def init(self):
+ def activated(self):
self.start_periodical(1 * 60 * 60 - 1, delay=seconds_to_nexthour(strict=True) - 1)
diff --git a/module/plugins/hooks/UpdateManager.py b/module/plugins/hooks/UpdateManager.py
index 872d4eb40..96b563ddf 100644
--- a/module/plugins/hooks/UpdateManager.py
+++ b/module/plugins/hooks/UpdateManager.py
@@ -212,8 +212,8 @@ class UpdateManager(Addon):
break
for t, n in self.remove_plugins(sorted(type_plugins)):
- self.log_info(_("Removed blacklisted plugin: [%(type)s] %(name)s") % {
- 'type': t,
+ self.log_info(_("Removed blacklisted plugin: %(type)s %(name)s") % {
+ 'type': t.upper(),
'name': n,
})
diff --git a/module/plugins/hoster/DailymotionCom.py b/module/plugins/hoster/DailymotionCom.py
index 63003dc06..73e119c8c 100644
--- a/module/plugins/hoster/DailymotionCom.py
+++ b/module/plugins/hoster/DailymotionCom.py
@@ -23,12 +23,16 @@ def get_info(urls):
if "error" in info or info['access_error']:
status = "offline"
+
else:
status = info['status']
+
if status in ("ready", "published"):
status = "online"
+
elif status in ("waiting", "processing"):
status = "temp. offline"
+
else:
status = "offline"
diff --git a/module/plugins/hoster/FilefactoryCom.py b/module/plugins/hoster/FilefactoryCom.py
index 637f3b2e0..b134abf30 100644
--- a/module/plugins/hoster/FilefactoryCom.py
+++ b/module/plugins/hoster/FilefactoryCom.py
@@ -10,9 +10,11 @@ def get_info(urls):
for url in urls:
h = get_url(url, just_header=True)
m = re.search(r'Location: (.+)\r\n', h)
+
if m and not re.match(m.group(1), FilefactoryCom.__pattern__): #: It's a direct link! Skipping
yield (url, 0, 3, url)
- else: #: It's a standard html page
+ else:
+ #: It's a standard html page
yield parse_fileInfo(FilefactoryCom, url, get_url(url))
diff --git a/module/plugins/hoster/FilerNet.py b/module/plugins/hoster/FilerNet.py
index 37c88dec7..db998f06d 100644
--- a/module/plugins/hoster/FilerNet.py
+++ b/module/plugins/hoster/FilerNet.py
@@ -48,19 +48,13 @@ class FilerNet(SimpleHoster):
recaptcha = ReCaptcha(self)
response, challenge = recaptcha.challenge()
- #@NOTE: Work-around for v0.4.9 just_header issue
- #@TODO: Check for v0.4.10
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
- self.load(pyfile.url, post={'recaptcha_challenge_field': challenge,
- 'recaptcha_response_field' : response,
- 'hash' : inputs['hash']})
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
-
- if 'location' in self.req.http.header.lower():
- self.captcha.correct()
- self.link = re.search(r'location: (\S+)', self.req.http.header, re.I).group(1)
- else:
- self.retry_captcha()
+ header = self.load(pyfile.url,
+ post={'recaptcha_challenge_field': challenge,
+ 'recaptcha_response_field' : response,
+ 'hash' : inputs['hash']},
+ just_header=True)
+
+ self.link = header.get('location')
getInfo = create_getInfo(FilerNet)
diff --git a/module/plugins/hoster/GigapetaCom.py b/module/plugins/hoster/GigapetaCom.py
index 85e5e4843..da2f82f8f 100644
--- a/module/plugins/hoster/GigapetaCom.py
+++ b/module/plugins/hoster/GigapetaCom.py
@@ -34,26 +34,16 @@ class GigapetaCom(SimpleHoster):
captcha_key = str(random.randint(1, 100000000))
captcha_url = "http://gigapeta.com/img/captcha.gif?x=%s" % captcha_key
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
-
self.check_errors()
captcha = self.captcha.decrypt(captcha_url)
- self.html = self.load(pyfile.url, post={
- 'captcha_key': captcha_key,
- 'captcha': captcha,
- 'download': "Download"})
-
- m = re.search(r'Location\s*:\s*(.+)', self.req.http.header, re.I)
- if m is not None:
- self.captcha.correct()
- self.link = m.group(1)
-
- elif "Entered figures don&#96;t coincide with the picture" in self.html:
- self.retry_captcha()
-
+ header = self.load(pyfile.url,
+ post={'captcha_key': captcha_key,
+ 'captcha' : captcha,
+ 'download' : "Download"},
+ just_header=True)
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
+ self.link = header.get('location')
getInfo = create_getInfo(GigapetaCom)
diff --git a/module/plugins/hoster/QuickshareCz.py b/module/plugins/hoster/QuickshareCz.py
index 62240667c..1bbc05d87 100644
--- a/module/plugins/hoster/QuickshareCz.py
+++ b/module/plugins/hoster/QuickshareCz.py
@@ -60,14 +60,11 @@ class QuickshareCz(SimpleHoster):
data = dict((x, self.jsvars[x]) for x in self.jsvars if x in ("ID1", "ID2", "ID3", "ID4"))
self.log_debug("FREE URL1:" + download_url, data)
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
- self.load(download_url, post=data)
- self.header = self.req.http.header
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
-
- m = re.search(r'Location\s*:\s*(.+)', self.header, re.I)
- if m is None:
- self.fail(_("File not found"))
+ header = self.load(download_url, post=data, just_header=True)
+
+ self.link = header.get('location')
+        if not self.link:
+            self.fail(_("File not found"))
-        self.link = m.group(1)
self.log_debug("FREE URL2:" + self.link)
diff --git a/module/plugins/hoster/StreamCz.py b/module/plugins/hoster/StreamCz.py
index a5578fd96..632a2933e 100644
--- a/module/plugins/hoster/StreamCz.py
+++ b/module/plugins/hoster/StreamCz.py
@@ -10,13 +10,13 @@ def get_info(urls):
result = []
for url in urls:
-
html = get_url(url)
if re.search(StreamCz.OFFLINE_PATTERN, html):
#: File offline
result.append((url, 0, 1, url))
else:
result.append((url, 0, 2, url))
+
yield result
diff --git a/module/plugins/hoster/UnibytesCom.py b/module/plugins/hoster/UnibytesCom.py
index c0bb6a13b..f06bd4421 100644
--- a/module/plugins/hoster/UnibytesCom.py
+++ b/module/plugins/hoster/UnibytesCom.py
@@ -30,14 +30,14 @@ class UnibytesCom(SimpleHoster):
def handle_free(self, pyfile):
- domain = "http://www.%s/" % self.PLUGIN_DOMAIN
+ domain = "http://www.%s/" % self.PLUGIN_DOMAIN
action, post_data = self.parse_html_form('id="startForm"')
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
-
for _i in xrange(3):
self.log_debug(action, post_data)
- self.html = self.load(urlparse.urljoin(domain, action), post=post_data)
+ self.html = self.load(urlparse.urljoin(domain, action),
+ post=post_data,
+ redirect=False)
m = re.search(r'location:\s*(\S+)', self.req.http.header, re.I)
if m is not None:
@@ -67,7 +67,5 @@ class UnibytesCom(SimpleHoster):
elif last_step in ("captcha", "last"):
post_data['captcha'] = self.captcha.decrypt(urlparse.urljoin(domain, "captcha.jpg"))
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
-
getInfo = create_getInfo(UnibytesCom)
diff --git a/module/plugins/hoster/UploadedTo.py b/module/plugins/hoster/UploadedTo.py
index a3b9fbc2f..265fefe8a 100644
--- a/module/plugins/hoster/UploadedTo.py
+++ b/module/plugins/hoster/UploadedTo.py
@@ -46,7 +46,7 @@ class UploadedTo(SimpleHoster):
for _i in xrange(5):
html = get_url("http://uploaded.net/api/filemultiple",
get={'apikey': cls.API_KEY,
- 'id_0': re.match(cls.__pattern__, url).group('ID')})
+ 'id_0' : re.match(cls.__pattern__, url).group('ID')})
if html != "can't find request":
api = html.split(",", 4)
diff --git a/module/plugins/internal/Base.py b/module/plugins/internal/Base.py
index 46502a9d3..64d00b48a 100644
--- a/module/plugins/internal/Base.py
+++ b/module/plugins/internal/Base.py
@@ -28,12 +28,6 @@ def parse_fileInfo(klass, url="", html=""):
def create_getInfo(klass):
def get_info(urls):
for url in urls:
- try:
- url = replace_patterns(url, klass.URL_REPLACEMENTS)
-
- except Exception:
- pass
-
yield parse_fileInfo(klass, url)
return get_info
@@ -52,7 +46,7 @@ def check_abort(fn):
class Base(Plugin):
__name__ = "Base"
__type__ = "base"
- __version__ = "0.05"
+ __version__ = "0.06"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
@@ -63,6 +57,9 @@ class Base(Plugin):
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+ URL_REPLACEMENTS = None
+
+
def __init__(self, pyfile):
self._init(pyfile.m.core)
@@ -110,11 +107,18 @@ class Base(Plugin):
@classmethod
def get_info(cls, url="", html=""):
- url = fixurl(url, unquote=True)
- info = {'name' : parse_name(url),
- 'size' : 0,
- 'status': 3 if url else 8,
- 'url' : url}
+ url = fixurl(url, unquote=True)
+
+ info = {'name' : parse_name(url),
+ 'pattern': {},
+ 'size' : 0,
+ 'status' : 3 if url else 8,
+ 'url' : replace_patterns(url, cls.URL_REPLACEMENTS)}
+
+ try:
+ info['pattern'] = re.match(cls.__pattern__, url).groupdict()
+ except Exception:
+ pass
return info
@@ -363,7 +367,13 @@ class Base(Plugin):
:param wait: time to wait in seconds before retry
:param msg: message passed to fail if attemps value was reached
"""
- id = inspect.currentframe().f_back.f_lineno
+ frame = inspect.currentframe()
+
+ try:
+ id = frame.f_back.f_lineno
+ finally:
+ del frame
+
if id not in self.retries:
self.retries[id] = 0
@@ -420,19 +430,19 @@ class Base(Plugin):
self.abort()
- def direct_link(self, url, follow_location=None):
+ def direct_link(self, url, redirect=False):
link = ""
- if follow_location is None:
- redirect = 1
+ if not redirect:
+ conn = 1
- elif type(follow_location) is int:
- redirect = max(follow_location, 1)
+ elif type(redirect) is int:
+ conn = max(redirect, 1)
else:
- redirect = self.get_config("maxredirs", 10, "UserAgentSwitcher")
+ conn = self.get_config("maxredirs", 5, plugin="UserAgentSwitcher")
- for i in xrange(redirect):
+ for i in xrange(conn):
try:
self.log_debug("Redirect #%d to: %s" % (i, url))
header = self.load(url, just_header=True)
@@ -469,7 +479,7 @@ class Base(Plugin):
if header.get('code') == 302:
link = location
- if follow_location:
+ if redirect:
url = location
continue
diff --git a/module/plugins/internal/Plugin.py b/module/plugins/internal/Plugin.py
index 7bfdd588c..d6eac7e58 100644
--- a/module/plugins/internal/Plugin.py
+++ b/module/plugins/internal/Plugin.py
@@ -11,6 +11,8 @@ import traceback
import urllib
import urlparse
+import pycurl
+
if os.name is not "nt":
import grp
import pwd
@@ -232,7 +234,7 @@ def chunks(iterable, size):
class Plugin(object):
__name__ = "Plugin"
__type__ = "plugin"
- __version__ = "0.51"
+ __version__ = "0.52"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
@@ -280,35 +282,35 @@ class Plugin(object):
self._log("debug", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def log_info(self, *args, **kwargs):
self._log("info", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def log_warning(self, *args, **kwargs):
self._log("warning", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def log_error(self, *args, **kwargs):
self._log("error", self.__type__, self.__name__, args)
if kwargs.get('trace'):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def log_critical(self, *args, **kwargs):
self._log("critical", self.__type__, self.__name__, args)
if kwargs.get('trace', True):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def set_permissions(self, path):
@@ -396,7 +398,8 @@ class Plugin(object):
raise Fail(encode(msg)) #@TODO: Remove `encode` in 0.4.10
- def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=True, multipart=False, req=None):
+ def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=True,
+ multipart=False, redirect=True, req=None):
"""
Load content at url and returns it
@@ -422,9 +425,24 @@ class Plugin(object):
if isinstance(cookies, list):
set_cookies(req.cj, cookies)
+ #@TODO: Move to network in 0.4.10
+ if not redirect:
+ req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
+
+ elif type(redirect) is int:
+ req.http.c.setopt(pycurl.MAXREDIRS, redirect)
+
html = req.load(url, get, post, ref, bool(cookies), just_header, multipart, decode is True) #@TODO: Fix network multipart in 0.4.10
#@TODO: Move to network in 0.4.10
+ if not redirect:
+ req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
+
+ elif type(redirect) is int:
+ req.http.c.setopt(pycurl.MAXREDIRS,
+ self.get_config("maxredirs", 5, plugin="UserAgentSwitcher"))
+
+ #@TODO: Move to network in 0.4.10
if decode:
html = html_unescape(html)
@@ -436,18 +454,23 @@ class Plugin(object):
if self.pyload.debug:
frame = inspect.currentframe()
- framefile = fs_join("tmp", self.__name__, "%s_line%s.dump.html" % (frame.f_back.f_code.co_name, frame.f_back.f_lineno))
+
try:
+ framefile = fs_join("tmp", self.__name__, "%s_line%s.dump.html" % (frame.f_back.f_code.co_name, frame.f_back.f_lineno))
+
if not exists(os.path.join("tmp", self.__name__)):
os.makedirs(os.path.join("tmp", self.__name__))
with open(framefile, "wb") as f:
- del frame #: Delete the frame or it wont be cleaned
+
f.write(encode(html))
except IOError, e:
self.log_error(e, trace=True)
+ finally:
+ del frame #: Delete the frame or it wont be cleaned
+
if not just_header:
return html
diff --git a/module/plugins/internal/SimpleHoster.py b/module/plugins/internal/SimpleHoster.py
index a6d179ec7..0f27ecc91 100644
--- a/module/plugins/internal/SimpleHoster.py
+++ b/module/plugins/internal/SimpleHoster.py
@@ -16,7 +16,7 @@ from module.utils import fixup, fs_encode, parseFileSize as parse_size
class SimpleHoster(Hoster):
__name__ = "SimpleHoster"
__type__ = "hoster"
- __version__ = "1.96"
+ __version__ = "1.97"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
@@ -133,31 +133,24 @@ class SimpleHoster(Hoster):
def get_info(cls, url="", html=""):
info = super(SimpleHoster, cls).get_info(url)
- try:
- info['pattern'] = re.match(cls.__pattern__, url).groupdict() #: Pattern groups will be saved here
-
- except Exception:
- info['pattern'] = {}
-
- info = cls.api_info(url)
- online = True if info['status'] is 2 else False
+ info.update(cls.api_info(url))
- if not html and not online:
+ if not html and info['status'] is not 2:
if not url:
info['error'] = "missing url"
info['status'] = 1
- elif info['status'] == 3:
+ elif info['status'] is 3:
try:
html = get_url(url, cookies=cls.COOKIES, decode=cls.TEXT_ENCODING)
except BadHeader, e:
info['error'] = "%d: %s" % (e.code, e.content)
- if e.code == 404:
+ if e.code is 404:
info['status'] = 1
- elif e.code == 503:
+ elif e.code is 503:
info['status'] = 6
except Exception:
@@ -183,30 +176,24 @@ class SimpleHoster(Hoster):
continue
else:
- online = True
-
- if online:
- info['status'] = 2
-
- if 'N' in info['pattern']:
- name = replace_patterns(info['pattern']['N'], cls.NAME_REPLACEMENTS)
- info['name'] = parse_name(name)
+ info['status'] = 2
- if 'S' in info['pattern']:
- size = replace_patterns(info['pattern']['S'] + info['pattern']['U'] if 'U' in info['pattern'] else info['pattern']['S'],
- cls.SIZE_REPLACEMENTS)
- info['size'] = parse_size(size)
+ if 'N' in info['pattern']:
+ name = replace_patterns(info['pattern']['N'], cls.NAME_REPLACEMENTS)
+ info['name'] = parse_name(name)
- elif isinstance(info['size'], basestring):
- unit = info['units'] if 'units' in info else None
- info['size'] = parse_size(info['size'], unit)
+ if 'S' in info['pattern']:
+ size = replace_patterns(info['pattern']['S'] + info['pattern']['U'] if 'U' in info['pattern'] else info['pattern']['S'],
+ cls.SIZE_REPLACEMENTS)
+ info['size'] = parse_size(size)
- if 'H' in info['pattern']:
- hashtype = info['pattern']['T'] if 'T' in info['pattern'] else "hash"
- info[hashtype] = info['pattern']['H']
+ elif isinstance(info['size'], basestring):
+ unit = info['units'] if 'units' in info else None
+ info['size'] = parse_size(info['size'], unit)
- if not info['pattern']:
- info.pop('pattern', None)
+ if 'H' in info['pattern']:
+ hashtype = info['pattern']['T'] if 'T' in info['pattern'] else "hash"
+ info[hashtype] = info['pattern']['H']
return info
diff --git a/module/plugins/internal/XFSHoster.py b/module/plugins/internal/XFSHoster.py
index 2e9cbceb2..80ef9a977 100644
--- a/module/plugins/internal/XFSHoster.py
+++ b/module/plugins/internal/XFSHoster.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
-import pycurl
import random
import re
@@ -113,11 +112,7 @@ class XFSHoster(SimpleHoster):
data = self.get_post_parameters()
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
-
- self.html = self.load(pyfile.url, post=data)
-
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
+ self.html = self.load(pyfile.url, post=data, redirect=False)
m = re.search(r'Location\s*:\s*(.+)', self.req.http.header, re.I)
if m and not "op=" in m.group(1):