summary refs log tree commit diff stats
path: root/module/plugins/internal
diff options
context:
space:
mode:
authorGravatar Walter Purcaro <vuolter@users.noreply.github.com> 2015-10-08 12:24:34 +0200
committerGravatar Walter Purcaro <vuolter@users.noreply.github.com> 2015-10-08 12:24:34 +0200
commitc59aa4057608cd47084c66e41f363b5f981f2816 (patch)
tree73d787e55826537710ab526f583c46b0623c6c85 /module/plugins/internal
parentSpare improvements (diff)
downloadpyload-c59aa4057608cd47084c66e41f363b5f981f2816.tar.xz
Fixpack (5)
Diffstat (limited to 'module/plugins/internal')
-rw-r--r-- module/plugins/internal/Base.py | 52
-rw-r--r-- module/plugins/internal/Plugin.py | 41
-rw-r--r-- module/plugins/internal/SimpleHoster.py | 53
-rw-r--r-- module/plugins/internal/XFSHoster.py | 7
4 files changed, 84 insertions, 69 deletions
diff --git a/module/plugins/internal/Base.py b/module/plugins/internal/Base.py
index 46502a9d3..64d00b48a 100644
--- a/module/plugins/internal/Base.py
+++ b/module/plugins/internal/Base.py
@@ -28,12 +28,6 @@ def parse_fileInfo(klass, url="", html=""):
def create_getInfo(klass):
def get_info(urls):
for url in urls:
- try:
- url = replace_patterns(url, klass.URL_REPLACEMENTS)
-
- except Exception:
- pass
-
yield parse_fileInfo(klass, url)
return get_info
@@ -52,7 +46,7 @@ def check_abort(fn):
class Base(Plugin):
__name__ = "Base"
__type__ = "base"
- __version__ = "0.05"
+ __version__ = "0.06"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
@@ -63,6 +57,9 @@ class Base(Plugin):
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
+ URL_REPLACEMENTS = None
+
+
def __init__(self, pyfile):
self._init(pyfile.m.core)
@@ -110,11 +107,18 @@ class Base(Plugin):
@classmethod
def get_info(cls, url="", html=""):
- url = fixurl(url, unquote=True)
- info = {'name' : parse_name(url),
- 'size' : 0,
- 'status': 3 if url else 8,
- 'url' : url}
+ url = fixurl(url, unquote=True)
+
+ info = {'name' : parse_name(url),
+ 'pattern': {},
+ 'size' : 0,
+ 'status' : 3 if url else 8,
+ 'url' : replace_patterns(url, cls.URL_REPLACEMENTS)}
+
+ try:
+ info['pattern'] = re.match(cls.__pattern__, url).groupdict()
+ except Exception:
+ pass
return info
@@ -363,7 +367,13 @@ class Base(Plugin):
:param wait: time to wait in seconds before retry
:param msg: message passed to fail if attemps value was reached
"""
- id = inspect.currentframe().f_back.f_lineno
+ frame = inspect.currentframe()
+
+ try:
+ id = frame.f_back.f_lineno
+ finally:
+ del frame
+
if id not in self.retries:
self.retries[id] = 0
@@ -420,19 +430,19 @@ class Base(Plugin):
self.abort()
- def direct_link(self, url, follow_location=None):
+ def direct_link(self, url, redirect=False):
link = ""
- if follow_location is None:
- redirect = 1
+ if not redirect:
+ conn = 1
- elif type(follow_location) is int:
- redirect = max(follow_location, 1)
+ elif type(redirect) is int:
+ conn = max(redirect, 1)
else:
- redirect = self.get_config("maxredirs", 10, "UserAgentSwitcher")
+ conn = self.get_config("maxredirs", 5, plugin="UserAgentSwitcher")
- for i in xrange(redirect):
+ for i in xrange(conn):
try:
self.log_debug("Redirect #%d to: %s" % (i, url))
header = self.load(url, just_header=True)
@@ -469,7 +479,7 @@ class Base(Plugin):
if header.get('code') == 302:
link = location
- if follow_location:
+ if redirect:
url = location
continue
diff --git a/module/plugins/internal/Plugin.py b/module/plugins/internal/Plugin.py
index 7bfdd588c..d6eac7e58 100644
--- a/module/plugins/internal/Plugin.py
+++ b/module/plugins/internal/Plugin.py
@@ -11,6 +11,8 @@ import traceback
import urllib
import urlparse
+import pycurl
+
if os.name is not "nt":
import grp
import pwd
@@ -232,7 +234,7 @@ def chunks(iterable, size):
class Plugin(object):
__name__ = "Plugin"
__type__ = "plugin"
- __version__ = "0.51"
+ __version__ = "0.52"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
@@ -280,35 +282,35 @@ class Plugin(object):
self._log("debug", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def log_info(self, *args, **kwargs):
self._log("info", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def log_warning(self, *args, **kwargs):
self._log("warning", self.__type__, self.__name__, args)
if self.pyload.debug and kwargs.get('trace'):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def log_error(self, *args, **kwargs):
self._log("error", self.__type__, self.__name__, args)
if kwargs.get('trace'):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def log_critical(self, *args, **kwargs):
self._log("critical", self.__type__, self.__name__, args)
if kwargs.get('trace', True):
print "Traceback (most recent call last):"
- traceback.print_stack(inspect.currentframe().f_back)
+ traceback.print_stack()
def set_permissions(self, path):
@@ -396,7 +398,8 @@ class Plugin(object):
raise Fail(encode(msg)) #@TODO: Remove `encode` in 0.4.10
- def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=True, multipart=False, req=None):
+ def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=True,
+ multipart=False, redirect=True, req=None):
"""
Load content at url and returns it
@@ -422,9 +425,24 @@ class Plugin(object):
if isinstance(cookies, list):
set_cookies(req.cj, cookies)
+ #@TODO: Move to network in 0.4.10
+ if not redirect:
+ req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
+
+ elif type(redirect) is int:
+ req.http.c.setopt(pycurl.MAXREDIRS, redirect)
+
html = req.load(url, get, post, ref, bool(cookies), just_header, multipart, decode is True) #@TODO: Fix network multipart in 0.4.10
#@TODO: Move to network in 0.4.10
+ if not redirect:
+ req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
+
+ elif type(redirect) is int:
+ req.http.c.setopt(pycurl.MAXREDIRS,
+ self.get_config("maxredirs", 5, plugin="UserAgentSwitcher"))
+
+ #@TODO: Move to network in 0.4.10
if decode:
html = html_unescape(html)
@@ -436,18 +454,23 @@ class Plugin(object):
if self.pyload.debug:
frame = inspect.currentframe()
- framefile = fs_join("tmp", self.__name__, "%s_line%s.dump.html" % (frame.f_back.f_code.co_name, frame.f_back.f_lineno))
+
try:
+ framefile = fs_join("tmp", self.__name__, "%s_line%s.dump.html" % (frame.f_back.f_code.co_name, frame.f_back.f_lineno))
+
if not exists(os.path.join("tmp", self.__name__)):
os.makedirs(os.path.join("tmp", self.__name__))
with open(framefile, "wb") as f:
- del frame #: Delete the frame or it wont be cleaned
+
f.write(encode(html))
except IOError, e:
self.log_error(e, trace=True)
+ finally:
+ del frame #: Delete the frame or it wont be cleaned
+
if not just_header:
return html
diff --git a/module/plugins/internal/SimpleHoster.py b/module/plugins/internal/SimpleHoster.py
index a6d179ec7..0f27ecc91 100644
--- a/module/plugins/internal/SimpleHoster.py
+++ b/module/plugins/internal/SimpleHoster.py
@@ -16,7 +16,7 @@ from module.utils import fixup, fs_encode, parseFileSize as parse_size
class SimpleHoster(Hoster):
__name__ = "SimpleHoster"
__type__ = "hoster"
- __version__ = "1.96"
+ __version__ = "1.97"
__status__ = "testing"
__pattern__ = r'^unmatchable$'
@@ -133,31 +133,24 @@ class SimpleHoster(Hoster):
def get_info(cls, url="", html=""):
info = super(SimpleHoster, cls).get_info(url)
- try:
- info['pattern'] = re.match(cls.__pattern__, url).groupdict() #: Pattern groups will be saved here
-
- except Exception:
- info['pattern'] = {}
-
- info = cls.api_info(url)
- online = True if info['status'] is 2 else False
+ info.update(cls.api_info(url))
- if not html and not online:
+ if not html and info['status'] is not 2:
if not url:
info['error'] = "missing url"
info['status'] = 1
- elif info['status'] == 3:
+ elif info['status'] is 3:
try:
html = get_url(url, cookies=cls.COOKIES, decode=cls.TEXT_ENCODING)
except BadHeader, e:
info['error'] = "%d: %s" % (e.code, e.content)
- if e.code == 404:
+ if e.code is 404:
info['status'] = 1
- elif e.code == 503:
+ elif e.code is 503:
info['status'] = 6
except Exception:
@@ -183,30 +176,24 @@ class SimpleHoster(Hoster):
continue
else:
- online = True
-
- if online:
- info['status'] = 2
-
- if 'N' in info['pattern']:
- name = replace_patterns(info['pattern']['N'], cls.NAME_REPLACEMENTS)
- info['name'] = parse_name(name)
+ info['status'] = 2
- if 'S' in info['pattern']:
- size = replace_patterns(info['pattern']['S'] + info['pattern']['U'] if 'U' in info['pattern'] else info['pattern']['S'],
- cls.SIZE_REPLACEMENTS)
- info['size'] = parse_size(size)
+ if 'N' in info['pattern']:
+ name = replace_patterns(info['pattern']['N'], cls.NAME_REPLACEMENTS)
+ info['name'] = parse_name(name)
- elif isinstance(info['size'], basestring):
- unit = info['units'] if 'units' in info else None
- info['size'] = parse_size(info['size'], unit)
+ if 'S' in info['pattern']:
+ size = replace_patterns(info['pattern']['S'] + info['pattern']['U'] if 'U' in info['pattern'] else info['pattern']['S'],
+ cls.SIZE_REPLACEMENTS)
+ info['size'] = parse_size(size)
- if 'H' in info['pattern']:
- hashtype = info['pattern']['T'] if 'T' in info['pattern'] else "hash"
- info[hashtype] = info['pattern']['H']
+ elif isinstance(info['size'], basestring):
+ unit = info['units'] if 'units' in info else None
+ info['size'] = parse_size(info['size'], unit)
- if not info['pattern']:
- info.pop('pattern', None)
+ if 'H' in info['pattern']:
+ hashtype = info['pattern']['T'] if 'T' in info['pattern'] else "hash"
+ info[hashtype] = info['pattern']['H']
return info
diff --git a/module/plugins/internal/XFSHoster.py b/module/plugins/internal/XFSHoster.py
index 2e9cbceb2..80ef9a977 100644
--- a/module/plugins/internal/XFSHoster.py
+++ b/module/plugins/internal/XFSHoster.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
-import pycurl
import random
import re
@@ -113,11 +112,7 @@ class XFSHoster(SimpleHoster):
data = self.get_post_parameters()
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 0)
-
- self.html = self.load(pyfile.url, post=data)
-
- self.req.http.c.setopt(pycurl.FOLLOWLOCATION, 1)
+ self.html = self.load(pyfile.url, post=data, redirect=False)
m = re.search(r'Location\s*:\s*(.+)', self.req.http.header, re.I)
if m and not "op=" in m.group(1):