author      Walter Purcaro <vuolter@users.noreply.github.com>  2015-07-18 20:04:36 +0200
committer   Walter Purcaro <vuolter@users.noreply.github.com>  2015-07-18 20:04:36 +0200
commit      9e5d813d7721e351ac02ba72bdc473a7d77ba6b7 (patch)
tree        1a5167cea6492283bfb679c4efdb4c13534d844f
parent      Reorder some functions (diff)
download    pyload-9e5d813d7721e351ac02ba72bdc473a7d77ba6b7.tar.xz
Code cosmetics
-rw-r--r--  module/plugins/accounts/AlldebridCom.py      4
-rw-r--r--  module/plugins/captcha/LinksaveIn.py         1
-rw-r--r--  module/plugins/crypter/DuckCryptInfo.py      4
-rw-r--r--  module/plugins/crypter/HoerbuchIn.py         4
-rw-r--r--  module/plugins/crypter/RelinkUs.py           3
-rw-r--r--  module/plugins/crypter/SafelinkingNet.py     4
-rw-r--r--  module/plugins/crypter/SexuriaCom.py         4
-rw-r--r--  module/plugins/hooks/BypassCaptcha.py        4
-rw-r--r--  module/plugins/hooks/CaptchaBrotherhood.py   5
-rw-r--r--  module/plugins/hooks/DeathByCaptcha.py       4
-rw-r--r--  module/plugins/hooks/ExpertDecoders.py       4
-rw-r--r--  module/plugins/hooks/ImageTyperz.py          4
-rw-r--r--  module/plugins/hooks/MultiHome.py            2
-rw-r--r--  module/plugins/hooks/SkipRev.py              2
-rw-r--r--  module/plugins/hoster/AlldebridCom.py        4
-rw-r--r--  module/plugins/hoster/BasePlugin.py          2
-rw-r--r--  module/plugins/hoster/CzshareCom.py          4
-rw-r--r--  module/plugins/hoster/DailymotionCom.py      4
-rw-r--r--  module/plugins/hoster/DlFreeFr.py            4
-rw-r--r--  module/plugins/hoster/FilefactoryCom.py      8
-rw-r--r--  module/plugins/hoster/FilesMailRu.py         7
-rw-r--r--  module/plugins/hoster/FileserveCom.py        17
-rw-r--r--  module/plugins/hoster/FlyFilesNet.py         3
-rw-r--r--  module/plugins/hoster/FreakshareCom.py       3
-rw-r--r--  module/plugins/hoster/FshareVn.py            8
-rw-r--r--  module/plugins/hoster/LetitbitNet.py         4
-rw-r--r--  module/plugins/hoster/LuckyShareNet.py       3
-rw-r--r--  module/plugins/hoster/MegaRapidCz.py         8
-rw-r--r--  module/plugins/hoster/OboomCom.py            3
-rw-r--r--  module/plugins/hoster/OverLoadMe.py          4
-rw-r--r--  module/plugins/hoster/RPNetBiz.py            6
-rw-r--r--  module/plugins/hoster/RapiduNet.py           2
-rw-r--r--  module/plugins/hoster/RealdebridCom.py       4
-rw-r--r--  module/plugins/hoster/ShareonlineBiz.py      6
-rw-r--r--  module/plugins/hoster/ShareplaceCom.py       4
-rw-r--r--  module/plugins/hoster/StreamCz.py            4
-rw-r--r--  module/plugins/hoster/TurbobitNet.py         3
-rw-r--r--  module/plugins/hoster/UploadableCh.py        2
-rw-r--r--  module/plugins/hoster/UploadedTo.py          4
-rw-r--r--  module/plugins/hoster/WebshareCz.py          6
-rw-r--r--  module/plugins/hoster/Xdcc.py                6
-rw-r--r--  module/plugins/hoster/YourfilesTo.py         4
-rw-r--r--  module/plugins/hoster/ZippyshareCom.py       4
-rw-r--r--  module/plugins/internal/Account.py           6
-rw-r--r--  module/plugins/internal/AdYouLike.py         2
-rw-r--r--  module/plugins/internal/AdsCaptcha.py        2
-rw-r--r--  module/plugins/internal/DeadCrypter.py       13
-rw-r--r--  module/plugins/internal/DeadHoster.py        13
-rw-r--r--  module/plugins/internal/Hoster.py            6
-rw-r--r--  module/plugins/internal/MultiHook.py         4
-rw-r--r--  module/plugins/internal/Plugin.py            2
-rw-r--r--  module/plugins/internal/ReCaptcha.py         2
-rw-r--r--  module/plugins/internal/SimpleHoster.py      8
-rw-r--r--  module/plugins/internal/SolveMedia.py        6
54 files changed, 118 insertions(+), 136 deletions(-)
diff --git a/module/plugins/accounts/AlldebridCom.py b/module/plugins/accounts/AlldebridCom.py
index 903499d5b..cfa7840bc 100644
--- a/module/plugins/accounts/AlldebridCom.py
+++ b/module/plugins/accounts/AlldebridCom.py
@@ -4,7 +4,7 @@ import re
import time
import xml.dom.minidom as dom
-from BeautifulSoup import BeautifulSoup
+import BeautifulSoup
from module.plugins.internal.Account import Account
@@ -22,7 +22,7 @@ class AlldebridCom(Account):
def load_account_info(self, user, req):
data = self.get_account_data(user)
html = self.load("http://www.alldebrid.com/account/")
- soup = BeautifulSoup(html)
+ soup = BeautifulSoup.BeautifulSoup(html)
# Try to parse expiration date directly from the control panel page (better accuracy)
try:
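
This commit repeatedly swaps `from BeautifulSoup import BeautifulSoup` for a plain module import, so the parser class is qualified through its module. A minimal sketch of the new call shape, not part of the diff, assuming BeautifulSoup 3 on Python 2 (the HTML string is illustrative, not taken from the plugin):

    # Module import, as used throughout this commit: the class is
    # referenced through the BeautifulSoup module instead of being
    # pulled directly into the plugin namespace.
    import BeautifulSoup

    html = "<html><body><div class='folderbox'>links</div></body></html>"
    soup = BeautifulSoup.BeautifulSoup(html)
    print soup.find("div", attrs={"class": "folderbox"}).text
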
diff --git a/module/plugins/captcha/LinksaveIn.py b/module/plugins/captcha/LinksaveIn.py
index cf00acf15..a9fe2d630 100644
--- a/module/plugins/captcha/LinksaveIn.py
+++ b/module/plugins/captcha/LinksaveIn.py
@@ -2,6 +2,7 @@
try:
from PIL import Image
+
except ImportError:
import Image
diff --git a/module/plugins/crypter/DuckCryptInfo.py b/module/plugins/crypter/DuckCryptInfo.py
index 7541ac1a6..7dc0fc6bc 100644
--- a/module/plugins/crypter/DuckCryptInfo.py
+++ b/module/plugins/crypter/DuckCryptInfo.py
@@ -2,7 +2,7 @@
import re
-from BeautifulSoup import BeautifulSoup
+import BeautifulSoup
from module.plugins.internal.Crypter import Crypter
@@ -41,7 +41,7 @@ class DuckCryptInfo(Crypter):
m = re.match(self.__pattern__, html)
self.log_debug("Redirectet to " + str(m.group(0)))
html = self.load(str(m.group(0)))
- soup = BeautifulSoup(html)
+ soup = BeautifulSoup.BeautifulSoup(html)
cryptlinks = soup.findAll("div", attrs={"class": "folderbox"})
self.log_debug("Redirectet to " + str(cryptlinks))
if not cryptlinks:
diff --git a/module/plugins/crypter/HoerbuchIn.py b/module/plugins/crypter/HoerbuchIn.py
index bf313dd99..1a897a3c1 100644
--- a/module/plugins/crypter/HoerbuchIn.py
+++ b/module/plugins/crypter/HoerbuchIn.py
@@ -2,7 +2,7 @@
import re
-from BeautifulSoup import BeautifulSoup, BeautifulStoneSoup
+import BeautifulSoup
from module.plugins.internal.Crypter import Crypter
@@ -31,7 +31,7 @@ class HoerbuchIn(Crypter):
if self.article.match(pyfile.url):
html = self.load(pyfile.url)
- soup = BeautifulSoup(html, convertEntities=BeautifulStoneSoup.HTML_ENTITIES)
+ soup = BeautifulSoup.BeautifulSoup(html, convertEntities=BeautifulSoup.BeautifulStoneSoup.HTML_ENTITIES)
abookname = soup.find("a", attrs={"rel": "bookmark"}).text
for a in soup.findAll("a", attrs={"href": self.protection}):
diff --git a/module/plugins/crypter/RelinkUs.py b/module/plugins/crypter/RelinkUs.py
index 01fb7bffc..b270ccbbd 100644
--- a/module/plugins/crypter/RelinkUs.py
+++ b/module/plugins/crypter/RelinkUs.py
@@ -252,8 +252,7 @@ class RelinkUs(Crypter):
except Exception, detail:
self.log_debug("Error decrypting Web link %s, %s" % (index, detail))
- self.set_wait(4)
- self.wait()
+ self.wait(4)
return package_links
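
Several plugins below replace the two-step `self.set_wait(n)` / `self.wait()` sequence with a single `self.wait(n)` call. A rough sketch, not part of the diff, of how such a combined method can be structured, assuming the base class keeps `set_wait()` for the delay bookkeeping (the bodies are illustrative, not pyload's actual implementation):

    import time

    class Hoster(object):
        def set_wait(self, seconds, reconnect=False):
            self.wait_time = seconds          # remember how long to pause
            self.want_reconnect = reconnect   # optionally request a reconnect

        def wait(self, seconds=None, reconnect=None):
            # When called with arguments, fold the old set_wait() step in
            if seconds is not None:
                self.set_wait(seconds, bool(reconnect))
            time.sleep(self.wait_time)        # real plugins block via the core scheduler

    # e.g. instead of: self.set_wait(4); self.wait()
    # plugins now call: self.wait(4)
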
diff --git a/module/plugins/crypter/SafelinkingNet.py b/module/plugins/crypter/SafelinkingNet.py
index 973d62985..733e4c79e 100644
--- a/module/plugins/crypter/SafelinkingNet.py
+++ b/module/plugins/crypter/SafelinkingNet.py
@@ -2,7 +2,7 @@
import re
-from BeautifulSoup import BeautifulSoup
+import BeautifulSoup
from module.common.json_layer import json_loads
from module.plugins.internal.Crypter import Crypter
@@ -66,7 +66,7 @@ class SafelinkingNet(Crypter):
break
pyfile.package().password = ""
- soup = BeautifulSoup(self.html)
+ soup = BeautifulSoup.BeautifulSoup(self.html)
scripts = soup.findAll("script")
for s in scripts:
if "d_links" in s.text:
diff --git a/module/plugins/crypter/SexuriaCom.py b/module/plugins/crypter/SexuriaCom.py
index db827ca27..0db90b73b 100644
--- a/module/plugins/crypter/SexuriaCom.py
+++ b/module/plugins/crypter/SexuriaCom.py
@@ -76,13 +76,13 @@ class SexuriaCom(Crypter):
html = self.load(url)
links = re.findall(self.PATTERN_REDIRECT_LINKS, html, re.I)
if len(links) == 0:
- self.log_error("Broken for link %s" % link)
+ self.log_error(_("Broken for link: %s") % link)
else:
for link in links:
link = link.replace("http://sexuria.com/", "http://www.sexuria.com/")
finallink = self.load(link, just_header=True)['location']
if not finallink or "sexuria.com/" in finallink:
- self.log_error("Broken for link %s" % link)
+ self.log_error(_("Broken for link: %s") % link)
else:
linklist.append(finallink)
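
The `log_error()` strings above are now wrapped in `_()` so they can be collected for translation. A small standalone sketch, not part of the diff, of what that gettext hook amounts to (pyload installs `_` globally at startup; here it is set up explicitly, and the link value is illustrative):

    # -*- coding: utf-8 -*-
    import gettext

    # Install _() into the builtins; it falls back to the identity
    # translation when no catalog for the current locale is found.
    gettext.install("pyload", unicode=True)

    link = "http://www.example.com/some-link"
    print _("Broken for link: %s") % link
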
diff --git a/module/plugins/hooks/BypassCaptcha.py b/module/plugins/hooks/BypassCaptcha.py
index cb91c06ce..ab08c68c1 100644
--- a/module/plugins/hooks/BypassCaptcha.py
+++ b/module/plugins/hooks/BypassCaptcha.py
@@ -3,7 +3,7 @@
import pycurl
from module.network.HTTPRequest import BadHeader
-from module.network.RequestFactory import getRequest
+from module.network.RequestFactory import getRequest as get_request
from module.plugins.internal.Hook import Hook, threaded
@@ -61,7 +61,7 @@ class BypassCaptcha(Hook):
def submit(self, captcha, captchaType="file", match=None):
- req = getRequest()
+ req = get_request()
# raise timeout threshold
req.c.setopt(pycurl.LOW_SPEED_TIME, 80)
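
The `getRequest as get_request` alias keeps the camelCase factory exported by the core while letting the plugin code read in snake_case. The same pattern in isolation, not part of the diff, with a stand-in factory instead of the real `module.network.RequestFactory` (all names below are illustrative):

    class _Request(object):
        """Stand-in for the object returned by the core request factory."""
        def close(self):
            pass

    def getRequest(plugin=None, account=None):   # legacy camelCase name
        return _Request()

    # Plugins alias the legacy name on import, so call sites stay snake_case:
    get_request = getRequest

    req = get_request()
    try:
        pass  # configure req and perform the request here
    finally:
        req.close()
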
diff --git a/module/plugins/hooks/CaptchaBrotherhood.py b/module/plugins/hooks/CaptchaBrotherhood.py
index b2f370f32..d35bc720d 100644
--- a/module/plugins/hooks/CaptchaBrotherhood.py
+++ b/module/plugins/hooks/CaptchaBrotherhood.py
@@ -9,10 +9,11 @@ import urllib
try:
from PIL import Image
+
except ImportError:
import Image
-from module.network.RequestFactory import getRequest
+from module.network.RequestFactory import getRequest as get_request
from module.plugins.internal.Hook import Hook, threaded
@@ -86,7 +87,7 @@ class CaptchaBrotherhood(Hook):
except Exception, e:
raise CaptchaBrotherhoodException("Reading or converting captcha image failed: %s" % e)
- req = getRequest()
+ req = get_request()
url = "%ssendNewCaptcha.aspx?%s" % (self.API_URL,
urllib.urlencode({'username' : self.get_config('username'),
diff --git a/module/plugins/hooks/DeathByCaptcha.py b/module/plugins/hooks/DeathByCaptcha.py
index ec2554a8f..43bad2d0b 100644
--- a/module/plugins/hooks/DeathByCaptcha.py
+++ b/module/plugins/hooks/DeathByCaptcha.py
@@ -10,7 +10,7 @@ from base64 import b64encode
from module.common.json_layer import json_loads
from module.network.HTTPRequest import BadHeader
-from module.network.RequestFactory import getRequest
+from module.network.RequestFactory import getRequest as get_request
from module.plugins.internal.Hook import Hook, threaded
@@ -73,7 +73,7 @@ class DeathByCaptcha(Hook):
def api_response(self, api="captcha", post=False, multipart=False):
- req = getRequest()
+ req = get_request()
req.c.setopt(pycurl.HTTPHEADER, ["Accept: application/json", "User-Agent: pyLoad %s" % self.core.version])
if post:
diff --git a/module/plugins/hooks/ExpertDecoders.py b/module/plugins/hooks/ExpertDecoders.py
index 6ec1f8bf1..919445db8 100644
--- a/module/plugins/hooks/ExpertDecoders.py
+++ b/module/plugins/hooks/ExpertDecoders.py
@@ -8,7 +8,7 @@ import uuid
from base64 import b64encode
from module.network.HTTPRequest import BadHeader
-from module.network.RequestFactory import getRequest
+from module.network.RequestFactory import getRequest as get_request
from module.plugins.internal.Hook import Hook, threaded
@@ -55,7 +55,7 @@ class ExpertDecoders(Hook):
with open(task.captchaFile, 'rb') as f:
data = f.read()
- req = getRequest()
+ req = get_request()
# raise timeout threshold
req.c.setopt(pycurl.LOW_SPEED_TIME, 80)
diff --git a/module/plugins/hooks/ImageTyperz.py b/module/plugins/hooks/ImageTyperz.py
index c70518c17..5e2f21c8b 100644
--- a/module/plugins/hooks/ImageTyperz.py
+++ b/module/plugins/hooks/ImageTyperz.py
@@ -7,7 +7,7 @@ import re
from base64 import b64encode
-from module.network.RequestFactory import getRequest
+from module.network.RequestFactory import getRequest as get_request
from module.plugins.internal.Hook import Hook, threaded
@@ -74,7 +74,7 @@ class ImageTyperz(Hook):
def submit(self, captcha, captchaType="file", match=None):
- req = getRequest()
+ req = get_request()
# raise timeout threshold
req.c.setopt(pycurl.LOW_SPEED_TIME, 80)
diff --git a/module/plugins/hooks/MultiHome.py b/module/plugins/hooks/MultiHome.py
index 12a65c601..7e4b5e583 100644
--- a/module/plugins/hooks/MultiHome.py
+++ b/module/plugins/hooks/MultiHome.py
@@ -56,7 +56,7 @@ class MultiHome(Hook):
self.log_debug("Using address", iface.adress)
return oldGetRequest(pluginName, account)
- requestFactory.getRequest = getRequest
+ requestFactory.getRequest = get_request
def best_interface(self, pluginName, account):
diff --git a/module/plugins/hooks/SkipRev.py b/module/plugins/hooks/SkipRev.py
index 8fd49889c..9f5f4f231 100644
--- a/module/plugins/hooks/SkipRev.py
+++ b/module/plugins/hooks/SkipRev.py
@@ -41,7 +41,7 @@ class SkipRev(Hook):
if hasattr(pyfile.pluginmodule, "getInfo"): #@NOTE: getInfo is deprecated in 0.4.10
return pyfile.pluginmodule.get_info([pyfile.url]).next()[0]
else:
- self.log_warning("Unable to grab file name")
+ self.log_warning(_("Unable to grab file name"))
return urlparse.urlparse(urllib.unquote(pyfile.url)).path.split('/')[-1]
diff --git a/module/plugins/hoster/AlldebridCom.py b/module/plugins/hoster/AlldebridCom.py
index 46379bdc9..73df734dd 100644
--- a/module/plugins/hoster/AlldebridCom.py
+++ b/module/plugins/hoster/AlldebridCom.py
@@ -5,7 +5,7 @@ import urllib
from module.common.json_layer import json_loads
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
-from module.utils import parseFileSize
+from module.utils import parseFileSize as parse_size
class AlldebridCom(MultiHoster):
@@ -43,7 +43,7 @@ class AlldebridCom(MultiHoster):
else:
if pyfile.name and not pyfile.name.endswith('.tmp'):
pyfile.name = data['filename']
- pyfile.size = parseFileSize(data['filesize'])
+ pyfile.size = parse_size(data['filesize'])
self.link = data['link']
diff --git a/module/plugins/hoster/BasePlugin.py b/module/plugins/hoster/BasePlugin.py
index 909b350c9..ed8c8b32c 100644
--- a/module/plugins/hoster/BasePlugin.py
+++ b/module/plugins/hoster/BasePlugin.py
@@ -97,7 +97,7 @@ class BasePlugin(Hoster):
except Exception:
pass
- self.log_warning("Check result: " + errmsg, "Waiting 1 minute and retry")
+ self.log_warning(_("Check result: ") + errmsg, _("Waiting 1 minute and retry"))
self.retry(3, 60, errmsg)
diff --git a/module/plugins/hoster/CzshareCom.py b/module/plugins/hoster/CzshareCom.py
index 0e28f31dd..d44b4046f 100644
--- a/module/plugins/hoster/CzshareCom.py
+++ b/module/plugins/hoster/CzshareCom.py
@@ -6,7 +6,7 @@
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
-from module.utils import parseFileSize
+from module.utils import parseFileSize as parse_size
class CzshareCom(SimpleHoster):
@@ -51,7 +51,7 @@ class CzshareCom(SimpleHoster):
#: check user credit
try:
- credit = parseFileSize(m.group(1).replace(' ', ''), m.group(2))
+ credit = parse_size(m.group(1).replace(' ', ''), m.group(2))
self.log_info(_("Premium download for %i KiB of Credit") % (self.pyfile.size / 1024))
self.log_info(_("User %s has %i KiB left") % (self.user, credit / 1024))
if credit < self.pyfile.size:
diff --git a/module/plugins/hoster/DailymotionCom.py b/module/plugins/hoster/DailymotionCom.py
index 18f84ff56..f549bbd79 100644
--- a/module/plugins/hoster/DailymotionCom.py
+++ b/module/plugins/hoster/DailymotionCom.py
@@ -4,7 +4,7 @@ import re
from module.PyFile import statusMap
from module.common.json_layer import json_loads
-from module.network.RequestFactory import getURL
+from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.Hoster import Hoster
@@ -16,7 +16,7 @@ def get_info(urls):
for url in urls:
id = regex.match(url).group('ID')
- html = getURL(apiurl % id, get=request)
+ html = get_url(apiurl % id, get=request)
info = json_loads(html)
name = info['title'] + ".mp4" if "title" in info else url
diff --git a/module/plugins/hoster/DlFreeFr.py b/module/plugins/hoster/DlFreeFr.py
index f8c684ed4..dd3325f0f 100644
--- a/module/plugins/hoster/DlFreeFr.py
+++ b/module/plugins/hoster/DlFreeFr.py
@@ -35,7 +35,7 @@ class CustomBrowser(Browser):
class DlFreeFr(SimpleHoster):
__name__ = "DlFreeFr"
__type__ = "hoster"
- __version__ = "0.31"
+ __version__ = "0.32"
__pattern__ = r'http://(?:www\.)?dl\.free\.fr/(\w+|getfile\.pl\?file=/\w+)'
__config__ = [("use_premium", "bool", "Use premium account if available", True)]
@@ -98,7 +98,7 @@ class DlFreeFr(SimpleHoster):
headers = self.get_last_headers()
if headers.get("code") == 302 and "set-cookie" in headers and "location" in headers:
m = re.search("(.*?)=(.*?); path=(.*?); domain=(.*)", headers.get("set-cookie"))
- cj = CookieJar(__name__)
+ cj = CookieJar(self.__name__)
if m:
cj.setCookie(m.group(4), m.group(1), m.group(2), m.group(3))
else:
diff --git a/module/plugins/hoster/FilefactoryCom.py b/module/plugins/hoster/FilefactoryCom.py
index 8d7409824..70f1e3bab 100644
--- a/module/plugins/hoster/FilefactoryCom.py
+++ b/module/plugins/hoster/FilefactoryCom.py
@@ -3,18 +3,18 @@
import re
import urlparse
-from module.network.RequestFactory import getURL
-from module.plugins.internal.SimpleHoster import SimpleHoster, parseFileInfo
+from module.network.RequestFactory import getURL as get_url
+from module.plugins.internal.SimpleHoster import SimpleHoster, parse_fileInfo
def get_info(urls):
for url in urls:
- h = getURL(url, just_header=True)
+ h = get_url(url, just_header=True)
m = re.search(r'Location: (.+)\r\n', h)
if m and not re.match(m.group(1), FilefactoryCom.__pattern__): #: It's a direct link! Skipping
yield (url, 0, 3, url)
else: #: It's a standard html page
- yield parseFileInfo(FilefactoryCom, url, getURL(url))
+ yield parse_fileInfo(FilefactoryCom, url, get_url(url))
class FilefactoryCom(SimpleHoster):
diff --git a/module/plugins/hoster/FilesMailRu.py b/module/plugins/hoster/FilesMailRu.py
index 71ef231b1..df6b8a2ad 100644
--- a/module/plugins/hoster/FilesMailRu.py
+++ b/module/plugins/hoster/FilesMailRu.py
@@ -2,7 +2,7 @@
import re
-from module.network.RequestFactory import getURL
+from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.Hoster import Hoster
from module.plugins.internal.Plugin import chunks
@@ -11,7 +11,7 @@ def get_info(urls):
result = []
for chunk in chunks(urls, 10):
for url in chunk:
- html = getURL(url)
+ html = get_url(url)
if r'<div class="errorMessage mb10">' in html:
result.append((url, 0, 1, url))
elif r'Page cannot be displayed' in html:
@@ -73,8 +73,7 @@ class FilesMailRu(Hoster):
"""
You have to wait some seconds. Otherwise you will get a 40Byte HTML Page instead of the file you expected
"""
- self.set_wait(10)
- self.wait()
+ self.wait(10)
return True
diff --git a/module/plugins/hoster/FileserveCom.py b/module/plugins/hoster/FileserveCom.py
index 5a792a425..f38de699e 100644
--- a/module/plugins/hoster/FileserveCom.py
+++ b/module/plugins/hoster/FileserveCom.py
@@ -3,16 +3,16 @@
import re
from module.common.json_layer import json_loads
-from module.network.RequestFactory import getURL
+from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.Hoster import Hoster
from module.plugins.internal.Plugin import chunks
from module.plugins.internal.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import seconds_to_midnight
-from module.utils import parseFileSize
+from module.utils import parseFileSize as parse_size
def check_file(plugin, urls):
- html = getURL(plugin.URLS[1], post={"urls": "\n".join(urls)})
+ html = get_url(plugin.URLS[1], post={"urls": "\n".join(urls)})
file_info = []
for li in re.finditer(plugin.LINKCHECK_TR, html, re.S):
@@ -21,7 +21,7 @@ def check_file(plugin, urls):
if cols:
file_info.append((
cols[1] if cols[1] != '--' else cols[0],
- parseFileSize(cols[2]) if cols[2] != '--' else 0,
+ parse_size(cols[2]) if cols[2] != '--' else 0,
2 if cols[3].startswith('Available') else 1,
cols[0]))
except Exception, e:
@@ -131,8 +131,7 @@ class FileserveCom(Hoster):
elif check == "limit":
self.log_warning(_("Download limited reached for today"))
- self.set_wait(seconds_to_midnight(gmt=2), True)
- self.wait()
+ self.wait(seconds_to_midnight(gmt=2), True)
self.retry()
self.thread.m.reconnecting.wait(3) #: Ease issue with later downloads appearing to be in parallel
@@ -153,8 +152,7 @@ class FileserveCom(Hoster):
else:
wait_time = int(res) + 3
- self.set_wait(wait_time)
- self.wait()
+ self.wait(wait_time)
def do_captcha(self):
@@ -178,8 +176,7 @@ class FileserveCom(Hoster):
def do_long_wait(self, m):
wait_time = (int(m.group(1)) * {'seconds': 1, 'minutes': 60, 'hours': 3600}[m.group(2)]) if m else 12 * 60
- self.set_wait(wait_time, True)
- self.wait()
+ self.wait(wait_time, True)
self.retry()
diff --git a/module/plugins/hoster/FlyFilesNet.py b/module/plugins/hoster/FlyFilesNet.py
index 63cabda66..28c4ce813 100644
--- a/module/plugins/hoster/FlyFilesNet.py
+++ b/module/plugins/hoster/FlyFilesNet.py
@@ -3,7 +3,6 @@
import re
import urllib
-from module.network.RequestFactory import getURL
from module.plugins.internal.SimpleHoster import SimpleHoster
@@ -32,7 +31,7 @@ class FlyFilesNet(SimpleHoster):
url = "http://flyfiles.net"
#: get download URL
- parsed_url = getURL(url, post={"getDownLink": session})
+ parsed_url = self.load(url, post={"getDownLink": session})
self.log_debug("Parsed URL: %s" % parsed_url)
if parsed_url == '#downlink|' or parsed_url == "#downlink|#":
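
FlyFilesNet (and TurbobitNet further down) drop the module-level `getURL()` helper inside instance methods in favour of the plugin's own `self.load()`, which reuses the plugin's request state. A rough stub, not part of the diff, showing only the call shape (the loader body is illustrative, not pyload's implementation):

    import urllib
    import urllib2

    class Plugin(object):
        def load(self, url, post=None):
            # Stub: the real Plugin.load() goes through the plugin's own
            # request object, so cookies and proxy settings are reused.
            data = urllib.urlencode(post) if post else None
            return urllib2.urlopen(url, data).read()

    # Before: parsed_url = getURL(url, post={"getDownLink": session})
    # After:  parsed_url = self.load(url, post={"getDownLink": session})
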
diff --git a/module/plugins/hoster/FreakshareCom.py b/module/plugins/hoster/FreakshareCom.py
index 86dc45585..135315197 100644
--- a/module/plugins/hoster/FreakshareCom.py
+++ b/module/plugins/hoster/FreakshareCom.py
@@ -53,8 +53,7 @@ class FreakshareCom(Hoster):
self.fail(_("Bad Try"))
elif check == "paralell":
- self.set_wait(300, True)
- self.wait()
+ self.wait(300, True)
self.retry()
elif check == "empty":
diff --git a/module/plugins/hoster/FshareVn.py b/module/plugins/hoster/FshareVn.py
index 7e1d573d8..3ece85c96 100644
--- a/module/plugins/hoster/FshareVn.py
+++ b/module/plugins/hoster/FshareVn.py
@@ -4,16 +4,16 @@ import re
import time
import urlparse
-from module.network.RequestFactory import getURL
-from module.plugins.internal.SimpleHoster import SimpleHoster, parseFileInfo
+from module.network.RequestFactory import getURL as get_url
+from module.plugins.internal.SimpleHoster import SimpleHoster, parse_fileInfo
def get_info(urls):
for url in urls:
- html = getURL("http://www.fshare.vn/check_link.php",
+ html = get_url("http://www.fshare.vn/check_link.php",
post={'action': "check_link", 'arrlinks': url})
- yield parseFileInfo(FshareVn, url, html)
+ yield parse_fileInfo(FshareVn, url, html)
def double_decode(m):
diff --git a/module/plugins/hoster/LetitbitNet.py b/module/plugins/hoster/LetitbitNet.py
index 15a46066a..061311bab 100644
--- a/module/plugins/hoster/LetitbitNet.py
+++ b/module/plugins/hoster/LetitbitNet.py
@@ -10,14 +10,14 @@ import re
import urlparse
from module.common.json_layer import json_loads, json_dumps
-from module.network.RequestFactory import getURL
+from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, seconds_to_midnight
def api_response(url):
json_data = ["yw7XQy2v9", ["download/info", {"link": url}]]
- api_rep = getURL("http://api.letitbit.net/json",
+ api_rep = get_url("http://api.letitbit.net/json",
post={'r': json_dumps(json_data)})
return json_loads(api_rep)
diff --git a/module/plugins/hoster/LuckyShareNet.py b/module/plugins/hoster/LuckyShareNet.py
index ec5e87f12..4f10b8eed 100644
--- a/module/plugins/hoster/LuckyShareNet.py
+++ b/module/plugins/hoster/LuckyShareNet.py
@@ -2,8 +2,7 @@
import re
-from bottle import json_loads
-
+from module.common.json_layer import json_loads
from module.plugins.internal.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
diff --git a/module/plugins/hoster/MegaRapidCz.py b/module/plugins/hoster/MegaRapidCz.py
index e00efc1a9..a5feb5668 100644
--- a/module/plugins/hoster/MegaRapidCz.py
+++ b/module/plugins/hoster/MegaRapidCz.py
@@ -4,19 +4,19 @@ import pycurl
import re
from module.network.HTTPRequest import BadHeader
-from module.network.RequestFactory import getRequest
-from module.plugins.internal.SimpleHoster import SimpleHoster, parseFileInfo
+from module.network.RequestFactory import getRequest as get_request
+from module.plugins.internal.SimpleHoster import SimpleHoster, parse_fileInfo
def get_info(urls):
- h = getRequest()
+ h = get_request()
h.c.setopt(pycurl.HTTPHEADER,
["Accept: text/html",
"User-Agent: Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0"])
for url in urls:
html = h.load(url)
- yield parseFileInfo(MegaRapidCz, url, html)
+ yield parse_fileInfo(MegaRapidCz, url, html)
class MegaRapidCz(SimpleHoster):
diff --git a/module/plugins/hoster/OboomCom.py b/module/plugins/hoster/OboomCom.py
index 190b04814..37dce3750 100644
--- a/module/plugins/hoster/OboomCom.py
+++ b/module/plugins/hoster/OboomCom.py
@@ -87,8 +87,7 @@ class OboomCom(Hoster):
self.download_token = result[1]
self.download_auth = result[2]
self.correct_captcha()
- self.set_wait(30)
- self.wait()
+ self.wait(30)
break
elif result[0] == 400:
diff --git a/module/plugins/hoster/OverLoadMe.py b/module/plugins/hoster/OverLoadMe.py
index 1d2650887..cb10e9500 100644
--- a/module/plugins/hoster/OverLoadMe.py
+++ b/module/plugins/hoster/OverLoadMe.py
@@ -5,7 +5,7 @@ import urllib
from module.common.json_layer import json_loads
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
-from module.utils import parseFileSize
+from module.utils import parseFileSize as parse_size
class OverLoadMe(MultiHoster):
@@ -43,7 +43,7 @@ class OverLoadMe(MultiHoster):
self.link = data['downloadlink']
if pyfile.name and pyfile.name.endswith('.tmp') and data['filename']:
pyfile.name = data['filename']
- pyfile.size = parseFileSize(data['filesize'])
+ pyfile.size = parse_size(data['filesize'])
getInfo = create_getInfo(OverLoadMe)
diff --git a/module/plugins/hoster/RPNetBiz.py b/module/plugins/hoster/RPNetBiz.py
index d151ce5e7..a449ed276 100644
--- a/module/plugins/hoster/RPNetBiz.py
+++ b/module/plugins/hoster/RPNetBiz.py
@@ -40,8 +40,7 @@ class RPNetBiz(MultiHoster):
#: Check if we only have an id as a HDD link
if 'id' in link_status:
self.log_debug("Need to wait at least 30 seconds before requery")
- self.set_wait(30) #: wait for 30 seconds
- self.wait()
+ self.wait(30) #: wait for 30 seconds
#: Lets query the server again asking for the status on the link,
#: we need to keep doing this until we reach 100
max_tries = 30
@@ -63,8 +62,7 @@ class RPNetBiz(MultiHoster):
else:
self.log_debug("At %s%% for the file download" % download_status['status'])
- self.set_wait(30)
- self.wait()
+ self.wait(30)
my_try += 1
if my_try > max_tries: #: We went over the limit!
diff --git a/module/plugins/hoster/RapiduNet.py b/module/plugins/hoster/RapiduNet.py
index 30e453b93..1489b49f0 100644
--- a/module/plugins/hoster/RapiduNet.py
+++ b/module/plugins/hoster/RapiduNet.py
@@ -48,7 +48,7 @@ class RapiduNet(SimpleHoster):
if str(jsvars['timeToDownload']) is "stop":
t = (24 * 60 * 60) - (int(time.time()) % (24 * 60 * 60)) + time.altzone
- self.log_info("You've reach your daily download transfer")
+ self.log_info(_("You've reach your daily download transfer"))
self.retry(10, 10 if t < 1 else None, _("Try tomorrow again")) #@NOTE: check t in case of not synchronised clock
diff --git a/module/plugins/hoster/RealdebridCom.py b/module/plugins/hoster/RealdebridCom.py
index 812598f77..22eabae95 100644
--- a/module/plugins/hoster/RealdebridCom.py
+++ b/module/plugins/hoster/RealdebridCom.py
@@ -6,7 +6,7 @@ import urllib
from module.common.json_layer import json_loads
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
-from module.utils import parseFileSize
+from module.utils import parseFileSize as parse_size
class RealdebridCom(MultiHoster):
@@ -45,7 +45,7 @@ class RealdebridCom(MultiHoster):
else:
if pyfile.name and pyfile.name.endswith('.tmp') and data['file_name']:
pyfile.name = data['file_name']
- pyfile.size = parseFileSize(data['file_size'])
+ pyfile.size = parse_size(data['file_size'])
self.link = data['generated_links'][0][-1]
diff --git a/module/plugins/hoster/ShareonlineBiz.py b/module/plugins/hoster/ShareonlineBiz.py
index 413c796bb..53b3d4ae8 100644
--- a/module/plugins/hoster/ShareonlineBiz.py
+++ b/module/plugins/hoster/ShareonlineBiz.py
@@ -5,7 +5,7 @@ import time
import urllib
import urlparse
-from module.network.RequestFactory import getURL
+from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
@@ -39,7 +39,7 @@ class ShareonlineBiz(SimpleHoster):
def api_info(cls, url):
info = super(ShareonlineBiz, cls).api_info(url)
- field = getURL("http://api.share-online.biz/linkcheck.php",
+ field = get_url("http://api.share-online.biz/linkcheck.php",
get={'md5' : "1",
'links': re.match(cls.__pattern__, url).group("ID")}).split(";")
@@ -160,7 +160,7 @@ class ShareonlineBiz(SimpleHoster):
try:
self.log_error(errmsg, re.search(self.ERROR_PATTERN, self.html).group(1))
except Exception:
- self.log_error("Unknown error occurred", errmsg)
+ self.log_error(_("Unknown error occurred"), errmsg)
if errmsg is "invalid":
self.fail(_("File not available"))
diff --git a/module/plugins/hoster/ShareplaceCom.py b/module/plugins/hoster/ShareplaceCom.py
index d917e3f35..caafe7952 100644
--- a/module/plugins/hoster/ShareplaceCom.py
+++ b/module/plugins/hoster/ShareplaceCom.py
@@ -30,9 +30,7 @@ class ShareplaceCom(Hoster):
self.pyfile.name = self.get_file_name()
- wait_time = self.get_waiting_time()
- self.set_wait(wait_time)
- self.wait()
+ self.wait(self.get_waiting_time())
def get_waiting_time(self):
diff --git a/module/plugins/hoster/StreamCz.py b/module/plugins/hoster/StreamCz.py
index e2ae0ab83..cd62cbcf9 100644
--- a/module/plugins/hoster/StreamCz.py
+++ b/module/plugins/hoster/StreamCz.py
@@ -2,7 +2,7 @@
import re
-from module.network.RequestFactory import getURL
+from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.Hoster import Hoster
@@ -11,7 +11,7 @@ def get_info(urls):
for url in urls:
- html = getURL(url)
+ html = get_url(url)
if re.search(StreamCz.OFFLINE_PATTERN, html):
#: File offline
result.append((url, 0, 1, url))
diff --git a/module/plugins/hoster/TurbobitNet.py b/module/plugins/hoster/TurbobitNet.py
index e4ff000b2..46b15169b 100644
--- a/module/plugins/hoster/TurbobitNet.py
+++ b/module/plugins/hoster/TurbobitNet.py
@@ -9,7 +9,6 @@ import urllib
from Crypto.Cipher import ARC4
-from module.network.RequestFactory import getURL
from module.plugins.internal.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo, timestamp
@@ -101,7 +100,7 @@ class TurbobitNet(SimpleHoster):
if self.retrieve("version") != self.__version__ \
or int(self.retrieve("timestamp", 0)) + 86400000 < timestamp():
#: that's right, we are even using jdownloader updates
- rtUpdate = getURL("http://update0.jdownloader.org/pluginstuff/tbupdate.js")
+ rtUpdate = self.load("http://update0.jdownloader.org/pluginstuff/tbupdate.js")
rtUpdate = self.decrypt(rtUpdate.splitlines()[1])
#: but we still need to fix the syntax to work with other engines than rhino
rtUpdate = re.sub(r'for each\(var (\w+) in(\[[^\]]+\])\)\{',
diff --git a/module/plugins/hoster/UploadableCh.py b/module/plugins/hoster/UploadableCh.py
index 21d9f298e..6f589efa3 100644
--- a/module/plugins/hoster/UploadableCh.py
+++ b/module/plugins/hoster/UploadableCh.py
@@ -66,7 +66,7 @@ class UploadableCh(SimpleHoster):
def check_file(self):
if self.check_download({'wait': re.compile("Please wait for")}):
- self.log_info("Downloadlimit reached, please wait or reconnect")
+ self.log_info(_("Downloadlimit reached, please wait or reconnect"))
self.wait(60 * 60, True)
self.retry()
diff --git a/module/plugins/hoster/UploadedTo.py b/module/plugins/hoster/UploadedTo.py
index 32b54d1ae..e8c317296 100644
--- a/module/plugins/hoster/UploadedTo.py
+++ b/module/plugins/hoster/UploadedTo.py
@@ -4,7 +4,7 @@ import re
import time
import urlparse
-from module.network.RequestFactory import getURL
+from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
@@ -44,7 +44,7 @@ class UploadedTo(SimpleHoster):
info = super(UploadedTo, cls).api_info(url)
for _i in xrange(5):
- html = getURL("http://uploaded.net/api/filemultiple",
+ html = get_url("http://uploaded.net/api/filemultiple",
get={"apikey": cls.API_KEY, 'id_0': re.match(cls.__pattern__, url).group('ID')})
if html != "can't find request":
diff --git a/module/plugins/hoster/WebshareCz.py b/module/plugins/hoster/WebshareCz.py
index b650e9219..feea234f3 100644
--- a/module/plugins/hoster/WebshareCz.py
+++ b/module/plugins/hoster/WebshareCz.py
@@ -2,7 +2,7 @@
import re
-from module.network.RequestFactory import getURL
+from module.network.RequestFactory import getURL as get_url
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
@@ -26,7 +26,7 @@ class WebshareCz(SimpleHoster):
info['pattern'] = re.match(cls.__pattern__, url).groupdict()
- api_data = getURL("https://webshare.cz/api/file_info/",
+ api_data = get_url("https://webshare.cz/api/file_info/",
post={'ident': info['pattern']['ID'], 'wst': ""})
if not re.search(r'<status>OK', api_data):
@@ -42,7 +42,7 @@ class WebshareCz(SimpleHoster):
def handle_free(self, pyfile):
wst = self.account.get_account_data(self.user).get('wst', None) if self.account else None
- api_data = getURL("https://webshare.cz/api/file_link/",
+ api_data = get_url("https://webshare.cz/api/file_link/",
post={'ident': self.info['pattern']['ID'], 'wst': wst})
self.log_debug("API data: " + api_data)
diff --git a/module/plugins/hoster/Xdcc.py b/module/plugins/hoster/Xdcc.py
index d78794f99..0e200307e 100644
--- a/module/plugins/hoster/Xdcc.py
+++ b/module/plugins/hoster/Xdcc.py
@@ -50,8 +50,7 @@ class Xdcc(Hoster):
if errno == 10054:
self.log_debug("Server blocked our ip, retry in 5 min")
- self.set_wait(300)
- self.wait()
+ self.wait(300)
continue
self.fail(_("Failed due to socket errors. Code: %d") % errno)
@@ -91,8 +90,7 @@ class Xdcc(Hoster):
sock.send("NICK %s\r\n" % nick)
sock.send("USER %s %s bla :%s\r\n" % (ident, host, real))
- self.set_wait(3)
- self.wait()
+ self.wait(3)
sock.send("JOIN #%s\r\n" % chan)
sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack))
diff --git a/module/plugins/hoster/YourfilesTo.py b/module/plugins/hoster/YourfilesTo.py
index c6e106027..0b631fb23 100644
--- a/module/plugins/hoster/YourfilesTo.py
+++ b/module/plugins/hoster/YourfilesTo.py
@@ -31,9 +31,7 @@ class YourfilesTo(Hoster):
self.pyfile.name = self.get_file_name()
- wait_time = self.get_waiting_time()
- self.set_wait(wait_time)
- self.wait()
+ self.wait(self.get_waiting_time())
def get_waiting_time(self):
diff --git a/module/plugins/hoster/ZippyshareCom.py b/module/plugins/hoster/ZippyshareCom.py
index 80d495f78..48c17ea5f 100644
--- a/module/plugins/hoster/ZippyshareCom.py
+++ b/module/plugins/hoster/ZippyshareCom.py
@@ -3,7 +3,7 @@
import re
import urllib
-from BeautifulSoup import BeautifulSoup
+import BeautifulSoup
from module.plugins.internal.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
@@ -59,7 +59,7 @@ class ZippyshareCom(SimpleHoster):
def get_link(self):
#: get all the scripts inside the html body
- soup = BeautifulSoup(self.html)
+ soup = BeautifulSoup.BeautifulSoup(self.html)
scripts = (s.getText().strip() for s in soup.body.findAll('script', type='text/javascript'))
#: meant to be populated with the initialization of all the DOM elements found in the scripts
diff --git a/module/plugins/internal/Account.py b/module/plugins/internal/Account.py
index 8fd16bba9..b437b4a55 100644
--- a/module/plugins/internal/Account.py
+++ b/module/plugins/internal/Account.py
@@ -6,7 +6,7 @@ import time
import traceback
from module.plugins.internal.Plugin import Plugin
-from module.utils import compare_time, lock, parseFileSize
+from module.utils import compare_time, lock, parseFileSize as parse_size
class WrongPassword(Exception):
@@ -266,7 +266,7 @@ class Account(Plugin):
if not compare_time(start.split(":"), end.split(":")):
continue
except Exception:
- self.log_warning(_("Your Time %s has wrong format, use: 1:22-3:44") % time_data)
+ self.log_warning(_("Your Time %s has wrong format, use 1:22-3:44") % time_data)
if user in self.infos:
if "validuntil" in self.infos[user]:
@@ -291,7 +291,7 @@ class Account(Plugin):
def parse_traffic(self, value, unit=None): #: return kilobytes
if not unit and not isinstance(value, basestring):
unit = "KB"
- return parseFileSize(value, unit)
+ return parse_size(value, unit)
def wrong_password(self):
diff --git a/module/plugins/internal/AdYouLike.py b/module/plugins/internal/AdYouLike.py
index 07e7e4d17..9036b4632 100644
--- a/module/plugins/internal/AdYouLike.py
+++ b/module/plugins/internal/AdYouLike.py
@@ -30,7 +30,7 @@ class AdYouLike(Captcha):
self.log_debug("Ayl: %s | Callback: %s" % self.key)
return self.key #: key is the tuple(ayl, callback)
else:
- self.log_warning("Ayl or callback pattern not found")
+ self.log_warning(_("Ayl or callback pattern not found"))
return None
diff --git a/module/plugins/internal/AdsCaptcha.py b/module/plugins/internal/AdsCaptcha.py
index b45a6dfda..788b5e71a 100644
--- a/module/plugins/internal/AdsCaptcha.py
+++ b/module/plugins/internal/AdsCaptcha.py
@@ -30,7 +30,7 @@ class AdsCaptcha(Captcha):
self.log_debug("Key: %s | ID: %s" % self.key)
return self.key
else:
- self.log_warning("Key or id pattern not found")
+ self.log_warning(_("Key or id pattern not found"))
return None
diff --git a/module/plugins/internal/DeadCrypter.py b/module/plugins/internal/DeadCrypter.py
index d79551b52..15ba68b28 100644
--- a/module/plugins/internal/DeadCrypter.py
+++ b/module/plugins/internal/DeadCrypter.py
@@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-
-from module.plugins.internal.Crypter import Crypter
-from module.plugins.internal.SimpleCrypter import create_getInfo
+from module.plugins.internal.Crypter import Crypter, create_getInfo
class DeadCrypter(Crypter):
__name__ = "DeadCrypter"
__type__ = "crypter"
- __version__ = "0.07"
+ __version__ = "0.08"
__pattern__ = r'^unmatchable$'
@@ -17,10 +16,10 @@ class DeadCrypter(Crypter):
@classmethod
- def api_info(cls, *args, **kwargs):
- api = super(DeadCrypter, cls).api_info(*args, **kwargs)
- api['status'] = 1
- return api
+ def get_info(cls, *args, **kwargs):
+ info = super(DeadCrypter, cls).get_info(*args, **kwargs)
+ info['status'] = 1
+ return info
def setup(self):
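
DeadCrypter and DeadHoster now override `get_info()` rather than `api_info()`, forcing every matched link to offline status. The classmethod pattern in isolation, not part of the diff (the base class is a stub; in pyload it would be `Crypter` or `Hoster`):

    class Crypter(object):
        @classmethod
        def get_info(cls, url="", html=""):
            # Stub standing in for the real base implementation
            return {'name': url, 'size': 0, 'status': 3, 'url': url}

    class DeadCrypter(Crypter):
        @classmethod
        def get_info(cls, *args, **kwargs):
            info = super(DeadCrypter, cls).get_info(*args, **kwargs)
            info['status'] = 1  # 1 == offline in pyload's status map
            return info

    print DeadCrypter.get_info("http://example.com/container")['status']  # -> 1
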
diff --git a/module/plugins/internal/DeadHoster.py b/module/plugins/internal/DeadHoster.py
index 86f4381a3..5050ba059 100644
--- a/module/plugins/internal/DeadHoster.py
+++ b/module/plugins/internal/DeadHoster.py
@@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-
-from module.plugins.internal.Hoster import Hoster
-from module.plugins.internal.SimpleHoster import create_getInfo
+from module.plugins.internal.Hoster import Hoster, create_getInfo
class DeadHoster(Hoster):
__name__ = "DeadHoster"
__type__ = "hoster"
- __version__ = "0.17"
+ __version__ = "0.18"
__pattern__ = r'^unmatchable$'
@@ -17,10 +16,10 @@ class DeadHoster(Hoster):
@classmethod
- def api_info(cls, *args, **kwargs):
- api = super(DeadHoster, cls).api_info(*args, **kwargs)
- api['status'] = 1
- return api
+ def get_info(cls, *args, **kwargs):
+ info = super(DeadHoster, cls).get_info(*args, **kwargs)
+ info['status'] = 1
+ return info
def setup(self):
diff --git a/module/plugins/internal/Hoster.py b/module/plugins/internal/Hoster.py
index 9a9e61e46..15c46101b 100644
--- a/module/plugins/internal/Hoster.py
+++ b/module/plugins/internal/Hoster.py
@@ -216,7 +216,7 @@ class Hoster(Plugin):
wait_until = time.time() + wait_time + 1
self.log_debug("Set waitUntil to: %f (previous: %f)" % (wait_until, self.pyfile.waitUntil),
- "Wait: %d+1 seconds" % wait_time)
+ "Wait: %d (+1) seconds" % wait_time)
self.pyfile.waitUntil = wait_until
@@ -242,7 +242,7 @@ class Hoster(Plugin):
pyfile.setStatus("waiting")
self.log_info(_("Wait: %d seconds") % (pyfile.waitUntil - time.time()),
- _("Reconnect: %s") % self.want_reconnect)
+ _("Reconnect: %s") % self.want_reconnect)
if self.account:
self.log_debug("Ignore reconnection due account logged")
@@ -443,7 +443,7 @@ class Hoster(Plugin):
self.fail(_("No url given"))
if self.core.debug:
- self.log_debug("Download url: " + url, *["%s=%s" % (key, val) for key, val in locals().iteritems() if key not in ("self", "url")])
+ self.log_debug("Download url " + url, *["%s=%s" % (key, val) for key, val in locals().iteritems() if key not in ("self", "url")])
self.correct_captcha()
self.check_for_same_files()
diff --git a/module/plugins/internal/MultiHook.py b/module/plugins/internal/MultiHook.py
index 2e99afa1c..7afe95705 100644
--- a/module/plugins/internal/MultiHook.py
+++ b/module/plugins/internal/MultiHook.py
@@ -76,7 +76,7 @@ class MultiHook(Hook):
self.pluginmodule = self.core.pluginManager.loadModule(self.plugintype, self.pluginname)
self.pluginclass = getattr(self.pluginmodule, self.pluginname)
else:
- self.log_warning("Hook plugin will be deactivated due missing plugin reference")
+ self.log_warning(_("Hook plugin will be deactivated due missing plugin reference"))
self.set_config('activated', False)
@@ -87,7 +87,7 @@ class MultiHook(Hook):
self.account = None
if not self.account and hasattr(self.pluginclass, "LOGIN_ACCOUNT") and self.pluginclass.LOGIN_ACCOUNT:
- self.log_warning("Hook plugin will be deactivated due missing account reference")
+ self.log_warning(_("Hook plugin will be deactivated due missing account reference"))
self.set_config('activated', False)
diff --git a/module/plugins/internal/Plugin.py b/module/plugins/internal/Plugin.py
index d1b8bb1cf..dc5995e65 100644
--- a/module/plugins/internal/Plugin.py
+++ b/module/plugins/internal/Plugin.py
@@ -282,7 +282,7 @@ class Plugin(object):
self.fail(_("No url given"))
if self.core.debug:
- self.log_debug("Load url: " + url, *["%s=%s" % (key, val) for key, val in locals().iteritems() if key not in ("self", "url")])
+ self.log_debug("Load url " + url, *["%s=%s" % (key, val) for key, val in locals().iteritems() if key not in ("self", "url")])
if req is None:
if hasattr(self, "req"):
diff --git a/module/plugins/internal/ReCaptcha.py b/module/plugins/internal/ReCaptcha.py
index 79bda9051..1caf6b7b0 100644
--- a/module/plugins/internal/ReCaptcha.py
+++ b/module/plugins/internal/ReCaptcha.py
@@ -35,7 +35,7 @@ class ReCaptcha(Captcha):
self.log_debug("Key: %s" % self.key)
return self.key
else:
- self.log_warning("Key pattern not found")
+ self.log_warning(_("Key pattern not found"))
return None
diff --git a/module/plugins/internal/SimpleHoster.py b/module/plugins/internal/SimpleHoster.py
index 772e6ea2f..93008f16d 100644
--- a/module/plugins/internal/SimpleHoster.py
+++ b/module/plugins/internal/SimpleHoster.py
@@ -12,8 +12,8 @@ import urlparse
from module.PyFile import statusMap as _statusMap
from module.network.HTTPRequest import BadHeader
-from module.network.RequestFactory import getURL
-from module.plugins.internal.Hoster import Hoster, parse_fileInfo, create_getInfo
+from module.network.RequestFactory import getURL as get_url
+from module.plugins.internal.Hoster import Hoster, create_getInfo, parse_fileInfo
from module.plugins.internal.Plugin import Fail, Retry, replace_patterns, set_cookies
from module.utils import fixup, fs_encode, parseFileSize as parse_size
@@ -136,7 +136,7 @@ class SimpleHoster(Hoster):
elif info['status'] is 3:
try:
- html = getURL(url, cookies=cls.COOKIES, decode=cls.TEXT_ENCODING)
+ html = get_url(url, cookies=cls.COOKIES, decode=cls.TEXT_ENCODING)
except BadHeader, e:
info['error'] = "%d: %s" % (e.code, e.content)
@@ -322,7 +322,7 @@ class SimpleHoster(Hoster):
except Exception:
pass
- self.log_warning("Check result: " + errmsg, "Waiting 1 minute and retry")
+ self.log_warning(_("Check result: ") + errmsg, _("Waiting 1 minute and retry"))
self.want_reconnect = True
self.retry(wait_time=60, reason=errmsg)
else:
diff --git a/module/plugins/internal/SolveMedia.py b/module/plugins/internal/SolveMedia.py
index 927d4e536..87537470f 100644
--- a/module/plugins/internal/SolveMedia.py
+++ b/module/plugins/internal/SolveMedia.py
@@ -28,7 +28,7 @@ class SolveMedia(Captcha):
self.log_debug("Key: %s" % self.key)
return self.key
else:
- self.log_warning("Key pattern not found")
+ self.log_warning(_("Key pattern not found")
return None
@@ -43,7 +43,7 @@ class SolveMedia(Captcha):
magic = re.search(r'name="magic" value="(.+?)"', html).group(1)
except AttributeError:
- self.log_warning("Magic pattern not found")
+ self.log_warning(_("Magic pattern not found")
magic = None
try:
@@ -81,7 +81,7 @@ class SolveMedia(Captcha):
else:
if "error" in html:
- self.log_warning("Captcha code was invalid")
+ self.log_warning(_("Captcha code was invalid"))
self.log_debug("Retry #%d" % i)
html = self.plugin.load(redirect)
else: