author    Stefano <l.stickell@yahoo.it>  2013-07-25 21:01:35 +0200
committer Stefano <l.stickell@yahoo.it>  2013-07-25 21:01:35 +0200
commit    cfbdcc19337314c0bc8901711ec0e63147e1c3d0 (patch)
tree      0a7df1beef1c7d81aab0d739b56ffbeb17029f83 /module/plugins/crypter
parent    Fixed PEP 8 violations in Hooks (diff)
download  pyload-cfbdcc19337314c0bc8901711ec0e63147e1c3d0.tar.xz
Fixed some pylint violations
Diffstat (limited to 'module/plugins/crypter')
-rw-r--r--  module/plugins/crypter/LinkSaveIn.py       |  2
-rw-r--r--  module/plugins/crypter/SecuredIn.py        | 12
-rw-r--r--  module/plugins/crypter/SerienjunkiesOrg.py | 20
-rw-r--r--  module/plugins/crypter/ShareLinksBiz.py    | 26
4 files changed, 29 insertions(+), 31 deletions(-)
diff --git a/module/plugins/crypter/LinkSaveIn.py b/module/plugins/crypter/LinkSaveIn.py
index a73b7cc19..129da6608 100644
--- a/module/plugins/crypter/LinkSaveIn.py
+++ b/module/plugins/crypter/LinkSaveIn.py
@@ -48,7 +48,7 @@ class LinkSaveIn(Crypter):
if not self.isOnline():
self.offline()
- # Check for protection
+ # Check for protection
if self.isPasswordProtected():
self.unlockPasswordProtection()
self.handleErrors()
diff --git a/module/plugins/crypter/SecuredIn.py b/module/plugins/crypter/SecuredIn.py
index 7c0b27c8b..9bce390ac 100644
--- a/module/plugins/crypter/SecuredIn.py
+++ b/module/plugins/crypter/SecuredIn.py
@@ -11,7 +11,7 @@ class SecuredIn(Crypter):
__name__ = "SecuredIn"
__type__ = "container"
__pattern__ = r"http://[\w\.]*?secured\.in/download-[\d]+-[\w]{8}\.html"
- __version__ = "0.1"
+ __version__ = "0.2"
__description__ = """secured.in Container Plugin"""
__author_name__ = ("mkaay")
__author_mail__ = ("mkaay@mkaay.de")
@@ -31,7 +31,7 @@ class SecuredIn(Crypter):
src = self.req.load(url, cookies=True)
soup = BeautifulSoup(src)
img = soup.find("img", attrs={"id": "captcha_img"})
- for i in range(3):
+ for _ in range(3):
form = soup.find("form", attrs={"id": "frm_captcha"})
captchaHash = form.find("input", attrs={"id": "captcha_hash"})["value"]
captchaUrl = "http://secured.in/%s" % img["src"]
@@ -41,8 +41,8 @@ class SecuredIn(Crypter):
soup = BeautifulSoup(src)
img = soup.find("img", attrs={"id": "captcha_img"})
if not img:
- files = soup.findAll("tr", attrs={"id": re.compile("file-\d+")})
- dlIDPattern = re.compile("accessDownload\(\d, \d+, '(.*?)', \d\)")
+ files = soup.findAll("tr", attrs={"id": re.compile(r"file-\d+")})
+ dlIDPattern = re.compile(r"accessDownload\(\d, \d+, '(.*?)', \d\)")
cypher = self.Cypher()
for cfile in files:
m = dlIDPattern.search(cfile["onclick"])
@@ -396,13 +396,13 @@ class SecuredIn(Crypter):
self.pqmyzkid = self.thdlpsmy
self.pldmjnde = self.plkodnyq
- yaqpolft = [0 for i in range(len(kaiumylq))]
+ yaqpolft = [0] * len(kaiumylq)
yaqwsedr = 0
btzqwsay = 0
while yaqwsedr < len(kaiumylq):
wlqoakmy = 0
- for lopiuztr in range(0, 4):
+ for _ in range(0, 4):
wlqoakmy = wlqoakmy << 8 | ord(kaiumylq[yaqwsedr % len(kaiumylq)])
yaqwsedr += 1
yaqpolft[btzqwsay] = wlqoakmy
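
Note: the SecuredIn.py hunks above are routine pylint cleanups: unused loop counters renamed to "_", regexes containing backslashes turned into raw strings (anomalous-backslash-in-string, W1401), and a comprehension that ignores its loop variable replaced by list multiplication. A minimal standalone sketch of the same patterns, using illustrative names that are not part of pyLoad:

import re

FILE_ROW_PATTERN = re.compile(r"file-\d+")  # raw string keeps "\d" as a regex escape, silencing W1401

def retry(action, attempts=3):
    # "_" marks the loop counter as intentionally unused
    for _ in range(attempts):
        if action():
            return True
    return False

def zero_buffer(length):
    # same result as [0 for i in range(length)] but without the unused name "i"
    return [0] * length
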
diff --git a/module/plugins/crypter/SerienjunkiesOrg.py b/module/plugins/crypter/SerienjunkiesOrg.py
index 5b720533c..e0eb7e240 100644
--- a/module/plugins/crypter/SerienjunkiesOrg.py
+++ b/module/plugins/crypter/SerienjunkiesOrg.py
@@ -12,13 +12,13 @@ class SerienjunkiesOrg(Crypter):
__name__ = "SerienjunkiesOrg"
__type__ = "container"
__pattern__ = r"http://.*?(serienjunkies.org|dokujunkies.org)/.*?"
- __version__ = "0.38"
+ __version__ = "0.39"
__config__ = [
("changeNameSJ", "Packagename;Show;Season;Format;Episode", "Take SJ.org name", "Show"),
("changeNameDJ", "Packagename;Show;Format;Episode", "Take DJ.org name", "Show"),
("randomPreferred", "bool", "Randomize Preferred-List", False),
- (
- "hosterListMode", "OnlyOne;OnlyPreferred(One);OnlyPreferred(All);All", "Use for hosters (if supported)", "All"),
+ ("hosterListMode", "OnlyOne;OnlyPreferred(One);OnlyPreferred(All);All",
+ "Use for hosters (if supported)", "All"),
("hosterList", "str", "Preferred Hoster list (comma separated)",
"RapidshareCom,UploadedTo,NetloadIn,FilefactoryCom,FreakshareNet,FilebaseTo,HotfileCom,DepositfilesCom,EasyshareCom,KickloadCom"),
("ignoreList", "str", "Ignored Hoster list (comma separated)", "MegauploadCom")
@@ -27,7 +27,6 @@ class SerienjunkiesOrg(Crypter):
__author_name__ = ("mkaay", "godofdream")
__author_mail__ = ("mkaay@mkaay.de", "soilfiction@gmail.com")
-
def setup(self):
self.multiDL = False
@@ -62,7 +61,6 @@ class SerienjunkiesOrg(Crypter):
else:
self.core.files.addLinks(package_links, self.pyfile.package().id)
-
def handleSeason(self, url):
src = self.getSJSrc(url)
soup = BeautifulSoup(src)
@@ -99,7 +97,7 @@ class SerienjunkiesOrg(Crypter):
groups[gid]["ep"][ename] = {}
parts.remove(parts[0])
for part in parts:
- hostername = re.search(" \| ([-a-zA-Z0-9]+\.\w+)", part)
+ hostername = re.search(r" \| ([-a-zA-Z0-9]+\.\w+)", part)
if hostername:
hostername = hostername.group(1)
groups[gid]["ep"][ename][hostername] = []
@@ -229,7 +227,7 @@ class SerienjunkiesOrg(Crypter):
groups[gid]["ep"][ename] = {}
parts.remove(parts[0])
for part in parts:
- hostername = re.search(" \| ([-a-zA-Z0-9]+\.\w+)", part)
+ hostername = re.search(r" \| ([-a-zA-Z0-9]+\.\w+)", part)
if hostername:
hostername = hostername.group(1)
groups[gid]["ep"][ename][hostername] = []
@@ -268,7 +266,7 @@ class SerienjunkiesOrg(Crypter):
episodePattern = re.compile("^http://download.serienjunkies.org/f-.*?.html(#hasName)?$")
oldStyleLink = re.compile("^http://serienjunkies.org/safe/(.*)$")
categoryPatternDJ = re.compile("^http://dokujunkies.org/.*?(.*)$")
- showPatternDJ = re.compile("^http://dokujunkies.org/.*?/(.*)\.html(#hasName)?$")
+ showPatternDJ = re.compile(r"^http://dokujunkies.org/.*?/(.*)\.html(#hasName)?$")
framePattern = re.compile("^http://download.(serienjunkies.org|dokujunkies.org)/frame/go-.*?/$")
url = pyfile.url
if framePattern.match(url):
@@ -291,8 +289,8 @@ class SerienjunkiesOrg(Crypter):
result = []
preferredList = self.getConfig("hosterList").strip().lower().replace(
- '|', ',').replace('.', '').replace(';', ',').split( ',')
- if (self.getConfig("randomPreferred") == True) and (
+ '|', ',').replace('.', '').replace(';', ',').split(',')
+ if (self.getConfig("randomPreferred") is True) and (
self.getConfig("hosterListMode") in ["OnlyOne", "OnlyPreferred(One)"]):
random.shuffle(preferredList)
# we don't want hosters be read two times
@@ -309,7 +307,7 @@ class SerienjunkiesOrg(Crypter):
return result
ignorelist = self.getConfig("ignoreList").strip().lower().replace(
- '|', ',').replace('.', '').replace(';', ',').split( ',')
+ '|', ',').replace('.', '').replace(';', ',').split(',')
if self.getConfig('hosterListMode') in ["OnlyOne", "All"]:
for Hoster in hosterlist2:
if Hoster.strip().lower().replace('.', '') not in ignorelist:
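
Note: the SerienjunkiesOrg.py hunks combine several small cleanups: more raw-string regexes, tightening "split( ',')" to "split(',')", reflowing the long __config__ tuple, and replacing "== True" with "is True" to satisfy pylint's singleton-comparison check (which would also accept dropping the comparison entirely). A short illustrative sketch, not pyLoad code:

import random
import re

HOSTER_PATTERN = re.compile(r" \| ([-a-zA-Z0-9]+\.\w+)")  # raw string avoids W1401

def preferred_hosters(csv_value, randomize):
    hosters = csv_value.strip().lower().replace('|', ',').replace(';', ',').split(',')
    # "randomize is True" mirrors the diff; "if randomize:" would be the more idiomatic form
    if randomize is True:
        random.shuffle(hosters)
    return hosters
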
diff --git a/module/plugins/crypter/ShareLinksBiz.py b/module/plugins/crypter/ShareLinksBiz.py
index 252766811..09ac21873 100644
--- a/module/plugins/crypter/ShareLinksBiz.py
+++ b/module/plugins/crypter/ShareLinksBiz.py
@@ -12,7 +12,7 @@ class ShareLinksBiz(Crypter):
__name__ = "ShareLinksBiz"
__type__ = "crypter"
__pattern__ = r"(?P<base>http://[\w\.]*?(share-links|s2l)\.biz)/(?P<id>_?[0-9a-z]+)(/.*)?"
- __version__ = "1.12"
+ __version__ = "1.13"
__description__ = """Share-Links.biz Crypter"""
__author_name__ = ("fragonib")
__author_mail__ = ("fragonib[AT]yahoo[DOT]es")
@@ -36,7 +36,7 @@ class ShareLinksBiz(Crypter):
# Unblock server (load all images)
self.unblockServer()
- # Check for protection
+ # Check for protection
if self.isPasswordProtected():
self.unlockPasswordProtection()
self.handleErrors()
@@ -53,7 +53,7 @@ class ShareLinksBiz(Crypter):
package_links.extend(self.handleCNL2())
package_links = set(package_links)
- # Get package info
+ # Get package info
package_name, package_folder = self.getPackageInfo()
# Pack
@@ -86,7 +86,7 @@ class ShareLinksBiz(Crypter):
return False
def unblockServer(self):
- imgs = re.findall("(/template/images/.*?\.gif)", self.html)
+ imgs = re.findall(r"(/template/images/.*?\.gif)", self.html)
for img in imgs:
self.load(self.baseUrl + img)
@@ -121,12 +121,12 @@ class ShareLinksBiz(Crypter):
self.html = self.load(url, decode=True)
def _getCaptchaMap(self):
- map = {}
+ mapp = {}
for m in re.finditer(r'<area shape="rect" coords="(.*?)" href="(.*?)"', self.html):
rect = eval('(' + m.group(1) + ')')
href = m.group(2)
- map[rect] = href
- return map
+ mapp[rect] = href
+ return mapp
def _resolveCoords(self, coords, captchaMap):
x, y = coords
@@ -168,21 +168,21 @@ class ShareLinksBiz(Crypter):
folder = self.package.folder
self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
- # Return package info
+ # Return package info
return name, folder
def handleWebLinks(self):
package_links = []
self.logDebug("Handling Web links")
- #@TODO: Gather paginated web links
+ #@TODO: Gather paginated web links
pattern = r"javascript:_get\('(.*?)', \d+, ''\)"
ids = re.findall(pattern, self.html)
self.logDebug("Decrypting %d Web links" % len(ids))
- for i, id in enumerate(ids):
+ for i, ID in enumerate(ids):
try:
- self.logDebug("Decrypting Web link %d, [%s]" % (i + 1, id))
- dwLink = self.baseUrl + "/get/lnk/" + id
+ self.logDebug("Decrypting Web link %d, [%s]" % (i + 1, ID))
+ dwLink = self.baseUrl + "/get/lnk/" + ID
response = self.load(dwLink)
code = re.search(r'frm/(\d+)', response).group(1)
fwLink = self.baseUrl + "/get/frm/" + code
@@ -195,7 +195,7 @@ class ShareLinksBiz(Crypter):
self.logDebug("JsEngine returns value [%s] for redirection link" % dlLink)
package_links.append(dlLink)
except Exception, detail:
- self.logDebug("Error decrypting Web link [%s], %s" % (id, detail))
+ self.logDebug("Error decrypting Web link [%s], %s" % (ID, detail))
return package_links
def handleContainers(self):