author     Walter Purcaro <vuolter@gmail.com>      2014-07-14 16:10:01 +0200
committer  Walter Purcaro <vuolter@gmail.com>      2014-07-15 16:26:07 +0200
commit     7b8c458cca7d21a029620f98e453f746fce69cd1 (patch)
tree       9e97b0003a00ff8ac9ee6b777d94bb998c911d05 /module/plugins/crypter
parent     Fix code indentation, some bad whitespaces and missing authors + use 'not' in... (diff)
download   pyload-7b8c458cca7d21a029620f98e453f746fce69cd1.tar.xz
Prefer single quote for dict key name
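The rule applied throughout this commit: dictionary key lookups use single quotes, while double quotes stay on ordinary string literals and list values. A minimal before/after sketch of the convention, using hypothetical data not taken from the plugins below:

    # Hypothetical example of the quoting convention this commit enforces.
    playlist = {"name": "Favorites", "has_more": False}

    name = playlist["name"]     # before: double-quoted key lookup
    name = playlist['name']     # after: single-quoted key lookup
    print("Playlist: " + name)  # double quotes remain on plain string literals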
Diffstat (limited to 'module/plugins/crypter')
-rw-r--r--   module/plugins/crypter/DailymotionBatch.py   | 16
-rw-r--r--   module/plugins/crypter/DlProtectCom.py       |  4
-rw-r--r--   module/plugins/crypter/DuckCryptInfo.py      |  2
-rw-r--r--   module/plugins/crypter/HoerbuchIn.py         |  2
-rw-r--r--   module/plugins/crypter/LinkSaveIn.py         |  2
-rw-r--r--   module/plugins/crypter/NCryptIn.py           |  2
-rw-r--r--   module/plugins/crypter/RelinkUs.py           |  4
-rw-r--r--   module/plugins/crypter/SafelinkingNet.py     | 12
-rw-r--r--   module/plugins/crypter/SerienjunkiesOrg.py   | 50
-rw-r--r--   module/plugins/crypter/TurbobitNetFolder.py  |  6
-rw-r--r--   module/plugins/crypter/YoutubeBatch.py       | 50
11 files changed, 75 insertions, 75 deletions
diff --git a/module/plugins/crypter/DailymotionBatch.py b/module/plugins/crypter/DailymotionBatch.py
index e13f267f4..d9309af90 100644
--- a/module/plugins/crypter/DailymotionBatch.py
+++ b/module/plugins/crypter/DailymotionBatch.py
@@ -45,8 +45,8 @@ class DailymotionBatch(Crypter):
         if "error" in playlist:
             return

-        name = playlist["name"]
-        owner = playlist["owner.screenname"]
+        name = playlist['name']
+        owner = playlist['owner.screenname']
         return name, owner

     def _getPlaylists(self, user_id, page=1):
@@ -57,10 +57,10 @@ class DailymotionBatch(Crypter):
         if "error" in user:
             return

-        for playlist in user["list"]:
-            yield playlist["id"]
+        for playlist in user['list']:
+            yield playlist['id']

-        if user["has_more"]:
+        if user['has_more']:
             for item in self._getPlaylists(user_id, page + 1):
                 yield item

@@ -75,10 +75,10 @@ class DailymotionBatch(Crypter):
         if "error" in playlist:
             return

-        for video in playlist["list"]:
-            yield video["url"]
+        for video in playlist['list']:
+            yield video['url']

-        if playlist["has_more"]:
+        if playlist['has_more']:
             for item in self._getVideos(id, page + 1):
                 yield item

diff --git a/module/plugins/crypter/DlProtectCom.py b/module/plugins/crypter/DlProtectCom.py
index f9214f0c8..5ef8eab21 100644
--- a/module/plugins/crypter/DlProtectCom.py
+++ b/module/plugins/crypter/DlProtectCom.py
@@ -53,14 +53,14 @@ class DlProtectCom(SimpleCrypter):
         post_req.update({"i": b64time, "submitform": "Decrypt+link"})

         if ">Password :" in self.html:
-            post_req["pwd"] = self.getPassword()
+            post_req['pwd'] = self.getPassword()

         if ">Security Code" in self.html:
             captcha_id = re.search(r'/captcha\.php\?uid=(.+?)"', self.html).group(1)
             captcha_url = "http://www.dl-protect.com/captcha.php?uid=" + captcha_id
             captcha_code = self.decryptCaptcha(captcha_url, imgtype="gif")
-            post_req["secure"] = captcha_code
+            post_req['secure'] = captcha_code

         self.html = self.load(self.pyfile.url, post=post_req)

diff --git a/module/plugins/crypter/DuckCryptInfo.py b/module/plugins/crypter/DuckCryptInfo.py
index 4cd3ec197..bbf6e6b0d 100644
--- a/module/plugins/crypter/DuckCryptInfo.py
+++ b/module/plugins/crypter/DuckCryptInfo.py
@@ -49,7 +49,7 @@ class DuckCryptInfo(Crypter):
     def handleLink(self, url):
         src = self.load(url)
         soup = BeautifulSoup(src)
-        link = soup.find("iframe")["src"]
+        link = soup.find("iframe")['src']
         if not link:
             self.logDebug('no links found - (Plugin out of date?)')
         else:

diff --git a/module/plugins/crypter/HoerbuchIn.py b/module/plugins/crypter/HoerbuchIn.py
index c6773b3f0..06a15fc65 100644
--- a/module/plugins/crypter/HoerbuchIn.py
+++ b/module/plugins/crypter/HoerbuchIn.py
@@ -28,7 +28,7 @@ class HoerbuchIn(Crypter):
             abookname = soup.find("a", attrs={"rel": "bookmark"}).text
             for a in soup.findAll("a", attrs={"href": self.protection}):
                 package = "%s (%s)" % (abookname, a.previousSibling.previousSibling.text[:-1])
-                links = self.decryptFolder(a["href"])
+                links = self.decryptFolder(a['href'])

                 self.packages.append((package, links, pyfile.package().folder))
         else:

diff --git a/module/plugins/crypter/LinkSaveIn.py b/module/plugins/crypter/LinkSaveIn.py
index e4497eb09..5fde4e958 100644
--- a/module/plugins/crypter/LinkSaveIn.py
+++ b/module/plugins/crypter/LinkSaveIn.py
@@ -34,7 +34,7 @@ class LinkSaveIn(Crypter):
         self.fileid = None
         self.captcha = False
         self.package = None
-        self.preferred_sources = ['cnl2', 'rsdf', 'ccf', 'dlc', 'web']
+        self.preferred_sources = ["cnl2", "rsdf", "ccf", "dlc", "web"]

     def decrypt(self, pyfile):
         # Init

diff --git a/module/plugins/crypter/NCryptIn.py b/module/plugins/crypter/NCryptIn.py
index cadf2760f..ec6533ffc 100644
--- a/module/plugins/crypter/NCryptIn.py
+++ b/module/plugins/crypter/NCryptIn.py
@@ -28,7 +28,7 @@ class NCryptIn(Crypter):
         self.package = None
         self.html = None
         self.cleanedHtml = None
-        self.links_source_order = ['cnl2', 'rsdf', 'ccf', 'dlc', 'web']
+        self.links_source_order = ["cnl2", "rsdf", "ccf", "dlc", "web"]
         self.protection_type = None

     def decrypt(self, pyfile):

diff --git a/module/plugins/crypter/RelinkUs.py b/module/plugins/crypter/RelinkUs.py
index f5c158d2e..1187692a0 100644
--- a/module/plugins/crypter/RelinkUs.py
+++ b/module/plugins/crypter/RelinkUs.py
@@ -19,7 +19,7 @@ class RelinkUs(Crypter):
     __author_mail__ = "fragonib[AT]yahoo[DOT]es"

     # Constants
-    PREFERRED_LINK_SOURCES = ['cnl2', 'dlc', 'web']
+    PREFERRED_LINK_SOURCES = ["cnl2", "dlc", "web"]

     OFFLINE_TOKEN = r'<title>Tattooside'
     PASSWORD_TOKEN = r'container_password\.php'
@@ -197,7 +197,7 @@ class RelinkUs(Crypter):
         try:
             dlc = self.load(container_url)
             dlc_filename = self.fileid + ".dlc"
-            dlc_filepath = os.path.join(self.config["general"]["download_folder"], dlc_filename)
+            dlc_filepath = os.path.join(self.config['general']['download_folder'], dlc_filename)
             f = open(dlc_filepath, "wb")
             f.write(dlc)
             f.close()

diff --git a/module/plugins/crypter/SafelinkingNet.py b/module/plugins/crypter/SafelinkingNet.py
index cb2617168..022f3e5ff 100644
--- a/module/plugins/crypter/SafelinkingNet.py
+++ b/module/plugins/crypter/SafelinkingNet.py
@@ -40,7 +40,7 @@ class SafelinkingNet(Crypter):
             if "link-password" in self.html:
                 password = pyfile.package().password
-                postData["link-password"] = password
+                postData['link-password'] = password

             if "altcaptcha" in self.html:
                 for _ in xrange(5):
@@ -53,8 +53,8 @@ class SafelinkingNet(Crypter):
                         self.fail("Error parsing captcha")

                     challenge, response = captcha.challenge(captchaKey)
-                    postData["adcopy_challenge"] = challenge
-                    postData["adcopy_response"] = response
+                    postData['adcopy_challenge'] = challenge
+                    postData['adcopy_response'] = response
                     self.html = self.load(url, post=postData)
                     if "The password you entered was incorrect" in self.html:
@@ -72,9 +72,9 @@ class SafelinkingNet(Crypter):
             if m:
                 linkDict = json_loads(m.group(1))
                 for link in linkDict:
-                    if not "http://" in link["full"]:
-                        packageLinks.append("https://safelinking.net/d/" + link["full"])
+                    if not "http://" in link['full']:
+                        packageLinks.append("https://safelinking.net/d/" + link['full'])
                     else:
-                        packageLinks.append(link["full"])
+                        packageLinks.append(link['full'])

         self.core.files.addLinks(packageLinks, pyfile.package().id)

diff --git a/module/plugins/crypter/SerienjunkiesOrg.py b/module/plugins/crypter/SerienjunkiesOrg.py
index 6fbbfedb3..ecca56209 100644
--- a/module/plugins/crypter/SerienjunkiesOrg.py
+++ b/module/plugins/crypter/SerienjunkiesOrg.py
@@ -51,9 +51,9 @@ class SerienjunkiesOrg(Crypter):
         package_links = []
         for a in nav.findAll("a"):
             if self.getConfig("changeNameSJ") == "Show":
-                package_links.append(a["href"])
+                package_links.append(a['href'])
             else:
-                package_links.append(a["href"] + "#hasName")
+                package_links.append(a['href'] + "#hasName")
         if self.getConfig("changeNameSJ") == "Show":
             self.packages.append((packageName, package_links, packageName))
         else:
@@ -85,32 +85,32 @@ class SerienjunkiesOrg(Crypter):
                     opts[n.strip()] = val.strip()
                 gid += 1
                 groups[gid] = {}
-                groups[gid]["ep"] = {}
-                groups[gid]["opts"] = opts
+                groups[gid]['ep'] = {}
+                groups[gid]['opts'] = opts
             elif re.search("<strong>Download:", str(p)):
                 parts = str(p).split("<br />")
                 if re.search("<strong>", parts[0]):
                     ename = re.search('<strong>(.*?)</strong>', parts[0]).group(1).strip().decode("utf-8").replace(
                         "–", "-")
-                    groups[gid]["ep"][ename] = {}
+                    groups[gid]['ep'][ename] = {}
                 parts.remove(parts[0])
                 for part in parts:
                     hostername = re.search(r" \| ([-a-zA-Z0-9]+\.\w+)", part)
                     if hostername:
                         hostername = hostername.group(1)
-                        groups[gid]["ep"][ename][hostername] = []
+                        groups[gid]['ep'][ename][hostername] = []
                         links = re.findall('href="(.*?)"', part)
                         for link in links:
-                            groups[gid]["ep"][ename][hostername].append(link + "#hasName")
+                            groups[gid]['ep'][ename][hostername].append(link + "#hasName")

         links = []
         for g in groups.values():
-            for ename in g["ep"]:
-                links.extend(self.getpreferred(g["ep"][ename]))
+            for ename in g['ep']:
+                links.extend(self.getpreferred(g['ep'][ename]))
                 if self.getConfig("changeNameSJ") == "Episode":
                     self.packages.append((ename, links, ename))
                     links = []
-            package = "%s (%s, %s)" % (seasonName, g["opts"]["Format"], g["opts"]["Sprache"])
+            package = "%s (%s, %s)" % (seasonName, g['opts']['Format'], g['opts']['Sprache'])
             if self.getConfig("changeNameSJ") == "Format":
                 self.packages.append((package, links, package))
                 links = []
@@ -135,12 +135,12 @@ class SerienjunkiesOrg(Crypter):
                 sleep(5)
                 self.retry()

-            captchaUrl = "http://download.serienjunkies.org" + captchaTag["src"]
+            captchaUrl = "http://download.serienjunkies.org" + captchaTag['src']
             result = self.decryptCaptcha(str(captchaUrl), imgtype="png")
             sinp = form.find(attrs={"name": "s"})

             self.req.lastURL = str(url)
-            sj = self.load(str(url), post={'s': sinp["value"], 'c': result, 'action': "Download"})
+            sj = self.load(str(url), post={'s': sinp['value'], 'c': result, 'action': "Download"})

             soup = BeautifulSoup(sj)
             rawLinks = soup.findAll(attrs={"action": re.compile("^http://download.serienjunkies.org/")})
@@ -154,7 +154,7 @@ class SerienjunkiesOrg(Crypter):

         links = []
         for link in rawLinks:
-            frameUrl = link["action"].replace("/go-", "/frame/go-")
+            frameUrl = link['action'].replace("/go-", "/frame/go-")
             links.append(self.handleFrame(frameUrl))

         if re.search("#hasName", url) or ((self.getConfig("changeNameSJ") == "Packagename") and (self.getConfig("changeNameDJ") == "Packagename")):
@@ -171,12 +171,12 @@ class SerienjunkiesOrg(Crypter):
         soup = BeautifulSoup(sj)
         form = soup.find("form", attrs={"action": re.compile("^http://serienjunkies.org")})
         captchaTag = form.find(attrs={"src": re.compile("^/safe/secure/")})
-        captchaUrl = "http://serienjunkies.org" + captchaTag["src"]
+        captchaUrl = "http://serienjunkies.org" + captchaTag['src']
         result = self.decryptCaptcha(str(captchaUrl))
-        url = form["action"]
+        url = form['action']
         sinp = form.find(attrs={"name": "s"})

-        self.req.load(str(url), post={'s': sinp["value"], 'c': result, 'dl.start': "Download"}, cookies=False,
+        self.req.load(str(url), post={'s': sinp['value'], 'c': result, 'dl.start': "Download"}, cookies=False,
                       just_header=True)
         decrypted = self.req.lastEffectiveURL
         if decrypted == str(url):
@@ -215,32 +215,32 @@ class SerienjunkiesOrg(Crypter):
                     opts[n.strip()] = val.strip()
                 gid += 1
                 groups[gid] = {}
-                groups[gid]["ep"] = {}
-                groups[gid]["opts"] = opts
+                groups[gid]['ep'] = {}
+                groups[gid]['opts'] = opts
             elif re.search("<strong>Download:", str(p)):
                 parts = str(p).split("<br />")
                 if re.search("<strong>", parts[0]):
                     ename = re.search('<strong>(.*?)</strong>', parts[0]).group(1).strip().decode("utf-8").replace(
                         "–", "-")
-                    groups[gid]["ep"][ename] = {}
+                    groups[gid]['ep'][ename] = {}
                 parts.remove(parts[0])
                 for part in parts:
                     hostername = re.search(r" \| ([-a-zA-Z0-9]+\.\w+)", part)
                     if hostername:
                         hostername = hostername.group(1)
-                        groups[gid]["ep"][ename][hostername] = []
+                        groups[gid]['ep'][ename][hostername] = []
                         links = re.findall('href="(.*?)"', part)
                         for link in links:
-                            groups[gid]["ep"][ename][hostername].append(link + "#hasName")
+                            groups[gid]['ep'][ename][hostername].append(link + "#hasName")

         links = []
         for g in groups.values():
-            for ename in g["ep"]:
-                links.extend(self.getpreferred(g["ep"][ename]))
+            for ename in g['ep']:
+                links.extend(self.getpreferred(g['ep'][ename]))
                 if self.getConfig("changeNameDJ") == "Episode":
                     self.packages.append((ename, links, ename))
                     links = []
-            package = "%s (%s, %s)" % (seasonName, g["opts"]["Format"], g["opts"]["Sprache"])
+            package = "%s (%s, %s)" % (seasonName, g['opts']['Format'], g['opts']['Sprache'])
             if self.getConfig("changeNameDJ") == "Format":
                 self.packages.append((package, links, package))
                 links = []
@@ -255,7 +255,7 @@ class SerienjunkiesOrg(Crypter):
         soup = BeautifulSoup(src)
         content = soup.find("div", attrs={"id": "content"})
         for a in content.findAll("a", attrs={"rel": "bookmark"}):
-            package_links.append(a["href"])
+            package_links.append(a['href'])
         self.core.files.addLinks(package_links, self.pyfile.package().id)

     def decrypt(self, pyfile):

diff --git a/module/plugins/crypter/TurbobitNetFolder.py b/module/plugins/crypter/TurbobitNetFolder.py
index d9e63b4ce..2dcd65757 100644
--- a/module/plugins/crypter/TurbobitNetFolder.py
+++ b/module/plugins/crypter/TurbobitNetFolder.py
@@ -36,9 +36,9 @@ class TurbobitNetFolder(SimpleCrypter):
                             get={"rootId": id, "rows": 200, "page": page}, decode=True)
         grid = json_loads(gridFile)

-        if grid["rows"]:
-            for i in grid["rows"]:
-                yield i["id"]
+        if grid['rows']:
+            for i in grid['rows']:
+                yield i['id']
             for id in self._getLinks(id, page + 1):
                 yield id
         else:

diff --git a/module/plugins/crypter/YoutubeBatch.py b/module/plugins/crypter/YoutubeBatch.py
index 1af9475eb..dd5d937d5 100644
--- a/module/plugins/crypter/YoutubeBatch.py
+++ b/module/plugins/crypter/YoutubeBatch.py
@@ -45,21 +45,21 @@ class YoutubeBatch(Crypter):
     def getChannel(self, user):
         channels = self.api_response("channels", {"part": "id,snippet,contentDetails", "forUsername": user, "maxResults": "50"})

-        if channels["items"]:
-            channel = channels["items"][0]
-            return {"id": channel["id"],
-                    "title": channel["snippet"]["title"],
-                    "relatedPlaylists": channel["contentDetails"]["relatedPlaylists"],
+        if channels['items']:
+            channel = channels['items'][0]
+            return {"id": channel['id'],
+                    "title": channel['snippet']['title'],
+                    "relatedPlaylists": channel['contentDetails']['relatedPlaylists'],
                     "user": user}  # One lone channel for user?

     def getPlaylist(self, p_id):
         playlists = self.api_response("playlists", {"part": "snippet", "id": p_id})
-        if playlists["items"]:
-            playlist = playlists["items"][0]
+        if playlists['items']:
+            playlist = playlists['items'][0]
             return {"id": p_id,
-                    "title": playlist["snippet"]["title"],
-                    "channelId": playlist["snippet"]["channelId"],
-                    "channelTitle": playlist["snippet"]["channelTitle"]}
+                    "title": playlist['snippet']['title'],
+                    "channelId": playlist['snippet']['channelId'],
+                    "channelTitle": playlist['snippet']['channelTitle']}

     def _getPlaylists(self, id, token=None):
         req = {"part": "id", "maxResults": "50", "channelId": id}
@@ -68,11 +68,11 @@ class YoutubeBatch(Crypter):

         playlists = self.api_response("playlists", req)

-        for playlist in playlists["items"]:
-            yield playlist["id"]
+        for playlist in playlists['items']:
+            yield playlist['id']

         if "nextPageToken" in playlists:
-            for item in self._getPlaylists(id, playlists["nextPageToken"]):
+            for item in self._getPlaylists(id, playlists['nextPageToken']):
                 yield item

     def getPlaylists(self, ch_id):
@@ -85,11 +85,11 @@ class YoutubeBatch(Crypter):

         playlist = self.api_response("playlistItems", req)

-        for item in playlist["items"]:
-            yield item["contentDetails"]["videoId"]
+        for item in playlist['items']:
+            yield item['contentDetails']['videoId']

         if "nextPageToken" in playlist:
-            for item in self._getVideosId(id, playlist["nextPageToken"]):
+            for item in self._getVideosId(id, playlist['nextPageToken']):
                 yield item

     def getVideosId(self, p_id):
@@ -106,18 +106,18 @@ class YoutubeBatch(Crypter):
             channel = self.getChannel(user)

             if channel:
-                playlists = self.getPlaylists(channel["id"])
-                self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), channel["title"]))
+                playlists = self.getPlaylists(channel['id'])
+                self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), channel['title']))

-                relatedplaylist = {p_name: self.getPlaylist(p_id) for p_name, p_id in channel["relatedPlaylists"].iteritems()}
+                relatedplaylist = {p_name: self.getPlaylist(p_id) for p_name, p_id in channel['relatedPlaylists'].iteritems()}
                 self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())

-                relatedplaylist["uploads"]["title"] = "Unplaylisted videos"
-                relatedplaylist["uploads"]["checkDups"] = True  #: checkDups flag
+                relatedplaylist['uploads']['title'] = "Unplaylisted videos"
+                relatedplaylist['uploads']['checkDups'] = True  #: checkDups flag

                 for p_name, p_data in relatedplaylist.iteritems():
                     if self.getConfig(p_name):
-                        p_data["title"] += " of " + user
+                        p_data['title'] += " of " + user
                         playlists.append(p_data)
             else:
                 playlists = []
@@ -131,9 +131,9 @@ class YoutubeBatch(Crypter):
         addedvideos = []
         urlize = lambda x: "https://www.youtube.com/watch?v=" + x
         for p in playlists:
-            p_name = p["title"]
-            p_videos = self.getVideosId(p["id"])
-            p_folder = save_join(self.config['general']['download_folder'], p["channelTitle"], p_name)
+            p_name = p['title']
+            p_videos = self.getVideosId(p['id'])
+            p_folder = save_join(self.config['general']['download_folder'], p['channelTitle'], p_name)
             self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))

             if not p_videos: