Diffstat (limited to 'module/plugins/crypter')
-rw-r--r-- | module/plugins/crypter/FileserveComFolder.py | 32
-rw-r--r-- | module/plugins/crypter/FilesonicComFolder.py |  4
-rw-r--r-- | module/plugins/crypter/MediafireComFolder.py | 43
-rw-r--r-- | module/plugins/crypter/SerienjunkiesOrg.py   | 82
4 files changed, 121 insertions, 40 deletions
diff --git a/module/plugins/crypter/FileserveComFolder.py b/module/plugins/crypter/FileserveComFolder.py
new file mode 100644
index 000000000..9fe806971
--- /dev/null
+++ b/module/plugins/crypter/FileserveComFolder.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+import re
+
+from module.plugins.Crypter import Crypter
+
+class FileserveComFolder(Crypter):
+    __name__ = "FileserveComFolder"
+    __type__ = "crypter"
+    __pattern__ = r"http://(?:www\.)?fileserve.com/list/\w+"
+    __version__ = "0.11"
+    __description__ = """FileServeCom.com Folder Plugin"""
+    __author_name__ = ("fionnc")
+    __author_mail__ = ("fionnc@gmail.com")
+
+    FOLDER_PATTERN = r'<table class="file_list">(.*?)</table>'
+    LINK_PATTERN = r'<a href="([^"]+)" class="sheet_icon wbold">'
+
+    def decrypt(self, pyfile):
+        html = self.load(self.pyfile.url)
+
+        new_links = []
+
+        folder = re.search(self.FOLDER_PATTERN, html, re.DOTALL)
+        if folder is None: self.fail("Parse error (FOLDER)")
+
+        new_links.extend(re.findall(self.LINK_PATTERN, folder.group(1)))
+
+        if new_links:
+            self.core.files.addLinks(map(lambda s:"http://fileserve.com%s" % s, new_links), self.pyfile.package().id)
+        else:
+            self.fail('Could not extract any links')
\ No newline at end of file
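For reference, a minimal standalone sketch (not part of the plugin) of how FOLDER_PATTERN and LINK_PATTERN behave; the folder HTML below is invented for illustration, and the relative hrefs are prefixed with the host the same way decrypt() does before handing them to addLinks():

import re

FOLDER_PATTERN = r'<table class="file_list">(.*?)</table>'
LINK_PATTERN = r'<a href="([^"]+)" class="sheet_icon wbold">'

# hypothetical fileserve.com folder page snippet, for illustration only
html = '''<table class="file_list">
<tr><td><a href="/file/abc123/x.rar" class="sheet_icon wbold">x.rar</a></td></tr>
<tr><td><a href="/file/def456/y.rar" class="sheet_icon wbold">y.rar</a></td></tr>
</table>'''

folder = re.search(FOLDER_PATTERN, html, re.DOTALL)
if folder is not None:
    # relative paths become absolute fileserve.com URLs
    links = ["http://fileserve.com%s" % s for s in re.findall(LINK_PATTERN, folder.group(1))]
    print(links)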
diff --git a/module/plugins/crypter/FilesonicComFolder.py b/module/plugins/crypter/FilesonicComFolder.py
index 7bf1df381..b967a74a1 100644
--- a/module/plugins/crypter/FilesonicComFolder.py
+++ b/module/plugins/crypter/FilesonicComFolder.py
@@ -6,8 +6,8 @@ from module.plugins.Crypter import Crypter
 class FilesonicComFolder(Crypter):
     __name__ = "FilesonicComFolder"
     __type__ = "crypter"
-    __pattern__ = r"http://(\w*\.)?(sharingmatrix|filesonic|wupload)\.[^/]*/folder/\d+/?"
-    __version__ = "0.10"
+    __pattern__ = r"http://(\w*\.)?(sharingmatrix|filesonic|wupload)\.[^/]*/folder/\w+/?"
+    __version__ = "0.11"
     __description__ = """Filesonic.com/Wupload.com Folder Plugin"""
     __author_name__ = ("zoidberg")
     __author_mail__ = ("zoidberg@mujmail.cz")
diff --git a/module/plugins/crypter/MediafireComFolder.py b/module/plugins/crypter/MediafireComFolder.py
new file mode 100644
index 000000000..49a72ca76
--- /dev/null
+++ b/module/plugins/crypter/MediafireComFolder.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+import re
+from module.plugins.Crypter import Crypter
+from module.common.json_layer import json_loads
+
+class MediafireComFolder(Crypter):
+    __name__ = "MediafireComFolder"
+    __type__ = "crypter"
+    __pattern__ = r"http://(\w*\.)*mediafire\.com/(folder/|\?).*"
+    __version__ = "0.10"
+    __description__ = """Mediafire.com Folder Plugin"""
+    __author_name__ = ("zoidberg")
+    __author_mail__ = ("zoidberg@mujmail.cz")
+
+    FOLDER_KEY_PATTERN = r"var afI= '(\w+)';"
+    FILE_URL_PATTERN = '<meta property="og:url" content="http://www.mediafire.com/\?(\w+)"/>'
+
+    def decrypt(self, pyfile):
+        new_links = []
+
+        html = self.load(pyfile.url)
+        found = re.search(self.FILE_URL_PATTERN, html)
+        if found:
+            new_links.append("http://www.mediafire.com/download.php?" + found.group(1))
+        else:
+            found = re.search(self.FOLDER_KEY_PATTERN, html)
+            if not found: self.fail('Parse error: Folder Key')
+            folder_key = found.group(1)
+            self.logDebug("FOLDER KEY: %s" % folder_key)
+
+            json_resp = json_loads(self.load("http://www.mediafire.com/api/folder/get_info.php?folder_key=%s&response_format=json&version=1" % folder_key))
+            #self.logInfo(json_resp)
+            if json_resp['response']['result'] == "Success":
+                for link in json_resp['response']['folder_info']['files']:
+                    new_links.append("http://www.mediafire.com/download.php?%s" % link['quickkey'])
+            else:
+                self.fail(json_resp['response']['message'])
+
+        if new_links:
+            self.core.files.addLinks(new_links, self.pyfile.package().id)
+        else:
+            self.fail('Could not extract any links')
\ No newline at end of file
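A minimal standalone sketch (not part of the plugin) of how the folder API response is walked; the JSON below is an assumption built only from the keys the plugin actually reads (response.result and response.folder_info.files[].quickkey):

import json

# hypothetical get_info.php response, shaped after the keys read above
resp = json.loads('''{
  "response": {
    "result": "Success",
    "folder_info": {
      "files": [{"quickkey": "abc123xyz"}, {"quickkey": "def456uvw"}]
    }
  }
}''')

links = []
if resp['response']['result'] == "Success":
    for f in resp['response']['folder_info']['files']:
        links.append("http://www.mediafire.com/download.php?%s" % f['quickkey'])
print(links)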
diff --git a/module/plugins/crypter/SerienjunkiesOrg.py b/module/plugins/crypter/SerienjunkiesOrg.py
index 5b6295fe7..2178f5300 100644
--- a/module/plugins/crypter/SerienjunkiesOrg.py
+++ b/module/plugins/crypter/SerienjunkiesOrg.py
@@ -12,12 +12,14 @@ class SerienjunkiesOrg(Crypter):
     __type__ = "container"
     __pattern__ = r"http://.*?serienjunkies.org/.*?"
     __version__ = "0.31"
-    __config__ = [ ("preferredHoster", "str", "preferred hoster" , "RapidshareCom,UploadedTo,NetloadIn,FilefactoryCom,FreakshareNet,FilebaseTo,MegauploadCom,HotfileCom,DepositfilesCom,EasyshareCom,KickloadCom"),
-        ("changeName", "bool", "Take SJ.org episode name", "True") ]
+    __config__ = [("preferredHoster", "str", "preferred hoster",
+                   "RapidshareCom,UploadedTo,NetloadIn,FilefactoryCom,FreakshareNet,FilebaseTo,MegauploadCom,HotfileCom,DepositfilesCom,EasyshareCom,KickloadCom")
+        ,
+                  ("changeName", "bool", "Take SJ.org episode name", "True")]
     __description__ = """serienjunkies.org Container Plugin"""
     __author_name__ = ("mkaay")
     __author_mail__ = ("mkaay@mkaay.de")
-    
+
     def setup(self):
         self.hosterMap = {
             "rc": "RapidshareCom",
@@ -33,26 +35,26 @@ class SerienjunkiesOrg(Crypter):
             "es": "EasyshareCom",
             "kl": "KickloadCom",
             "fc": "FilesonicCom",
-            }
-        self.hosterMapReverse = dict((v,k) for k, v in self.hosterMap.iteritems())
-        
+        }
+        self.hosterMapReverse = dict((v, k) for k, v in self.hosterMap.iteritems())
+
         self.multiDL = False
         self.limitDL = 4
-        
+
     def getSJSrc(self, url):
         src = self.req.load(str(url))
         if not src.find("Enter Serienjunkies") == -1:
             sleep(1)
             src = self.req.load(str(url))
         return src
-        
+
     def handleShow(self, url):
         src = self.getSJSrc(url)
         soup = BeautifulSoup(src)
         nav = soup.find("div", attrs={"id": "scb"})
         for a in nav.findAll("a"):
             self.packages.append((unescape(a.text), [a["href"]], unescape(a.text)))
-        
+
     def handleSeason(self, url):
         src = self.getSJSrc(url)
         soup = BeautifulSoup(src)
@@ -63,7 +65,7 @@ class SerienjunkiesOrg(Crypter):
         self.log.debug("Preferred hoster: %s" % ", ".join(preferredHoster))
         groups = {}
         gid = -1
-        seasonName = unescape(soup.find("a", attrs={"rel":"bookmark"}).string)
+        seasonName = unescape(soup.find("a", attrs={"rel": "bookmark"}).string)
         for p in ps:
             if re.search("<strong>Dauer|<strong>Sprache|<strong>Format", str(p)):
                 var = p.findAll("strong")
@@ -115,37 +117,40 @@ class SerienjunkiesOrg(Crypter):
                 if hmatch:
                     break
             self.packages.append((package, links, package))
-            
+
     def handleEpisode(self, url):
         src = self.getSJSrc(url)
-        if not src.find("Du hast das Download-Limit überschritten! Bitte versuche es später nocheinmal.") == -1:
+        if not src.find(
+                "Du hast das Download-Limit überschritten! Bitte versuche es später nocheinmal.") == -1:
             self.fail(_("Downloadlimit reached"))
         else:
             soup = BeautifulSoup(src)
             form = soup.find("form")
-            packageName = soup.find("h1", attrs={"class":"wrap"}).text
-            captchaTag = soup.find(attrs={"src":re.compile("^/secure/")})
-            if not captchaTag:
-                sleep(1)
-                self.retry()
-            
-            captchaUrl = "http://download.serienjunkies.org"+captchaTag["src"]
-            result = self.decryptCaptcha(str(captchaUrl), imgtype="png")
-            sinp = form.find(attrs={"name":"s"})
-            
-            self.req.lastURL = str(url)
-            sj = self.load(str(url), post={'s': sinp["value"], 'c': result, 'action': "Download"})
-            
-            soup = BeautifulSoup(sj)
+            h1 = soup.find("h1")
+            packageName = h1.text
+            if h1.get("class") == "wrap":
+                captchaTag = soup.find(attrs={"src": re.compile("^/secure/")})
+                if not captchaTag:
+                    sleep(1)
+                    self.retry()
+
+                captchaUrl = "http://download.serienjunkies.org" + captchaTag["src"]
+                result = self.decryptCaptcha(str(captchaUrl), imgtype="png")
+                sinp = form.find(attrs={"name": "s"})
+
+                self.req.lastURL = str(url)
+                sj = self.load(str(url), post={'s': sinp["value"], 'c': result, 'action': "Download"})
+
+                soup = BeautifulSoup(sj)
             rawLinks = soup.findAll(attrs={"action": re.compile("^http://download.serienjunkies.org/")})
-            
+
             if not len(rawLinks) > 0:
                 sleep(1)
                 self.retry()
                 return
-            
+
             self.correctCaptcha()
-            
+
             links = []
             for link in rawLinks:
                 frameUrl = link["action"].replace("/go-", "/frame/go-")
@@ -156,27 +161,28 @@ class SerienjunkiesOrg(Crypter):
             packageName = self.pyfile.package().name
 
         self.packages.append((packageName, links, packageName))
-        
+
     def handleOldStyleLink(self, url):
         sj = self.req.load(str(url))
         soup = BeautifulSoup(sj)
-        form = soup.find("form", attrs={"action":re.compile("^http://serienjunkies.org")})
-        captchaTag = form.find(attrs={"src":re.compile("^/safe/secure/")})
-        captchaUrl = "http://serienjunkies.org"+captchaTag["src"]
+        form = soup.find("form", attrs={"action": re.compile("^http://serienjunkies.org")})
+        captchaTag = form.find(attrs={"src": re.compile("^/safe/secure/")})
+        captchaUrl = "http://serienjunkies.org" + captchaTag["src"]
         result = self.decryptCaptcha(str(captchaUrl))
         url = form["action"]
-        sinp = form.find(attrs={"name":"s"})
-        
-        self.req.load(str(url), post={'s': sinp["value"], 'c': result, 'dl.start': "Download"}, cookies=False, just_header=True)
+        sinp = form.find(attrs={"name": "s"})
+
+        self.req.load(str(url), post={'s': sinp["value"], 'c': result, 'dl.start': "Download"}, cookies=False,
+                      just_header=True)
         decrypted = self.req.lastEffectiveURL
         if decrypted == str(url):
             self.retry()
         self.packages.append((self.pyfile.package().name, [decrypted], self.pyfile.package().folder))
-        
+
     def handleFrame(self, url):
         self.req.load(str(url))
         return self.req.lastEffectiveURL
-        
+
     def decrypt(self, pyfile):
         showPattern = re.compile("^http://serienjunkies.org/serie/(.*)/$")
         seasonPattern = re.compile("^http://serienjunkies.org/.*?/(.*)/$")
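For reference, a standalone sketch of the hosterMap/hosterMapReverse pair built in setup(); the map is trimmed here, and the lookup at the end only illustrates how a configured hoster name maps back to the abbreviation used on the page:

hosterMap = {
    "rc": "RapidshareCom",
    "ul": "UploadedTo",
    "nl": "NetloadIn",
    "fc": "FilesonicCom",
}
# same dict inverted, as in setup(): abbreviation lookup by full hoster name
hosterMapReverse = dict((v, k) for k, v in hosterMap.items())

print(hosterMapReverse["FilesonicCom"])  # -> fc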