path: root/pyload/api/DownloadPreparingApi.py
author    RaNaN <Mast3rRaNaN@hotmail.de>  2013-08-18 17:01:17 +0200
committer RaNaN <Mast3rRaNaN@hotmail.de>  2013-08-18 17:01:17 +0200
commit    9a6ea22616cf3cc67e292c908521b79764400faf (patch)
tree      1924843f28d992490d867d0557da90dfb1da6404 /pyload/api/DownloadPreparingApi.py
parent    fixed login (diff)
download  pyload-9a6ea22616cf3cc67e292c908521b79764400faf.tar.xz
new linkgrabber
Diffstat (limited to 'pyload/api/DownloadPreparingApi.py')
-rw-r--r--  pyload/api/DownloadPreparingApi.py | 72
1 file changed, 31 insertions, 41 deletions
diff --git a/pyload/api/DownloadPreparingApi.py b/pyload/api/DownloadPreparingApi.py
index 0a47fe5ab..d74f0aee2 100644
--- a/pyload/api/DownloadPreparingApi.py
+++ b/pyload/api/DownloadPreparingApi.py
@@ -3,7 +3,9 @@
from itertools import chain
-from pyload.Api import Api, RequirePerm, Permission, OnlineCheck, LinkStatus, urlmatcher
+from pyload.Api import Api, DownloadStatus as DS,\
+ RequirePerm, Permission, OnlineCheck, LinkStatus, urlmatcher
+from pyload.utils import uniqify
from pyload.utils.fs import join
from pyload.utils.packagetools import parseNames
from pyload.network.RequestFactory import getURL
@@ -14,33 +16,13 @@ class DownloadPreparingApi(ApiComponent):
""" All kind of methods to parse links or retrieve online status """
@RequirePerm(Permission.Add)
- def parseURLs(self, html=None, url=None):
- """Parses html content or any arbitrary text for links and returns result of `checkURLs`
-
- :param html: html source
- :return:
- """
- urls = []
-
- if html:
- urls += [x[0] for x in urlmatcher.findall(html)]
-
- if url:
- page = getURL(url)
- urls += [x[0] for x in urlmatcher.findall(page)]
-
- # remove duplicates
- return self.checkURLs(set(urls))
-
-
- @RequirePerm(Permission.Add)
- def checkURLs(self, urls):
+ def parseLinks(self, links):
""" Gets urls and returns pluginname mapped to list of matching urls.
- :param urls:
+ :param links:
:return: {plugin: urls}
"""
- data, crypter = self.core.pluginManager.parseUrls(urls)
+ data, crypter = self.core.pluginManager.parseUrls(links)
plugins = {}
for url, plugin in chain(data, crypter):
@@ -52,31 +34,23 @@ class DownloadPreparingApi(ApiComponent):
return plugins
@RequirePerm(Permission.Add)
- def checkOnlineStatus(self, urls):
+ def checkLinks(self, links):
""" initiates online status check, will also decrypt files.
:param urls:
:return: initial set of data as :class:`OnlineCheck` instance containing the result id
"""
- data, crypter = self.core.pluginManager.parseUrls(urls)
+ hoster, crypter = self.core.pluginManager.parseUrls(links)
# initial result does not contain the crypter links
- tmp = [(url, (url, LinkStatus(url, pluginname, "unknown", 3, 0))) for url, pluginname in data]
+ tmp = [(url, LinkStatus(url, url, pluginname, -1, DS.Queued)) for url, pluginname in hoster + crypter]
data = parseNames(tmp)
- result = {}
-
- for k, v in data.iteritems():
- for url, status in v:
- status.packagename = k
- result[url] = status
-
- data.update(crypter) # hoster and crypter will be processed
- rid = self.core.threadManager.createResultThread(data, False)
+ rid = self.core.threadManager.createResultThread(data)
- return OnlineCheck(rid, result)
+ return OnlineCheck(rid, data)
@RequirePerm(Permission.Add)
- def checkOnlineStatusContainer(self, urls, container, data):
+ def checkContainer(self, filename, data):
""" checks online status of urls and a submitted container file
:param urls: list of urls
@@ -84,11 +58,27 @@ class DownloadPreparingApi(ApiComponent):
:param data: file content
:return: :class:`OnlineCheck`
"""
- th = open(join(self.core.config["general"]["download_folder"], "tmp_" + container), "wb")
+ th = open(join(self.core.config["general"]["download_folder"], "tmp_" + filename), "wb")
th.write(str(data))
th.close()
- urls.append(th.name)
- return self.checkOnlineStatus(urls)
+ return self.checkLinks([th.name])
+
+ @RequirePerm(Permission.Add)
+ def checkHTML(self, html, url):
+ """Parses html content or any arbitrary text for links and returns result of `checkURLs`
+
+ :param html: html source
+ :return:
+ """
+ urls = []
+ if html:
+ urls += [x[0] for x in urlmatcher.findall(html)]
+ if url:
+ page = getURL(url)
+ urls += [x[0] for x in urlmatcher.findall(page)]
+
+ # remove duplicates
+ return self.checkLinks(uniqify(urls))
@RequirePerm(Permission.Add)
def pollResults(self, rid):
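
For orientation, a minimal client-side sketch of the renamed calls introduced by this commit (parseLinks, checkLinks, pollResults). The `api` handle and the convention that pollResults() signals completion once rid becomes -1 are assumptions for illustration, not something this diff guarantees.

    import time

    def collect_online_status(api, links):
        # map each link to the plugin that claims it (parseLinks replaces the old checkURLs)
        plugins = api.parseLinks(links)
        # start the online check / decryption (checkLinks replaces checkOnlineStatus)
        check = api.checkLinks(links)
        results = dict(check.data)
        rid = check.rid
        # assumption: pollResults() keeps returning partial data until rid == -1
        while rid != -1:
            time.sleep(1)
            check = api.pollResults(rid)
            results.update(check.data)
            rid = check.rid
        return plugins, results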