author    RaNaN <Mast3rRaNaN@hotmail.de>  2011-12-31 16:01:24 +0100
committer RaNaN <Mast3rRaNaN@hotmail.de>  2011-12-31 16:01:24 +0100
commit    d35c003cc53d4723d1dfe0d81eeb9bea78cee594 (patch)
tree      ff9d47a0cee6116836955e37bf4471c1f1d82bee
parent    some account fixes (diff)
download  pyload-d35c003cc53d4723d1dfe0d81eeb9bea78cee594.tar.xz
new crypter plugin API, decrypting now possible.
-rw-r--r--  module/Api.py | 52
-rw-r--r--  module/HookManager.py | 12
-rw-r--r--  module/PluginThread.py | 673
-rw-r--r--  module/PyFile.py | 3
-rw-r--r--  module/PyPackage.py | 3
-rw-r--r--  module/config/ConfigParser.py | 8
-rw-r--r--  module/database/FileDatabase.py | 151
-rw-r--r--  module/interaction/EventManager.py | 3
-rw-r--r--  module/interaction/PullEvents.py | 68
-rw-r--r--  module/plugins/Account.py | 7
-rw-r--r--  module/plugins/AccountManager.py | 10
-rw-r--r--  module/plugins/Base.py | 101
-rw-r--r--  module/plugins/Container.py | 75
-rw-r--r--  module/plugins/Crypter.py | 250
-rw-r--r--  module/plugins/Hook.py | 2
-rw-r--r--  module/plugins/Hoster.py | 444
-rw-r--r--  module/plugins/PluginManager.py | 52
-rw-r--r--  module/plugins/container/CCF.py | 4
-rw-r--r--  module/plugins/container/LinkList.py | 4
-rw-r--r--  module/plugins/container/RSDF.py | 4
-rw-r--r--  module/plugins/hooks/UpdateManager.py | 5
-rw-r--r--  module/remote/socketbackend/ttypes.py | 13
-rw-r--r--  module/remote/thriftbackend/pyload.thrift | 11
-rwxr-xr-x  module/remote/thriftbackend/thriftgen/pyload/Pyload-remote | 8
-rw-r--r--  module/remote/thriftbackend/thriftgen/pyload/Pyload.py | 32
-rw-r--r--  module/threads/BaseThread.py | 117
-rw-r--r--  module/threads/DecrypterThread.py | 35
-rw-r--r--  module/threads/DownloadThread.py | 215
-rw-r--r--  module/threads/HookThread.py | 56
-rw-r--r--  module/threads/InfoThread.py | 215
-rw-r--r--  module/threads/ThreadManager.py (renamed from module/ThreadManager.py) | 68
-rw-r--r--  module/threads/__init__.py | 0
-rw-r--r--  module/unescape.py | 3
-rw-r--r--  module/utils/__init__.py (renamed from module/Utils.py) | 59
-rw-r--r--  module/utils/fs.py | 67
-rw-r--r--  module/web/json_app.py | 6
-rwxr-xr-x  pyLoadCore.py | 16
37 files changed, 1645 insertions, 1207 deletions
diff --git a/module/Api.py b/module/Api.py
index 99fb4c1e7..deac1a19f 100644
--- a/module/Api.py
+++ b/module/Api.py
@@ -285,12 +285,13 @@ class Api(Iface):
return data
@permission(PERMS.ADD)
- def addPackage(self, name, links, dest=Destination.Queue):
+ def addPackage(self, name, links, dest=Destination.Queue, password=""):
"""Adds a package, with links to desired destination.
:param name: name of the new package
:param links: list of urls
:param dest: `Destination`
+ :param password: password as string, can be empty
:return: package id of the new package
"""
if self.core.config['general']['folder_per_package']:
@@ -300,15 +301,28 @@ class Api(Iface):
folder = folder.replace("http://", "").replace(":", "").replace("/", "_").replace("\\", "_")
- pid = self.core.files.addPackage(name, folder, dest)
+ self.core.log.info(_("Added package %(name)s containing %(count)d links") % {"name": name, "count": len(links)})
+ pid = self.core.files.addPackage(name, folder, dest, password)
+ self.addFiles(pid, links)
- self.core.files.addLinks(links, pid)
+ return pid
- self.core.log.info(_("Added package %(name)s containing %(count)d links") % {"name": name, "count": len(links)})
+ @permission(PERMS.ADD)
+ def addFiles(self, pid, links):
+ """Adds files to specific package.
- self.core.files.save()
+ :param pid: package id
+ :param links: list of urls
+ """
+ hoster, crypter = self.core.pluginManager.parseUrls(links)
- return pid
+ self.core.files.addLinks(hoster, pid)
+
+ self.core.threadManager.createInfoThread(hoster, pid)
+ self.core.threadManager.createDecryptThread(crypter, pid)
+
+ self.core.log.info(_("Added %(count)d links to package #%(package)d ") % {"count": len(links), "package": pid})
+ self.core.files.save()
@permission(PERMS.ADD)
def parseURLs(self, html=None, url=None):
@@ -337,7 +351,7 @@ class Api(Iface):
:param urls:
:return: {plugin: urls}
"""
- data = self.core.pluginManager.parseUrls(urls)
+ data, crypter = self.core.pluginManager.parseUrls(urls)
plugins = {}
for url, plugin in data:
@@ -355,7 +369,7 @@ class Api(Iface):
:param urls:
:return: initial set of data as `OnlineCheck` instance containing the result id
"""
- data = self.core.pluginManager.parseUrls(urls)
+ data, crypter = self.core.pluginManager.parseUrls(urls)
rid = self.core.threadManager.createResultThread(data, False)
@@ -431,7 +445,7 @@ class Api(Iface):
:param dest: `Destination`
:return: None
"""
- data = self.core.pluginManager.parseUrls(links)
+ data, crypter = self.core.pluginManager.parseUrls(links)
self.core.threadManager.createResultThread(data, True)
@@ -557,19 +571,6 @@ class Api(Iface):
links=[self._convertPyFile(x) for x in pack["links"].itervalues()])
for pack in self.core.files.getCompleteData(Destination.Collector).itervalues()]
-
- @permission(PERMS.ADD)
- def addFiles(self, pid, links):
- """Adds files to specific package.
-
- :param pid: package id
- :param links: list of urls
- """
- self.core.files.addLinks(links, int(pid))
-
- self.core.log.info(_("Added %(count)d links to package #%(package)d ") % {"count": len(links), "package": pid})
- self.core.files.save()
-
@permission(PERMS.MODIFY)
def pushToQueue(self, pid):
"""Moves package from Collector to Queue.
@@ -925,8 +926,8 @@ class Api(Iface):
user = self.checkAuth(username, password)
if user:
return UserData(user["name"], user["email"], user["role"], user["permission"], user["template"])
- else:
- return UserData()
+
+ raise UserDoesNotExists(username)
def getAllUserData(self):
@@ -972,13 +973,12 @@ class Api(Iface):
plugin = info.plugin
func = info.func
args = info.arguments
- parse = info.parseArguments
if not self.hasService(plugin, func):
raise ServiceDoesNotExists(plugin, func)
try:
- ret = self.core.hookManager.callRPC(plugin, func, args, parse)
+ ret = self.core.hookManager.callRPC(plugin, func, args)
return str(ret)
except Exception, e:
raise ServiceException(e.message)
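
The Api.py hunks above split package creation and link handling: addPackage() gains an optional password and delegates to the re-added addFiles(), which routes hoster links to an info thread and crypter links to the new decrypter thread via pluginManager.parseUrls(). A minimal usage sketch against the changed signatures; the package name, password and URLs below are placeholders, not part of the patch:

    # Sketch only: assumes access to a running core's Api instance from this commit.
    from module.Api import Destination

    def queue_links(api, urls):
        # addPackage() now accepts a package password and returns the new pid;
        # the links passed here are handed to addFiles() internally.
        pid = api.addPackage("example pack", urls, dest=Destination.Queue, password="secret")
        # Later additions go through addFiles(), which separates hoster and
        # crypter urls before anything is written to the file database.
        api.addFiles(pid, ["http://example.com/part2.rar"])
        return pid
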
diff --git a/module/HookManager.py b/module/HookManager.py
index e32508c48..386be0f5c 100644
--- a/module/HookManager.py
+++ b/module/HookManager.py
@@ -25,7 +25,7 @@ from threading import RLock
from types import MethodType
-from module.PluginThread import HookThread
+from module.threads.HookThread import HookThread
from module.plugins.PluginManager import literal_eval
from utils import lock
@@ -39,7 +39,7 @@ class HookManager:
Only do very short tasks or use threads.
**Known Events:**
- Most hook methods exists as events. These are the additional known events.
+ Most hook methods exists as events. These are some additional known events.
===================== ============== ==================================
Name Arguments Description
@@ -103,10 +103,10 @@ class HookManager:
else:
self.methods[plugin] = {func: doc}
- def callRPC(self, plugin, func, args, parse):
- if not args: args = tuple()
- if parse:
- args = tuple([literal_eval(x) for x in args])
+ def callRPC(self, plugin, func, args):
+ if not args: args = []
+ else:
+ args = literal_eval(args)
plugin = self.pluginMap[plugin]
f = getattr(plugin, func)
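
With this change callRPC() no longer receives a tuple of argument strings plus a parse flag; it takes a single Python-literal string and runs it through literal_eval() before calling the exposed hook method. A hedged sketch of a caller; the hook and method names are invented for illustration:

    # Sketch: args is one literal string which callRPC() evaluates into real values.
    def call_hook_service(core):
        # "ExampleHook" and "exampleMethod" are hypothetical, not part of the patch.
        # literal_eval("[42, True]") -> [42, True], unpacked as exampleMethod(42, True)
        return core.hookManager.callRPC("ExampleHook", "exampleMethod", "[42, True]")
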
diff --git a/module/PluginThread.py b/module/PluginThread.py
deleted file mode 100644
index 71089482f..000000000
--- a/module/PluginThread.py
+++ /dev/null
@@ -1,673 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 3 of the License,
- or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- See the GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, see <http://www.gnu.org/licenses/>.
-
- @author: RaNaN
-"""
-
-from Queue import Queue
-from threading import Thread
-from os import listdir, stat
-from os.path import join
-from time import sleep, time, strftime, gmtime
-from traceback import print_exc, format_exc
-from pprint import pformat
-from sys import exc_info, exc_clear
-from copy import copy
-from types import MethodType
-
-from pycurl import error
-
-from PyFile import PyFile
-from plugins.Plugin import Abort, Fail, Reconnect, Retry, SkipDownload
-from common.packagetools import parseNames
-from utils import save_join
-from Api import OnlineStatus
-
-class PluginThread(Thread):
- """abstract base class for thread types"""
-
- def __init__(self, manager):
- """Constructor"""
- Thread.__init__(self)
- self.setDaemon(True)
- self.m = manager #thread manager
-
-
- def writeDebugReport(self, pyfile):
- """ writes a
- :return:
- """
-
- dump_name = "debug_%s_%s.zip" % (pyfile.pluginname, strftime("%d-%m-%Y_%H-%M-%S"))
- dump = self.getDebugDump(pyfile)
-
- try:
- import zipfile
-
- zip = zipfile.ZipFile(dump_name, "w")
-
- for f in listdir(join("tmp", pyfile.pluginname)):
- try:
- # avoid encoding errors
- zip.write(join("tmp", pyfile.pluginname, f), save_join(pyfile.pluginname, f))
- except:
- pass
-
- info = zipfile.ZipInfo(save_join(pyfile.pluginname, "debug_Report.txt"), gmtime())
- info.external_attr = 0644 << 16L # change permissions
-
- zip.writestr(info, dump)
- zip.close()
-
- if not stat(dump_name).st_size:
- raise Exception("Empty Zipfile")
-
- except Exception, e:
- self.m.log.debug("Error creating zip file: %s" % e)
-
- dump_name = dump_name.replace(".zip", ".txt")
- f = open(dump_name, "wb")
- f.write(dump)
- f.close()
-
- self.m.core.log.info("Debug Report written to %s" % dump_name)
-
- def getDebugDump(self, pyfile):
- dump = "pyLoad %s Debug Report of %s %s \n\nTRACEBACK:\n %s \n\nFRAMESTACK:\n" % (
- self.m.core.api.getServerVersion(), pyfile.pluginname, pyfile.plugin.__version__, format_exc())
-
- tb = exc_info()[2]
- stack = []
- while tb:
- stack.append(tb.tb_frame)
- tb = tb.tb_next
-
- for frame in stack[1:]:
- dump += "\nFrame %s in %s at line %s\n" % (frame.f_code.co_name,
- frame.f_code.co_filename,
- frame.f_lineno)
-
- for key, value in frame.f_locals.items():
- dump += "\t%20s = " % key
- try:
- dump += pformat(value) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- del frame
-
- del stack #delete it just to be sure...
-
- dump += "\n\nPLUGIN OBJECT DUMP: \n\n"
-
- for name in dir(pyfile.plugin):
- attr = getattr(pyfile.plugin, name)
- if not name.endswith("__") and type(attr) != MethodType:
- dump += "\t%20s = " % name
- try:
- dump += pformat(attr) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- dump += "\nPYFILE OBJECT DUMP: \n\n"
-
- for name in dir(pyfile):
- attr = getattr(pyfile, name)
- if not name.endswith("__") and type(attr) != MethodType:
- dump += "\t%20s = " % name
- try:
- dump += pformat(attr) + "\n"
- except Exception, e:
- dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
-
- dump += "\n\nCONFIG: \n\n"
- dump += pformat(self.m.core.config.values) + "\n"
-
- return dump
-
- def clean(self, pyfile):
- """ set thread unactive and release pyfile """
- self.active = False
- pyfile.release()
-
-
-class DownloadThread(PluginThread):
- """thread for downloading files from 'real' hoster plugins"""
-
- def __init__(self, manager):
- """Constructor"""
- PluginThread.__init__(self, manager)
-
- self.queue = Queue() # job queue
- self.active = False
-
- self.start()
-
- def run(self):
- """run method"""
- pyfile = None
-
- while True:
- del pyfile
- self.active = self.queue.get()
- pyfile = self.active
-
- if self.active == "quit":
- self.active = False
- self.m.threads.remove(self)
- return True
-
- try:
- if not pyfile.hasPlugin(): continue
- #this pyfile was deleted while queueing
-
- pyfile.plugin.checkForSameFiles(starting=True)
- self.m.log.info(_("Download starts: %s" % pyfile.name))
-
- # start download
- self.m.core.hookManager.downloadPreparing(pyfile)
- pyfile.plugin.preprocessing(self)
-
- self.m.log.info(_("Download finished: %s") % pyfile.name)
- self.m.core.hookManager.downloadFinished(pyfile)
- self.m.core.files.checkPackageFinished(pyfile)
-
- except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
- pyfile.setStatus("failed")
- pyfile.error = "Plugin does not work"
- self.clean(pyfile)
- continue
-
- except Abort:
- try:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- except:
- pass
-
- pyfile.setStatus("aborted")
-
- self.clean(pyfile)
- continue
-
- except Reconnect:
- self.queue.put(pyfile)
- #pyfile.req.clearCookies()
-
- while self.m.reconnecting.isSet():
- sleep(0.5)
-
- continue
-
- except Retry, e:
- reason = e.args[0]
- self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
- self.queue.put(pyfile)
- continue
-
- except Fail, e:
- msg = e.args[0]
-
- if msg == "offline":
- pyfile.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % pyfile.name)
- elif msg == "temp. offline":
- pyfile.setStatus("temp. offline")
- self.m.log.warning(_("Download is temporary offline: %s") % pyfile.name)
- else:
- pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
- pyfile.error = msg
-
- self.m.core.hookManager.downloadFailed(pyfile)
- self.clean(pyfile)
- continue
-
- except error, e:
- if len(e.args) == 2:
- code, msg = e.args
- else:
- code = 0
- msg = e.args
-
- self.m.log.debug("pycurl exception %s: %s" % (code, msg))
-
- if code in (7, 18, 28, 52, 56):
- self.m.log.warning(_("Couldn't connect to host or connection reset, waiting 1 minute and retry."))
- wait = time() + 60
-
- pyfile.waitUntil = wait
- pyfile.setStatus("waiting")
- while time() < wait:
- sleep(1)
- if pyfile.abort:
- break
-
- if pyfile.abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- pyfile.setStatus("aborted")
-
- self.clean(pyfile)
- else:
- self.queue.put(pyfile)
-
- continue
-
- else:
- pyfile.setStatus("failed")
- self.m.log.error("pycurl error %s: %s" % (code, msg))
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- self.m.core.hookManager.downloadFailed(pyfile)
-
- self.clean(pyfile)
- continue
-
- except SkipDownload, e:
- pyfile.setStatus("skipped")
-
- self.m.log.info(
- _("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
-
- self.clean(pyfile)
-
- self.m.core.files.checkPackageFinished(pyfile)
-
- self.active = False
- self.m.core.files.save()
-
- continue
-
-
- except Exception, e:
- pyfile.setStatus("failed")
- self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
- pyfile.error = str(e)
-
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- self.m.core.hookManager.downloadFailed(pyfile)
- self.clean(pyfile)
- continue
-
- finally:
- self.m.core.files.save()
- pyfile.checkIfProcessed()
- exc_clear()
-
-
- #pyfile.plugin.req.clean()
-
- self.active = False
- pyfile.finishIfDone()
- self.m.core.files.save()
-
-
- def put(self, job):
- """assing job to thread"""
- self.queue.put(job)
-
-
- def stop(self):
- """stops the thread"""
- self.put("quit")
-
-
-class DecrypterThread(PluginThread):
- """thread for decrypting"""
-
- def __init__(self, manager, pyfile):
- """constructor"""
- PluginThread.__init__(self, manager)
-
- self.active = pyfile
- manager.localThreads.append(self)
-
- pyfile.setStatus("decrypting")
-
- self.start()
-
- def getActiveFiles(self):
- return [self.active]
-
- def run(self):
- """run method"""
-
- pyfile = self.active
- retry = False
-
- try:
- self.m.log.info(_("Decrypting starts: %s") % self.active.name)
- self.active.plugin.preprocessing(self)
-
- except NotImplementedError:
- self.m.log.error(_("Plugin %s is missing a function.") % self.active.pluginname)
- return
-
- except Fail, e:
- msg = e.args[0]
-
- if msg == "offline":
- self.active.setStatus("offline")
- self.m.log.warning(_("Download is offline: %s") % self.active.name)
- else:
- self.active.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": self.active.name, "msg": msg})
- self.active.error = msg
-
- return
-
- except Abort:
- self.m.log.info(_("Download aborted: %s") % pyfile.name)
- pyfile.setStatus("aborted")
-
- return
-
- except Retry:
- self.m.log.info(_("Retrying %s") % self.active.name)
- retry = True
- return self.run()
-
- except Exception, e:
- self.active.setStatus("failed")
- self.m.log.error(_("Decrypting failed: %(name)s | %(msg)s") % {"name": self.active.name, "msg": str(e)})
- self.active.error = str(e)
-
- if self.m.core.debug:
- print_exc()
- self.writeDebugReport(pyfile)
-
- return
-
-
- finally:
- if not retry:
- self.active.release()
- self.active = False
- self.m.core.files.save()
- self.m.localThreads.remove(self)
- exc_clear()
-
-
- #self.m.core.hookManager.downloadFinished(pyfile)
-
-
- #self.m.localThreads.remove(self)
- #self.active.finishIfDone()
- if not retry:
- pyfile.delete()
-
-
-class HookThread(PluginThread):
- """thread for hooks"""
-
- #----------------------------------------------------------------------
- def __init__(self, m, function, args, kwargs):
- """Constructor"""
- PluginThread.__init__(self, m)
-
- self.f = function
- self.args = args
- self.kwargs = kwargs
-
- self.active = []
-
- m.localThreads.append(self)
-
- self.start()
-
- def getActiveFiles(self):
- return self.active
-
- def addActive(self, pyfile):
- """ Adds a pyfile to active list and thus will be displayed on overview"""
- if pyfile not in self.active:
- self.active.append(pyfile)
-
- def finishFile(self, pyfile):
- if pyfile in self.active:
- self.active.remove(pyfile)
-
- pyfile.finishIfDone()
-
- def run(self):
- try:
- try:
- self.kwargs["thread"] = self
- self.f(*self.args, **self.kwargs)
- except TypeError, e:
- #dirty method to filter out exceptions
- if "unexpected keyword argument 'thread'" not in e.args[0]:
- raise
-
- del self.kwargs["thread"]
- self.f(*self.args, **self.kwargs)
- finally:
- local = copy(self.active)
- for x in local:
- self.finishFile(x)
-
- self.m.localThreads.remove(self)
-
-
-class InfoThread(PluginThread):
- def __init__(self, manager, data, pid=-1, rid=-1, add=False):
- """Constructor"""
- PluginThread.__init__(self, manager)
-
- self.data = data
- self.pid = pid # package id
- # [ .. (name, plugin) .. ]
-
- self.rid = rid #result id
- self.add = add #add packages instead of return result
-
- self.cache = [] #accumulated data
-
- self.start()
-
- def run(self):
- """run method"""
-
- plugins = {}
- container = []
-
- for url, plugin in self.data:
- if plugin in plugins:
- plugins[plugin].append(url)
- else:
- plugins[plugin] = [url]
-
-
- # filter out container plugins
- for name in self.m.core.pluginManager.getPlugins("container"):
- if name in plugins:
- container.extend([(name, url) for url in plugins[name]])
-
- del plugins[name]
-
- #directly write to database
- if self.pid > -1:
- for pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
- self.m.core.files.save()
-
- elif self.add:
- for pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
-
- else:
- #generate default result
- result = [(url, 0, 3, url) for url in urls]
-
- self.updateCache(pluginname, result)
-
- packs = parseNames([(name, url) for name, x, y, url in self.cache])
-
- self.m.log.debug("Fetched and generated %d packages" % len(packs))
-
- for k, v in packs:
- self.m.core.api.addPackage(k, v)
-
- #empty cache
- del self.cache[:]
-
- else: #post the results
-
-
- for name, url in container:
- #attach container content
- try:
- data = self.decryptContainer(name, url)
- except:
- print_exc()
- self.m.log.error("Could not decrypt container.")
- data = []
-
- for url, plugin in data:
- if plugin in plugins:
- plugins[plugin].append(url)
- else:
- plugins[plugin] = [url]
-
- self.m.infoResults[self.rid] = {}
-
- for pluginname, urls in plugins.iteritems():
- plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
- if hasattr(plugin, "getInfo"):
- self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
-
- #force to process cache
- if self.cache:
- self.updateResult(pluginname, [], True)
-
- else:
- #generate default result
- result = [(url, 0, 3, url) for url in urls]
-
- self.updateResult(pluginname, result, True)
-
- self.m.infoResults[self.rid]["ALL_INFO_FETCHED"] = {}
-
- self.m.timestamp = time() + 5 * 60
-
-
- def updateDB(self, plugin, result):
- self.m.core.files.updateFileInfo(result, self.pid)
-
- def updateResult(self, plugin, result, force=False):
- #parse package name and generate result
- #accumulate results
-
- self.cache.extend(result)
-
- if len(self.cache) >= 20 or force:
- #used for package generating
- tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
- for name, size, status, url in self.cache]
-
- data = parseNames(tmp)
- result = {}
- for k, v in data.iteritems():
- for url, status in v:
- status.packagename = k
- result[url] = status
-
- self.m.setInfoResults(self.rid, result)
-
- self.cache = []
-
- def updateCache(self, plugin, result):
- self.cache.extend(result)
-
- def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
- try:
- result = [] #result loaded from cache
- process = [] #urls to process
- for url in urls:
- if url in self.m.infoCache:
- result.append(self.m.infoCache[url])
- else:
- process.append(url)
-
- if result:
- self.m.log.debug("Fetched %d values from cache for %s" % (len(result), pluginname))
- cb(pluginname, result)
-
- if process:
- self.m.log.debug("Run Info Fetching for %s" % pluginname)
- for result in plugin.getInfo(process):
- #result = [ .. (name, size, status, url) .. ]
- if not type(result) == list: result = [result]
-
- for res in result:
- self.m.infoCache[res[3]] = res
-
- cb(pluginname, result)
-
- self.m.log.debug("Finished Info Fetching for %s" % pluginname)
- except Exception, e:
- self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
- {"name": pluginname, "err": str(e)})
- if self.m.core.debug:
- print_exc()
-
- # generate default results
- if err:
- result = [(url, 0, 3, url) for url in urls]
- cb(pluginname, result)
-
-
- def decryptContainer(self, plugin, url):
- data = []
- # only works on container plugins
-
- self.m.log.debug("Pre decrypting %s with %s" % (url, plugin))
-
- # dummy pyfile
- pyfile = PyFile(self.m.core.files, -1, url, url, 0, 0, "", plugin, -1, -1)
-
- pyfile.initPlugin()
-
- # little plugin lifecycle
- try:
- pyfile.plugin.setup()
- pyfile.plugin.loadToDisk()
- pyfile.plugin.decrypt(pyfile)
- pyfile.plugin.deleteTmp()
-
- for pack in pyfile.plugin.packages:
- pyfile.plugin.urls.extend(pack[1])
-
- data = self.m.core.pluginManager.parseUrls(pyfile.plugin.urls)
-
- self.m.log.debug("Got %d links." % len(data))
-
- except Exception, e:
- self.m.log.debug("Pre decrypting error: %s" % str(e))
- finally:
- pyfile.release()
-
- return data
diff --git a/module/PyFile.py b/module/PyFile.py
index e2d906705..dae61e361 100644
--- a/module/PyFile.py
+++ b/module/PyFile.py
@@ -276,8 +276,7 @@ class PyFile(object):
return self.size
def notifyChange(self):
- e = UpdateEvent("file", self.id, "collector" if not self.package().queue else "queue")
- self.m.core.pullManager.addEvent(e)
+ self.m.core.eventManager.dispatchEvent("linkUpdated", self.id, self.packageid)
def setProgress(self, value):
if not value == self.progress:
diff --git a/module/PyPackage.py b/module/PyPackage.py
index b194e3dc8..dce501d93 100644
--- a/module/PyPackage.py
+++ b/module/PyPackage.py
@@ -71,5 +71,4 @@ class PyPackage():
self.m.deletePackage(self.id)
def notifyChange(self):
- e = UpdateEvent("pack", self.id, "collector" if not self.queue else "queue")
- self.m.core.pullManager.addEvent(e)
+ self.m.core.eventManager.dispatchEvent("packageUpdated", self.id)
diff --git a/module/config/ConfigParser.py b/module/config/ConfigParser.py
index 82c6a9f91..d7ecab5a0 100644
--- a/module/config/ConfigParser.py
+++ b/module/config/ConfigParser.py
@@ -5,7 +5,7 @@ from time import sleep
from os.path import exists
from gettext import gettext
-from module.utils import chmod
+from module.utils.fs import chmod
CONF_VERSION = 2
@@ -64,6 +64,10 @@ class ConfigParser:
f.write("version: " + str(CONF_VERSION))
f.close()
print "Old version of %s deleted" % conf
+ else:
+ f = open(conf, "wb")
+ f.write("version:" + str(CONF_VERSION))
+ f.close()
except Exception, ex:
e = ex
@@ -115,7 +119,7 @@ class ConfigParser:
for c in (self.CONFIG, self.PLUGIN):
f = open(c, "wb")
configs.append(f)
- chmod(c)
+ chmod(c, 0600)
f.write("version: %i\n\n" % CONF_VERSION)
diff --git a/module/database/FileDatabase.py b/module/database/FileDatabase.py
index b5c386802..abe7c8fc9 100644
--- a/module/database/FileDatabase.py
+++ b/module/database/FileDatabase.py
@@ -22,7 +22,6 @@ from threading import RLock
from time import time
from module.utils import formatSize, lock
-from module.interaction.PullEvents import InsertEvent, ReloadAllEvent, RemoveEvent, UpdateEvent
from module.PyPackage import PyPackage
from module.PyFile import PyFile
from module.database import DatabaseBackend, queue, async, inner
@@ -40,11 +39,12 @@ class FileHandler:
def __init__(self, core):
"""Constructor"""
self.core = core
+ self.ev = None #event manager, set later
# translations
self.statusMsg = [_("finished"), _("offline"), _("online"), _("queued"), _("skipped"), _("waiting"), _("temp. offline"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), _("downloading"), _("processing"), _("unknown")]
- self.cache = {} #holds instances for files
+ self.cache = {} # holds instances for files
self.packageCache = {} # same for packages
#@TODO: purge the cache
@@ -54,14 +54,12 @@ class FileHandler:
#self.lock._Verbose__verbose = True
self.filecount = -1 # if an invalid value is set get current value from db
- self.queuecount = -1 #number of package to be loaded
- self.unchanged = False #determines if any changes was made since last call
+ self.queuecount = -1 # number of package to be loaded
self.db = self.core.db
def change(func):
def new(*args):
- args[0].unchanged = False
args[0].filecount = -1
args[0].queuecount = -1
args[0].jobCache = {}
@@ -118,31 +116,23 @@ class FileHandler:
@lock
@change
- def addLinks(self, urls, package):
- """adds links"""
-
- self.core.hookManager.dispatchEvent("linksAdded", urls, package)
-
- data = self.core.pluginManager.parseUrls(urls)
-
+ def addLinks(self, data, package):
+ """Add links, data = (plugin, url) tuple. Internal method you should use API."""
self.db.addLinks(data, package)
- self.core.threadManager.createInfoThread(data, package)
+ self.ev.dispatchEvent("packageUpdated", package)
- #@TODO change from reloadAll event to package update event
- self.core.pullManager.addEvent(ReloadAllEvent("collector"))
- #----------------------------------------------------------------------
@lock
@change
- def addPackage(self, name, folder, queue=0):
+ def addPackage(self, name, folder, queue=0, password=""):
"""adds a package, default to link collector"""
- lastID = self.db.addPackage(name, folder, queue)
- p = self.db.getPackage(lastID)
- e = InsertEvent("pack", lastID, p.order, "collector" if not queue else "queue")
- self.core.pullManager.addEvent(e)
- return lastID
+ pid = self.db.addPackage(name, folder, queue, password)
+ p = self.db.getPackage(pid)
+
+ self.ev.dispatchEvent("packageInserted", pid, p.queue, p.order)
+ return pid
+
- #----------------------------------------------------------------------
@lock
@change
def deletePackage(self, id):
@@ -156,7 +146,6 @@ class FileHandler:
oldorder = p.order
queue = p.queue
- e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
pyfiles = self.cache.values()
@@ -166,8 +155,7 @@ class FileHandler:
pyfile.release()
self.db.deletePackage(p)
- self.core.pullManager.addEvent(e)
- self.core.hookManager.dispatchEvent("packageDeleted", id)
+ self.ev.dispatchEvent("packageDeleted", id)
if id in self.packageCache:
del self.packageCache[id]
@@ -178,7 +166,7 @@ class FileHandler:
pack.order -= 1
pack.notifyChange()
- #----------------------------------------------------------------------
+
@lock
@change
def deleteLink(self, id):
@@ -189,8 +177,6 @@ class FileHandler:
return None
pid = f.packageid
- e = RemoveEvent("file", id, "collector" if not f.package().queue else "queue")
-
oldorder = f.order
if id in self.core.threadManager.processingIds():
@@ -201,7 +187,7 @@ class FileHandler:
self.db.deleteLink(f)
- self.core.pullManager.addEvent(e)
+ self.ev.dispatchEvent("linkDeleted", id, pid)
p = self.getPackage(pid)
if not len(p.getChildren()):
@@ -213,35 +199,26 @@ class FileHandler:
pyfile.order -= 1
pyfile.notifyChange()
- #----------------------------------------------------------------------
def releaseLink(self, id):
"""removes pyfile from cache"""
if id in self.cache:
del self.cache[id]
- #----------------------------------------------------------------------
def releasePackage(self, id):
"""removes package from cache"""
if id in self.packageCache:
del self.packageCache[id]
- #----------------------------------------------------------------------
def updateLink(self, pyfile):
"""updates link"""
self.db.updateLink(pyfile)
+ self.ev.dispatchEvent("linkUpdated", pyfile.id, pyfile.packageid)
- e = UpdateEvent("file", pyfile.id, "collector" if not pyfile.package().queue else "queue")
- self.core.pullManager.addEvent(e)
-
- #----------------------------------------------------------------------
def updatePackage(self, pypack):
"""updates a package"""
self.db.updatePackage(pypack)
+ self.ev.dispatchEvent("packageUpdated", pypack.id)
- e = UpdateEvent("pack", pypack.id, "collector" if not pypack.queue else "queue")
- self.core.pullManager.addEvent(e)
-
- #----------------------------------------------------------------------
def getPackage(self, id):
"""return package instance"""
@@ -250,7 +227,6 @@ class FileHandler:
else:
return self.db.getPackage(id)
- #----------------------------------------------------------------------
def getPackageData(self, id):
"""returns dict with package information"""
pack = self.getPackage(id)
@@ -274,7 +250,7 @@ class FileHandler:
return pack
- #----------------------------------------------------------------------
+
def getFileData(self, id):
"""returns dict with file information"""
if id in self.cache:
@@ -282,7 +258,7 @@ class FileHandler:
return self.db.getLinkData(id)
- #----------------------------------------------------------------------
+
def getFile(self, id):
"""returns pyfile instance"""
if id in self.cache:
@@ -290,7 +266,7 @@ class FileHandler:
else:
return self.db.getFile(id)
- #----------------------------------------------------------------------
+
@lock
def getJob(self, occ):
"""get suitable job"""
@@ -334,21 +310,6 @@ class FileHandler:
#pyfile = self.getFile(self.jobCache[occ].pop())
return pyfile
- @lock
- def getDecryptJob(self):
- """return job for decrypting"""
- if "decrypt" in self.jobCache:
- return None
-
- plugins = self.core.pluginManager.getPlugins("crypter").keys() + self.core.pluginManager.getPlugins("container").keys()
- plugins = str(tuple(plugins))
-
- jobs = self.db.getPluginJob(plugins)
- if jobs:
- return self.getFile(jobs[0])
- else:
- self.jobCache["decrypt"] = "empty"
- return None
def getFileCount(self):
"""returns number of files"""
@@ -405,8 +366,7 @@ class FileHandler:
if id in self.packageCache:
self.packageCache[id].setFinished = False
- e = UpdateEvent("pack", id, "collector" if not self.getPackage(id).queue else "queue")
- self.core.pullManager.addEvent(e)
+ self.ev.dispatchEvent("packageUpdated", id)
@lock
@change
@@ -420,9 +380,8 @@ class FileHandler:
self.db.restartFile(id)
+ self.ev.dispatchEvent("linkUpdated", id)
- e = UpdateEvent("file", id, "collector" if not self.getFile(id).package().queue else "queue")
- self.core.pullManager.addEvent(e)
@lock
@change
@@ -431,17 +390,10 @@ class FileHandler:
p = self.db.getPackage(id)
oldorder = p.order
+ p.queue = queue
- e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
- self.core.pullManager.addEvent(e)
-
self.db.clearPackageOrder(p)
-
- p = self.db.getPackage(id)
-
- p.queue = queue
self.db.updatePackage(p)
-
self.db.reorderPackage(p, -1, True)
packs = self.packageCache.values()
@@ -452,37 +404,34 @@ class FileHandler:
self.db.commit()
self.releasePackage(id)
- p = self.getPackage(id)
-
- e = InsertEvent("pack", id, p.order, "collector" if not p.queue else "queue")
- self.core.pullManager.addEvent(e)
+
+ self.ev.dispatchEvent("packageDeleted", id)
+ self.ev.dispatchEvent("packageInserted", id, p.queue, p.order)
@lock
@change
def reorderPackage(self, id, position):
p = self.getPackage(id)
- e = RemoveEvent("pack", id, "collector" if not p.queue else "queue")
- self.core.pullManager.addEvent(e)
self.db.reorderPackage(p, position)
packs = self.packageCache.values()
for pack in packs:
if pack.queue != p.queue or pack.order < 0 or pack == p: continue
if p.order > position:
- if pack.order >= position and pack.order < p.order:
+ if position <= pack.order < p.order:
pack.order += 1
pack.notifyChange()
elif p.order < position:
- if pack.order <= position and pack.order > p.order:
+ if position >= pack.order > p.order:
pack.order -= 1
pack.notifyChange()
p.order = position
self.db.commit()
- e = InsertEvent("pack", id, position, "collector" if not p.queue else "queue")
- self.core.pullManager.addEvent(e)
+ self.ev.dispatchEvent("packageDeleted", id)
+ self.ev.dispatchEvent("packageInserted", id, p.queue, p.order)
@lock
@change
@@ -490,20 +439,17 @@ class FileHandler:
f = self.getFileData(id)
f = f[id]
- e = RemoveEvent("file", id, "collector" if not self.getPackage(f["package"]).queue else "queue")
- self.core.pullManager.addEvent(e)
-
self.db.reorderLink(f, position)
pyfiles = self.cache.values()
for pyfile in pyfiles:
if pyfile.packageid != f["package"] or pyfile.order < 0: continue
if f["order"] > position:
- if pyfile.order >= position and pyfile.order < f["order"]:
+ if position <= pyfile.order < f["order"]:
pyfile.order += 1
pyfile.notifyChange()
elif f["order"] < position:
- if pyfile.order <= position and pyfile.order > f["order"]:
+ if position >= pyfile.order > f["order"]:
pyfile.order -= 1
pyfile.notifyChange()
@@ -512,15 +458,14 @@ class FileHandler:
self.db.commit()
- e = InsertEvent("file", id, position, "collector" if not self.getPackage(f["package"]).queue else "queue")
- self.core.pullManager.addEvent(e)
+ self.ev.dispatchEvent("packageUpdated", f["package"])
+
@change
def updateFileInfo(self, data, pid):
""" updates file info (name, size, status, url)"""
ids = self.db.updateLinkInfo(data)
- e = UpdateEvent("pack", pid, "collector" if not self.getPackage(pid).queue else "queue")
- self.core.pullManager.addEvent(e)
+ self.ev.dispatchEvent("packageUpdated", pid)
def checkPackageFinished(self, pyfile):
""" checks if package is finished and calls hookmanager """
@@ -625,9 +570,9 @@ class FileMethods():
self.c.executemany('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', links)
@queue
- def addPackage(self, name, folder, queue):
+ def addPackage(self, name, folder, queue, password):
order = self._nextPackageOrder(queue)
- self.c.execute('INSERT INTO packages(name, folder, queue, packageorder) VALUES(?,?,?,?)', (name, folder, queue, order))
+ self.c.execute('INSERT INTO packages(name, folder, queue, packageorder, password) VALUES(?,?,?,?,?)', (name, folder, queue, order, password))
return self.c.lastrowid
@queue
@@ -824,7 +769,7 @@ class FileMethods():
if not r: return None
return PyPackage(self.manager, id, * r)
- #----------------------------------------------------------------------
+
@queue
def getFile(self, id):
"""return link instance from id"""
@@ -837,28 +782,14 @@ class FileMethods():
@queue
def getJob(self, occ):
"""return pyfile ids, which are suitable for download and dont use a occupied plugin"""
-
- #@TODO improve this hardcoded method
- pre = "('DLC', 'LinkList', 'SerienjunkiesOrg', 'CCF', 'RSDF')" #plugins which are processed in collector
-
cmd = "("
for i, item in enumerate(occ):
if i: cmd += ", "
cmd += "'%s'" % item
-
- cmd += ")"
- cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE ((p.queue=1 AND l.plugin NOT IN %s) OR l.plugin IN %s) AND l.status IN (2,3,14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % (cmd, pre)
-
- self.c.execute(cmd) # very bad!
-
- return [x[0] for x in self.c]
-
- @queue
- def getPluginJob(self, plugins):
- """returns pyfile ids with suited plugins"""
- cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE l.plugin IN %s AND l.status IN (2,3,14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % plugins
+ cmd += ")"
+ cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=1 AND l.plugin NOT IN %s AND l.status IN (2,3,14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % cmd
self.c.execute(cmd) # very bad!
return [x[0] for x in self.c]
diff --git a/module/interaction/EventManager.py b/module/interaction/EventManager.py
index c45c388f3..0c4fc80c9 100644
--- a/module/interaction/EventManager.py
+++ b/module/interaction/EventManager.py
@@ -36,6 +36,9 @@ class EventManager:
for client in self.clients:
client.addEvent(event)
+ def dispatchEvent(self, *args):
+ pass
+
class Client:
def __init__(self, uuid):
diff --git a/module/interaction/PullEvents.py b/module/interaction/PullEvents.py
deleted file mode 100644
index f34b01d48..000000000
--- a/module/interaction/PullEvents.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 3 of the License,
- or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- See the GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, see <http://www.gnu.org/licenses/>.
-
- @author: mkaay
-"""
-
-class UpdateEvent():
- def __init__(self, itype, iid, destination):
- assert itype == "pack" or itype == "file"
- assert destination == "queue" or destination == "collector"
- self.type = itype
- self.id = iid
- self.destination = destination
-
- def toList(self):
- return ["update", self.destination, self.type, self.id]
-
-class RemoveEvent():
- def __init__(self, itype, iid, destination):
- assert itype == "pack" or itype == "file"
- assert destination == "queue" or destination == "collector"
- self.type = itype
- self.id = iid
- self.destination = destination
-
- def toList(self):
- return ["remove", self.destination, self.type, self.id]
-
-class InsertEvent():
- def __init__(self, itype, iid, after, destination):
- assert itype == "pack" or itype == "file"
- assert destination == "queue" or destination == "collector"
- self.type = itype
- self.id = iid
- self.after = after
- self.destination = destination
-
- def toList(self):
- return ["insert", self.destination, self.type, self.id, self.after]
-
-class ReloadAllEvent():
- def __init__(self, destination):
- assert destination == "queue" or destination == "collector"
- self.destination = destination
-
- def toList(self):
- return ["reload", self.destination]
-
-class AccountUpdateEvent():
- def toList(self):
- return ["account"]
-
-class ConfigUpdateEvent():
- def toList(self):
- return ["config"]
diff --git a/module/plugins/Account.py b/module/plugins/Account.py
index 86b73c99c..6b65051db 100644
--- a/module/plugins/Account.py
+++ b/module/plugins/Account.py
@@ -149,6 +149,13 @@ class Account(Base, AccountInfo):
def getAccountRequest(self):
return self.core.requestFactory.getRequest(self.__name__, self.cj)
+ def getDownloadSettings(self):
+ """ Can be overwritten to change download settings. Default is no chunkLimit, multiDL, resumeDownload
+
+ :return: (chunkLimit, multiDL, resumeDownload) / (int,bool,bool)
+ """
+ return -1, True, True
+
@lock
def getAccountInfo(self, force=False):
"""retrieve account infos for an user, do **not** overwrite this method!\\
diff --git a/module/plugins/AccountManager.py b/module/plugins/AccountManager.py
index c718510ed..77139206c 100644
--- a/module/plugins/AccountManager.py
+++ b/module/plugins/AccountManager.py
@@ -21,7 +21,6 @@ from threading import Lock
from random import choice
from module.common.json_layer import json
-from module.interaction.PullEvents import AccountUpdateEvent
from module.utils import lock
class AccountManager():
@@ -85,12 +84,15 @@ class AccountManager():
self.createAccount(plugin, user, password, options)
self.saveAccounts()
+ self.sendChange()
+
@lock
def removeAccount(self, plugin, user):
"""remove account"""
if plugin in self.accounts and user in self.accounts[plugin]:
del self.accounts[plugin][user]
self.core.db.removeAccount(plugin, user)
+ self.sendChange()
else:
self.core.log.debug("Remove non existing account %s %s" % (plugin, user))
@@ -118,9 +120,6 @@ class AccountManager():
for acc in p_dict.itervalues():
acc.getAccountInfo()
- e = AccountUpdateEvent()
- self.core.pullManager.addEvent(e)
-
return self.accounts
def refreshAllAccounts(self):
@@ -131,5 +130,4 @@ class AccountManager():
def sendChange(self):
- e = AccountUpdateEvent()
- self.core.pullManager.addEvent(e)
+ self.core.eventManager.dispatchEvent("accountsUpdated") \ No newline at end of file
diff --git a/module/plugins/Base.py b/module/plugins/Base.py
index 36df7e423..b2338a01f 100644
--- a/module/plugins/Base.py
+++ b/module/plugins/Base.py
@@ -18,12 +18,19 @@
"""
import sys
+from module.utils.fs import exists, makedirs, join
-# TODO: config format definition
+# TODO
# more attributes if needed
# get rid of catpcha & container plugins ?! (move to crypter & internals)
# adapt old plugins as needed
+class Fail(Exception):
+ """ raised when failed """
+
+class Retry(Exception):
+ """ raised when start again from beginning """
+
class Base(object):
"""
The Base plugin class with all shared methods and every possible attribute for plugin definition.
@@ -31,7 +38,8 @@ class Base(object):
__version__ = "0.1"
#: Regexp pattern which will be matched for download plugins
__pattern__ = r""
- #: Flat config definition
+ #: Config definition: list of (name, type, verbose_name, default_value) or
+ #: (name, type, verbose_name, short_description, default_value)
__config__ = tuple()
#: Short description, one liner
__description__ = ""
@@ -41,7 +49,7 @@ class Base(object):
__dependencies__ = tuple()
#: Tags to categorize the plugin
__tags__ = tuple()
- #: Base64 encoded .png icon
+ #: Base64 encoded .png icon, please don't use sizes above ~3KB
__icon__ = ""
#: Alternative, link to png icon
__icon_url__ = ""
@@ -62,18 +70,25 @@ class Base(object):
self.config = core.config
#log functions
- def logInfo(self, *args):
- self.log.info("%s: %s" % (self.__name__, " | ".join([a if isinstance(a, basestring) else str(a) for a in args])))
+ def logInfo(self, *args, **kwargs):
+ self._log("info", *args, **kwargs)
+
+ def logWarning(self, *args, **kwargs):
+ self._log("warning", *args, **kwargs)
- def logWarning(self, *args):
- self.log.warning("%s: %s" % (self.__name__, " | ".join([a if isinstance(a, basestring) else str(a) for a in args])))
+ def logError(self, *args, **kwargs):
+ self._log("error", *args, **kwargs)
- def logError(self, *args):
- self.log.error("%s: %s" % (self.__name__, " | ".join([a if isinstance(a, basestring) else str(a) for a in args])))
+ def logDebug(self, *args, **kwargs):
+ self._log("debug", *args, **kwargs)
- def logDebug(self, *args):
- self.log.debug("%s: %s" % (self.__name__, " | ".join([a if isinstance(a, basestring) else str(a) for a in args])))
+ def _log(self, level, *args, **kwargs):
+ if "sep" in kwargs:
+ sep = "%s" % kwargs["sep"]
+ else:
+ sep = " | "
+ getattr(self.log, level)("%s: %s" % (self.__name__, sep.join([a if isinstance(a, basestring) else str(a) for a in args])))
def setConf(self, option, value):
""" see `setConfig` """
@@ -129,3 +144,67 @@ class Base(object):
#noinspection PyUnresolvedReferences
sys.stdout = sys._stdout
embed()
+
+ def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=False):
+ """Load content at url and returns it
+
+ :param url:
+ :param get:
+ :param post:
+ :param ref:
+ :param cookies:
+ :param just_header: if True only the header will be retrieved and returned as dict
+ :param decode: Wether to decode the output according to http header, should be True in most cases
+ :return: Loaded content
+ """
+ if not hasattr(self, "req"): raise Exception("Plugin type does not have Request attribute.")
+
+ if type(url) == unicode: url = str(url)
+
+ res = self.req.load(url, get, post, ref, cookies, just_header, decode=decode)
+
+ if self.core.debug:
+ from inspect import currentframe
+
+ frame = currentframe()
+ if not exists(join("tmp", self.__name__)):
+ makedirs(join("tmp", self.__name__))
+
+ f = open(
+ join("tmp", self.__name__, "%s_line%s.dump.html" % (frame.f_back.f_code.co_name, frame.f_back.f_lineno))
+ , "wb")
+ del frame # delete the frame or it wont be cleaned
+
+ try:
+ tmp = res.encode("utf8")
+ except:
+ tmp = res
+
+ f.write(tmp)
+ f.close()
+
+ if just_header:
+ #parse header
+ header = {"code": self.req.code}
+ for line in res.splitlines():
+ line = line.strip()
+ if not line or ":" not in line: continue
+
+ key, none, value = line.partition(":")
+ key = key.lower().strip()
+ value = value.strip()
+
+ if key in header:
+ if type(header[key]) == list:
+ header[key].append(value)
+ else:
+ header[key] = [header[key], value]
+ else:
+ header[key] = value
+ res = header
+
+ return res
+
+ def fail(self, reason):
+ """ fail and give reason """
+ raise Fail(reason) \ No newline at end of file
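
Base.load() becomes the common entry point for HTTP requests in plugins: it dumps responses to tmp/ in debug mode and, with just_header=True, parses the response headers into a dict keyed by lower-cased header names (plus "code" for the status). A hedged sketch of a plugin using it; the hoster class, pattern and URL checks are invented:

    from module.plugins.Hoster import Hoster

    class ExampleCom(Hoster):
        __name__ = "ExampleCom"
        __pattern__ = r"http://example\.com/file/\w+"

        def process(self, pyfile):
            # decode=True converts the body according to the HTTP headers
            html = self.load(pyfile.url, decode=True)
            if "file not found" in html.lower():
                self.fail("File not found")

            # just_header=True returns e.g. {"code": 200, "content-length": "123456", ...}
            header = self.load(pyfile.url, just_header=True)
            pyfile.size = int(header.get("content-length", 0))
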
diff --git a/module/plugins/Container.py b/module/plugins/Container.py
deleted file mode 100644
index c233d3710..000000000
--- a/module/plugins/Container.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 3 of the License,
- or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- See the GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, see <http://www.gnu.org/licenses/>.
-
- @author: mkaay
-"""
-
-from module.plugins.Crypter import Crypter
-
-from os.path import join, exists, basename
-from os import remove
-import re
-
-class Container(Crypter):
- __name__ = "Container"
- __version__ = "0.1"
- __pattern__ = None
- __type__ = "container"
- __description__ = """Base container plugin"""
- __author_name__ = ("mkaay")
- __author_mail__ = ("mkaay@mkaay.de")
-
-
- def preprocessing(self, thread):
- """prepare"""
-
- self.setup()
- self.thread = thread
-
- self.loadToDisk()
-
- self.decrypt(self.pyfile)
- self.deleteTmp()
-
- self.createPackages()
-
-
- def loadToDisk(self):
- """loads container to disk if its stored remotely and overwrite url,
- or check existent on several places at disk"""
-
- if self.pyfile.url.startswith("http"):
- self.pyfile.name = re.findall("([^\/=]+)", self.pyfile.url)[-1]
- content = self.load(self.pyfile.url)
- self.pyfile.url = join(self.config["general"]["download_folder"], self.pyfile.name)
- f = open(self.pyfile.url, "wb" )
- f.write(content)
- f.close()
-
- else:
- self.pyfile.name = basename(self.pyfile.url)
- if not exists(self.pyfile.url):
- if exists(join(pypath, self.pyfile.url)):
- self.pyfile.url = join(pypath, self.pyfile.url)
- else:
- self.fail(_("File not exists."))
-
-
- def deleteTmp(self):
- if self.pyfile.name.startswith("tmp_"):
- remove(self.pyfile.url)
-
-
diff --git a/module/plugins/Crypter.py b/module/plugins/Crypter.py
index d1549fe80..fc54b32d7 100644
--- a/module/plugins/Crypter.py
+++ b/module/plugins/Crypter.py
@@ -1,72 +1,214 @@
# -*- coding: utf-8 -*-
-"""
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 3 of the License,
- or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
- See the GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, see <http://www.gnu.org/licenses/>.
-
- @author: mkaay
-"""
-
-from module.plugins.Plugin import Plugin
-
-class Crypter(Plugin):
- __name__ = "Crypter"
- __version__ = "0.1"
- __pattern__ = None
- __type__ = "container"
- __description__ = """Base crypter plugin"""
- __author_name__ = ("mkaay")
- __author_mail__ = ("mkaay@mkaay.de")
-
- def __init__(self, pyfile):
- Plugin.__init__(self, pyfile)
-
- #: Put all packages here. It's a list of tuples like: ( name, [list of links], folder )
- self.packages = []
+from module.Api import Destination
+from module.common.packagetools import parseNames
+from module.utils import to_list
+from module.utils.fs import exists
+
+from Base import Base, Retry
+
+class Package:
+ """ Container that indicates new package should be created """
+ def __init__(self, name, urls=None, dest=Destination.Queue):
+ self.name = name,
+ self.urls = urls if urls else []
+ self.dest = dest
+
+ def addUrl(self, url):
+ self.urls.append(url)
+
+class PyFileMockup:
+ """ Legacy class needed by old crypter plugins """
+ def __init__(self, url):
+ self.url = url
+ self.name = url
+
+class Crypter(Base):
+ """
+ Base class for (de)crypter plugins. Overwrite decrypt* methods.
+
+ How to use decrypt* methods
+ ---------------------------
+
+ You have to overwrite at least one method of decryptURL, decryptURLs, decryptFile.
+
+ After decrypting and generating urls/packages you have to return the result at the\
+ end of your method. Valid return Data is:
+
+ `Package` instance
+ A **new** package will be created with the name and the urls of the object.
+
+ List of urls and `Package` instances
+ All urls in the list will be added to the **current** package. For each `Package`\
+ instance a new package will be created.
+
+ """
- #: List of urls, pyLoad will generate packagenames
+ @classmethod
+ def decrypt(cls, core, url_or_urls):
+ """Static method to decrypt, something. Can be used by other plugins.
+
+ :param core: pyLoad `Core`, needed in decrypt context
+ :param url_or_urls: List of urls or urls
+ :return: List of decrypted urls, all packages info removed
+ """
+ urls = to_list(url_or_urls)
+ p = cls(core)
+ try:
+ result = p.processDecrypt(urls)
+ finally:
+ p.clean()
+
+ ret = []
+
+ for url_or_pack in result:
+ if isinstance(url_or_pack, Package): #package
+ ret.extend(url_or_pack.urls)
+ else: # single url
+ ret.append(url_or_pack)
+
+ return ret
+
+ def __init__(self, core, pid=-1, password=None):
+ Base.__init__(self, core)
+ self.req = core.requestFactory.getRequest(self.__name__)
+
+ # Package id plugin was initilized for, dont use this, its not guaranteed to be set
+ self.pid = pid
+
+ #: Password supplied by user
+ self.password = password
+
+ # For old style decrypter, do not use these !
+ self.packages = []
self.urls = []
-
- self.multiDL = True
- self.limitDL = 0
-
-
- def preprocessing(self, thread):
- """prepare"""
- self.setup()
- self.thread = thread
-
- self.decrypt(self.pyfile)
-
- self.createPackages()
-
-
- def decrypt(self, pyfile):
+ self.pyfile = None
+
+ self.init()
+
+ def init(self):
+ """More init stuff if needed"""
+
+ def setup(self):
+ """Called everytime before decrypting. A Crypter plugin will be most likly used for several jobs."""
+
+ def decryptURL(self, url):
+ """Decrypt a single url
+
+ :param url: url to decrypt
+ :return: See `Crypter` Documentation
+ """
+ raise NotImplementedError
+
+ def decryptURLs(self, urls):
+ """Decrypt a bunch of urls
+
+ :param urls: list of urls
+ :return: See `Crypter` Documentation
+ """
raise NotImplementedError
+ def decryptFile(self, content):
+ """Decrypt file content
+
+ :param content: content to decrypt as string
+ :return: See `Crypter Documentation
+ """
+ raise NotImplementedError
+
+ def generatePackages(self, urls):
+ """Generates `Package` instances and names from urls. Usefull for many different link and no\
+ given package name.
+
+ :param urls: list of urls
+ :return: list of `Package`
+ """
+ return [Package(name, purls) for name, purls in parseNames([(url,url) for url in urls]).iteritems()]
+
+ def processDecrypt(self, urls):
+ """ Internal method to select decrypting method
+
+ :param urls: List of urls/content
+ :return:
+ """
+ cls = self.__class__
+
+ # seperate local and remote files
+ content, urls = self.getLocalContent(urls)
+
+ if hasattr(cls, "decryptURLs"):
+ self.setup()
+ result = to_list(self.decryptURLs(urls))
+ elif hasattr(cls, "decryptURL"):
+ result = []
+ for url in urls:
+ self.setup()
+ result.extend(to_list(self.decryptURL(url)))
+ elif hasattr(cls, "decrypt"):
+ self.logDebug("Deprecated .decrypt() method in Crypter plugin")
+ result = [] # TODO
+ else:
+ self.logError("No Decrypting method was overwritten")
+ result = []
+
+ if hasattr(cls, "decryptFile"):
+ for c in content:
+ self.setup()
+ result.extend(to_list(self.decryptFile(c)))
+
+ return result
+
+ def getLocalContent(self, urls):
+ """Load files from disk
+
+ :param urls:
+ :return: content, remote urls
+ """
+ content = []
+ # do nothing if no decryptFile method
+ if hasattr(self.__class__, "decryptFile"):
+ remote = []
+ for url in urls:
+ path = None
+ if url.startswith("http"):
+ path = None # skip directly
+ elif exists(url):
+ path = url
+ elif exists(self.core.path(url)):
+ path = self.core.path(url)
+
+ if path:
+ f = open(path, "wb")
+ content.append(f.read())
+ f.close()
+ else:
+ remote.append(url)
+
+ #swap filtered url list
+ urls = remote
+
+ return content, urls
+
+ def retry(self):
+ """ Retry decrypting, will only work once. Somewhat deprecated method, should be avoided. """
+ raise Retry()
+
def createPackages(self):
- """ create new packages from self.packages """
+ """ Deprecated """
+ self.logDebug("Deprecated method .createPackages()")
for pack in self.packages:
self.log.debug("Parsed package %(name)s with %(len)d links" % { "name" : pack[0], "len" : len(pack[1]) } )
links = [x.decode("utf-8") for x in pack[1]]
- pid = self.core.api.addPackage(pack[0], links, self.pyfile.package().queue)
+ pid = self.core.api.files.addLinks(self.pid, links)
- if self.pyfile.package().password:
- self.core.api.setPackageData(pid, {"password": self.pyfile.package().password})
if self.urls:
self.core.api.generateAndAddPackages(self.urls)
+ def clean(self):
+ if hasattr(self, "req"):
+ self.req.close()
+ del self.req \ No newline at end of file
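
The rewritten Crypter base expects new plugins to override one of decryptURL(), decryptURLs() or decryptFile() and to return either plain urls (added to the current package) or Package instances (each creating a new package); processDecrypt() selects the method and getLocalContent() separates local files from remote urls beforehand. A hedged sketch of a minimal new-style crypter; the plugin name, pattern and link extraction are invented:

    import re

    from module.plugins.Crypter import Crypter, Package

    class ExampleFolder(Crypter):
        __name__ = "ExampleFolder"
        __version__ = "0.1"
        __pattern__ = r"http://example\.com/folder/\w+"
        __description__ = """Example folder decrypter (illustrative)"""

        def decryptURLs(self, urls):
            packages = []
            for url in urls:
                html = self.load(url, decode=True)
                links = re.findall(r'href="(http://example\.com/file/\w+)"', html)
                # Each Package creates a new package holding its links;
                # returning plain urls instead would extend the current package.
                packages.append(Package("Example folder", links))
            return packages
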
diff --git a/module/plugins/Hook.py b/module/plugins/Hook.py
index 860dc76bb..a3b86a794 100644
--- a/module/plugins/Hook.py
+++ b/module/plugins/Hook.py
@@ -20,7 +20,7 @@
from traceback import print_exc
-from Plugin import Base
+from Base import Base
class Expose(object):
""" used for decoration to declare rpc services """
diff --git a/module/plugins/Hoster.py b/module/plugins/Hoster.py
index aa50099fb..54c2efdfd 100644
--- a/module/plugins/Hoster.py
+++ b/module/plugins/Hoster.py
@@ -13,13 +13,39 @@
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
-
- @author: mkaay
+
+ @author: RaNaN, spoob, mkaay
"""
-from module.plugins.Plugin import Plugin
+from time import time, sleep
+from random import randint
+
+import os
+
+if os.name != "nt":
+ from module.utils.fs import chown
+ from pwd import getpwnam
+ from grp import getgrnam
+
+from Base import Base, Fail, Retry
+from module.utils import chunks #legacy import
+from module.utils.fs import save_join, save_path, fs_encode, fs_decode,\
+ remove, makedirs, chmod, stat, exists, join
+
+
+class Abort(Exception):
+ """ raised when aborted """
+
+class Reconnect(Exception):
+ """ raised when reconnected """
-class Hoster(Plugin):
+class SkipDownload(Exception):
+ """ raised when download should be skipped """
+
+class Hoster(Base):
+ """
+    Base plugin for hosters. Overwrite getInfo for online status retrieval and process for downloading.
+ """
@staticmethod
def getInfo(urls):
@@ -28,6 +54,412 @@ class Hoster(Plugin):
where status is one of API pyfile statusses.
:param urls: List of urls
- :return:
+        :return: yields a list of result tuples (name, size, status, url)
+ """
+ pass
+
+ def __init__(self, pyfile):
+ Base.__init__(self, pyfile.m.core)
+
+ self.wantReconnect = False
+ #: enables simultaneous processing of multiple downloads
+ self.multiDL = True
+ self.limitDL = 0
+ #: chunk limit
+ self.chunkLimit = 1
+        #: enables resume (will be ignored if the server does not accept chunks)
+ self.resumeDownload = False
+
+ #: time() + wait in seconds
+ self.waitUntil = 0
+ self.waiting = False
+
+ self.ocr = None #captcha reader instance
+ #: account handler instance, see :py:class:`Account`
+ self.account = self.core.accountManager.getAccountForPlugin(self.__name__)
+
+ #: premium status
+ self.premium = False
+ #: username/login
+ self.user = None
+
+ if self.account and not self.account.isUsable(): self.account = None
+ if self.account:
+ self.user = self.account.loginname
+ #: Browser instance, see `network.Browser`
+ self.req = self.account.getAccountRequest()
+ # Default: -1, True, True
+ self.chunkLimit, self.resumeDownload, self.multiDL = self.account.getDownloadSettings()
+ self.premium = self.account.isPremium()
+ else:
+ self.req = self.core.requestFactory.getRequest(self.__name__)
+
+ #: associated pyfile instance, see `PyFile`
+ self.pyfile = pyfile
+ self.thread = None # holds thread in future
+
+ #: location where the last call to download was saved
+ self.lastDownload = ""
+ #: re match of the last call to `checkDownload`
+ self.lastCheck = None
+ #: js engine, see `JsEngine`
+ self.js = self.core.js
+ self.cTask = None #captcha task
+
+ self.retries = 0 # amount of retries already made
+ self.html = None # some plugins store html code here
+
+ self.init()
+
+ def getChunkCount(self):
+ if self.chunkLimit <= 0:
+ return self.config["download"]["chunks"]
+ return min(self.config["download"]["chunks"], self.chunkLimit)
+
+ def __call__(self):
+ return self.__name__
+
+ def init(self):
+ """initialize the plugin (in addition to `__init__`)"""
+ pass
+
+ def setup(self):
+        """ setup for environment and other things, called before downloading (possibly more than once)"""
+ pass
+
+ def preprocessing(self, thread):
+ """ handles important things to do before starting """
+ self.thread = thread
+
+ if self.account:
+ # will force a relogin or reload of account info if necessary
+ self.account.getAccountInfo()
+ else:
+ self.req.clearCookies()
+
+ self.setup()
+
+ self.pyfile.setStatus("starting")
+
+ return self.process(self.pyfile)
+
+
+ def process(self, pyfile):
+ """the 'main' method of every plugin, you **have to** overwrite it"""
+ raise NotImplementedError
+
+ def resetAccount(self):
+        """ don't use the account and retry the download """
+ self.account = None
+ self.req = self.core.requestFactory.getRequest(self.__name__)
+ self.retry()
+
+ def checksum(self, local_file=None):
+ """
+ return codes:
+ 0 - checksum ok
+ 1 - checksum wrong
+ 5 - can't get checksum
+ 10 - not implemented
+ 20 - unknown error
+ """
+ #@TODO checksum check hook
+
+ return True, 10
+
+
+ def setWait(self, seconds, reconnect=False):
+ """Set a specific wait time later used with `wait`
+
+ :param seconds: wait time in seconds
+ :param reconnect: True if a reconnect would avoid wait time
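+
+        Usage sketch (the wait time is illustrative)::
+
+            self.setWait(180, reconnect=True)
+            self.wait()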
+ """
+ if reconnect:
+ self.wantReconnect = True
+ self.pyfile.waitUntil = time() + int(seconds)
+
+ def wait(self):
+ """ waits the time previously set """
+ self.waiting = True
+ self.pyfile.setStatus("waiting")
+
+ while self.pyfile.waitUntil > time():
+ self.thread.m.reconnecting.wait(2)
+
+ if self.pyfile.abort: raise Abort
+ if self.thread.m.reconnecting.isSet():
+ self.waiting = False
+ self.wantReconnect = False
+ raise Reconnect
+
+ self.waiting = False
+ self.pyfile.setStatus("starting")
+
+ def offline(self):
+ """ fail and indicate file is offline """
+ raise Fail("offline")
+
+ def tempOffline(self):
+        """ fail and indicate the file is temporarily offline, the core may act accordingly """
+ raise Fail("temp. offline")
+
+ def retry(self, max_tries=3, wait_time=1, reason=""):
+        """Retry and begin again from the beginning
+
+ :param max_tries: number of maximum retries
+ :param wait_time: time to wait in seconds
+ :param reason: reason for retrying, will be passed to fail if max_tries reached
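+
+        Usage sketch (values are illustrative)::
+
+            self.retry(max_tries=5, wait_time=60, reason="server busy")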
+ """
+ if 0 < max_tries <= self.retries:
+ if not reason: reason = "Max retries reached"
+ raise Fail(reason)
+
+ self.wantReconnect = False
+ self.setWait(wait_time)
+ self.wait()
+
+ self.retries += 1
+ raise Retry(reason)
+
+ def invalidCaptcha(self):
+ if self.cTask:
+ self.cTask.invalid()
+
+ def correctCaptcha(self):
+ if self.cTask:
+ self.cTask.correct()
+
+ def decryptCaptcha(self, url, get={}, post={}, cookies=False, forceUser=False, imgtype='jpg',
+ result_type='textual'):
+ """ Loads a captcha and decrypts it with ocr, plugin, user input
+
+ :param url: url of captcha image
+ :param get: get part for request
+ :param post: post part for request
+ :param cookies: True if cookies should be enabled
+ :param forceUser: if True, ocr is not used
+ :param imgtype: Type of the Image
+ :param result_type: 'textual' if text is written on the captcha\
+            or 'positional' for captcha where the user has to click\
+ on a specific region on the captcha
+
+ :return: result of decrypting
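+
+        Usage sketch (url and parameters are illustrative)::
+
+            code = self.decryptCaptcha("http://host.tld/captcha.php",
+                                       post={"id": challenge_id}, imgtype="png")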
+ """
+
+ img = self.load(url, get=get, post=post, cookies=cookies)
+
+ id = ("%.2f" % time())[-6:].replace(".", "")
+ temp_file = open(join("tmp", "tmpCaptcha_%s_%s.%s" % (self.__name__, id, imgtype)), "wb")
+ temp_file.write(img)
+ temp_file.close()
+
+ has_plugin = self.__name__ in self.core.pluginManager.getPlugins("captcha")
+
+ if self.core.captcha:
+ Ocr = self.core.pluginManager.loadClass("captcha", self.__name__)
+ else:
+ Ocr = None
+
+ if Ocr and not forceUser:
+ sleep(randint(3000, 5000) / 1000.0)
+ if self.pyfile.abort: raise Abort
+
+ ocr = Ocr()
+ result = ocr.get_captcha(temp_file.name)
+ else:
+ captchaManager = self.core.captchaManager
+ task = captchaManager.newTask(img, imgtype, temp_file.name, result_type)
+ self.cTask = task
+ captchaManager.handleCaptcha(task)
+
+ while task.isWaiting():
+ if self.pyfile.abort:
+ captchaManager.removeTask(task)
+ raise Abort
+ sleep(1)
+
+ captchaManager.removeTask(task)
+
+ if task.error and has_plugin: #ignore default error message since the user could use OCR
+ self.fail(_("Pil and tesseract not installed and no Client connected for captcha decrypting"))
+ elif task.error:
+ self.fail(task.error)
+ elif not task.result:
+                self.fail(_("No captcha result obtained in appropriate time by any of the plugins."))
+
+ result = task.result
+ self.log.debug("Received captcha result: %s" % str(result))
+
+ if not self.core.debug:
+ try:
+ remove(temp_file.name)
+ except:
+ pass
+
+ return result
+
+
+ def load(self, *args, **kwargs):
+ """ See 'Base' load method for more info """
+ if self.pyfile.abort: raise Abort
+ return Base.load(self, *args, **kwargs)
+
+ def download(self, url, get={}, post={}, ref=True, cookies=True, disposition=False):
+ """Downloads the content at url to download folder
+
+ :param url:
+ :param get:
+ :param post:
+ :param ref:
+ :param cookies:
+ :param disposition: if True and server provides content-disposition header\
+ the filename will be changed if needed
+ :return: The location where the file was saved
+ """
+
+ self.checkForSameFiles()
+
+ self.pyfile.setStatus("downloading")
+
+ download_folder = self.config['general']['download_folder']
+
+ location = save_join(download_folder, self.pyfile.package().folder)
+
+ if not exists(location):
+ makedirs(location, int(self.core.config["permission"]["folder"], 8))
+
+ if self.core.config["permission"]["change_dl"] and os.name != "nt":
+ try:
+ uid = getpwnam(self.config["permission"]["user"])[2]
+ gid = getgrnam(self.config["permission"]["group"])[2]
+
+ chown(location, uid, gid)
+ except Exception, e:
+ self.log.warning(_("Setting User and Group failed: %s") % str(e))
+
+ # convert back to unicode
+ location = fs_decode(location)
+ name = save_path(self.pyfile.name)
+
+ filename = join(location, name)
+
+ self.core.hookManager.dispatchEvent("downloadStarts", self.pyfile, url, filename)
+
+ try:
+ newname = self.req.httpDownload(url, filename, get=get, post=post, ref=ref, cookies=cookies,
+ chunks=self.getChunkCount(), resume=self.resumeDownload,
+ progressNotify=self.pyfile.setProgress, disposition=disposition)
+ finally:
+ self.pyfile.size = self.req.size
+
+ if disposition and newname and newname != name: #triple check, just to be sure
+ self.log.info("%(name)s saved as %(newname)s" % {"name": name, "newname": newname})
+ self.pyfile.name = newname
+ filename = join(location, newname)
+
+ fs_filename = fs_encode(filename)
+
+ if self.core.config["permission"]["change_file"]:
+ chmod(fs_filename, int(self.core.config["permission"]["file"], 8))
+
+ if self.core.config["permission"]["change_dl"] and os.name != "nt":
+ try:
+ uid = getpwnam(self.config["permission"]["user"])[2]
+ gid = getgrnam(self.config["permission"]["group"])[2]
+
+ chown(fs_filename, uid, gid)
+ except Exception, e:
+ self.log.warning(_("Setting User and Group failed: %s") % str(e))
+
+ self.lastDownload = filename
+ return self.lastDownload
+
+ def checkDownload(self, rules, api_size=0, max_size=50000, delete=True, read_size=0):
+ """ checks the content of the last downloaded file, re match is saved to `lastCheck`
+
+ :param rules: dict with names and rules to match (compiled regexp or strings)
+ :param api_size: expected file size
+        :param max_size: if the file is larger than this it won't be checked
+ :param delete: delete if matched
+        :param read_size: amount of bytes to read from files larger than max_size
+ :return: dictionary key of the first rule that matched
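+
+        Usage sketch (rules are illustrative)::
+
+            check = self.checkDownload({"empty": re.compile(r"^$"),
+                                        "html": "<html"})
+            if check == "html":
+                self.retry(reason="Got an html page instead of the file")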
+ """
+ lastDownload = fs_encode(self.lastDownload)
+ if not exists(lastDownload): return None
+
+ size = stat(lastDownload)
+ size = size.st_size
+
+ if api_size and api_size <= size: return None
+ elif size > max_size and not read_size: return None
+ self.log.debug("Download Check triggered")
+ f = open(lastDownload, "rb")
+ content = f.read(read_size if read_size else -1)
+ f.close()
+ #produces encoding errors, better log to other file in the future?
+ #self.log.debug("Content: %s" % content)
+ for name, rule in rules.iteritems():
+ if type(rule) in (str, unicode):
+ if rule in content:
+ if delete:
+ remove(lastDownload)
+ return name
+ elif hasattr(rule, "search"):
+ m = rule.search(content)
+ if m:
+ if delete:
+ remove(lastDownload)
+ self.lastCheck = m
+ return name
+
+
+ def getPassword(self):
+ """ get the password the user provided in the package"""
+ password = self.pyfile.package().password
+ if not password: return ""
+ return password
+
+
+ def checkForSameFiles(self, starting=False):
+ """ checks if same file was/is downloaded within same package
+
+ :param starting: indicates that the current download is going to start
+ :raises SkipDownload:
"""
- pass \ No newline at end of file
+
+ pack = self.pyfile.package()
+
+ for pyfile in self.core.files.cache.values():
+ if pyfile != self.pyfile and pyfile.name == self.pyfile.name and pyfile.package().folder == pack.folder:
+ if pyfile.status in (0, 12): #finished or downloading
+ raise SkipDownload(pyfile.pluginname)
+ elif pyfile.status in (
+                    5, 7) and starting: #a download is waiting/starting and was apparently started before
+ raise SkipDownload(pyfile.pluginname)
+
+ download_folder = self.config['general']['download_folder']
+ location = save_join(download_folder, pack.folder, self.pyfile.name)
+
+ if starting and self.core.config['download']['skip_existing'] and exists(location):
+ size = os.stat(location).st_size
+ if size >= self.pyfile.size:
+ raise SkipDownload("File exists.")
+
+ pyfile = self.core.db.findDuplicates(self.pyfile.id, self.pyfile.package().folder, self.pyfile.name)
+ if pyfile:
+ if exists(location):
+ raise SkipDownload(pyfile[0])
+
+        self.log.debug("File %s not skipped, because it does not exist." % self.pyfile.name)
+
+ def clean(self):
+ """ clean everything and remove references """
+ if hasattr(self, "pyfile"):
+ del self.pyfile
+ if hasattr(self, "req"):
+ self.req.close()
+ del self.req
+ if hasattr(self, "thread"):
+ del self.thread
+ if hasattr(self, "html"):
+ del self.html
diff --git a/module/plugins/PluginManager.py b/module/plugins/PluginManager.py
index 18dea7699..e00c1e1f5 100644
--- a/module/plugins/PluginManager.py
+++ b/module/plugins/PluginManager.py
@@ -42,7 +42,7 @@ PluginTuple = namedtuple("PluginTuple", "version re deps user path")
class PluginManager:
ROOT = "module.plugins."
USERROOT = "userplugins."
- TYPES = ("crypter", "container", "hoster", "captcha", "accounts", "hooks", "internal")
+ TYPES = ("crypter", "hoster", "captcha", "accounts", "hooks", "internal")
SINGLE = re.compile(r'__(?P<attr>[a-z0-9_]+)__\s*=\s*(?:r|u|_)?((?:(?<!")"(?!")|\'|\().*(?:(?<!")"(?!")|\'|\)))',
re.I)
@@ -216,54 +216,54 @@ class PluginManager:
def parseUrls(self, urls):
- """parse plugins for given list of urls"""
+        """parse plugins for given list of urls, separated into crypter and hoster"""
- res = [] # tupels of (url, plugin)
+        res = {"hoster": [], "crypter": []} # tuples of (url, plugin)
for url in urls:
if type(url) not in (str, unicode, buffer):
self.log.debug("Parsing invalid type %s" % type(url))
continue
+
found = False
for ptype, name in self.history:
if self.plugins[ptype][name].re.match(url):
- res.append((url, name))
+ res[ptype].append((url, name))
found = (ptype, name)
+ break
- if found and self.history[0] != found:
- # found match, update history
- self.history.remove(found)
- self.history.insert(0, found)
+ if found: # found match
+ if self.history[0] != found: #update history
+ self.history.remove(found)
+ self.history.insert(0, found)
continue
- for ptype in ("crypter", "hoster", "container"):
+ for ptype in ("crypter", "hoster"):
for name, plugin in self.plugins[ptype].iteritems():
if plugin.re.match(url):
- res.append((url, name))
+ res[ptype].append((url, name))
self.history.insert(0, (ptype, name))
del self.history[10:] # cut down to size of 10
found = True
break
if not found:
- res.append((url, "BasePlugin"))
+ res["hoster"].append((url, "BasePlugin"))
- return res
+ return res["hoster"], res["crypter"]
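+
+    # Sketch (illustrative): callers now receive two lists of (url, pluginname) tuples, e.g.
+    #
+    #   hoster, crypter = pluginManager.parseUrls(urls)
+    #   # hoster  -> [("http://host.tld/file.zip", "SomeHoster"), ...]
+    #   # crypter -> [("http://folder.tld/xyz", "SomeCrypter"), ...]
+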
def getPlugins(self, type):
- # TODO clean this workaround
- if type not in self.plugins: type += "s" # append s, so updater can find the plugins
- return self.plugins[type]
+ return self.plugins.get(type, None)
- def findPlugin(self, name, pluginlist=("hoster", "crypter", "container")):
+ def findPlugin(self, name, pluginlist=("hoster", "crypter")):
for ptype in pluginlist:
if name in self.plugins[ptype]:
return ptype, self.plugins[ptype][name]
return None, None
def getPlugin(self, name, original=False):
- """return plugin module from hoster|decrypter|container"""
+        """return plugin module from hoster|crypter"""
type, plugin = self.findPlugin(name)
if not plugin:
@@ -412,22 +412,4 @@ class PluginManager:
:return: List of unfullfilled dependencies
"""
pass
-
-
-if __name__ == "__main__":
- _ = lambda x: x
- pypath = "/home/christian/Projekte/pyload-0.4/module/plugins"
-
- from time import time
-
- p = PluginManager(None)
-
- a = time()
-
- test = ["http://www.youtube.com/watch?v=%s" % x for x in range(0, 100)]
- print p.parseUrls(test)
-
- b = time()
-
- print b - a, "s"
diff --git a/module/plugins/container/CCF.py b/module/plugins/container/CCF.py
index 301b033d4..ab7ff1099 100644
--- a/module/plugins/container/CCF.py
+++ b/module/plugins/container/CCF.py
@@ -4,13 +4,13 @@
import re
from urllib2 import build_opener
-from module.plugins.Container import Container
+from module.plugins.Crypter import Crypter
from module.lib.MultipartPostHandler import MultipartPostHandler
from os import makedirs
from os.path import exists, join
-class CCF(Container):
+class CCF(Crypter):
__name__ = "CCF"
__version__ = "0.2"
__pattern__ = r"(?!http://).*\.ccf$"
diff --git a/module/plugins/container/LinkList.py b/module/plugins/container/LinkList.py
index b9eb4b972..614c76c90 100644
--- a/module/plugins/container/LinkList.py
+++ b/module/plugins/container/LinkList.py
@@ -2,9 +2,9 @@
# -*- coding: utf-8 -*-
-from module.plugins.Container import Container
+from module.plugins.Crypter import Crypter
-class LinkList(Container):
+class LinkList(Crypter):
__name__ = "LinkList"
__version__ = "0.11"
__pattern__ = r".+\.txt$"
diff --git a/module/plugins/container/RSDF.py b/module/plugins/container/RSDF.py
index ea5cd67f2..cbc9864b1 100644
--- a/module/plugins/container/RSDF.py
+++ b/module/plugins/container/RSDF.py
@@ -5,9 +5,9 @@ import base64
import binascii
import re
-from module.plugins.Container import Container
+from module.plugins.Crypter import Crypter
-class RSDF(Container):
+class RSDF(Crypter):
__name__ = "RSDF"
__version__ = "0.21"
__pattern__ = r".*\.rsdf"
diff --git a/module/plugins/hooks/UpdateManager.py b/module/plugins/hooks/UpdateManager.py
index 4324a96ba..d0c7f213d 100644
--- a/module/plugins/hooks/UpdateManager.py
+++ b/module/plugins/hooks/UpdateManager.py
@@ -61,6 +61,11 @@ class UpdateManager(Hook):
@threaded
def periodical(self):
+
+ if self.core.version.endswith("-dev"):
+ self.logDebug("No update check performed on dev version.")
+ return
+
update = self.checkForUpdate()
if update:
self.info["pyload"] = True
diff --git a/module/remote/socketbackend/ttypes.py b/module/remote/socketbackend/ttypes.py
index 6589e5923..682b2b52a 100644
--- a/module/remote/socketbackend/ttypes.py
+++ b/module/remote/socketbackend/ttypes.py
@@ -207,13 +207,12 @@ class ServerStatus(BaseObject):
self.reconnect = reconnect
class ServiceCall(BaseObject):
- __slots__ = ['plugin', 'func', 'arguments', 'parseArguments']
+ __slots__ = ['plugin', 'func', 'arguments']
- def __init__(self, plugin=None, func=None, arguments=None, parseArguments=None):
+ def __init__(self, plugin=None, func=None, arguments=None):
self.plugin = plugin
self.func = func
self.arguments = arguments
- self.parseArguments = parseArguments
class ServiceDoesNotExists(Exception):
__slots__ = ['plugin', 'func']
@@ -238,10 +237,16 @@ class UserData(BaseObject):
self.permission = permission
self.templateName = templateName
+class UserDoesNotExists(Exception):
+ __slots__ = ['user']
+
+ def __init__(self, user=None):
+ self.user = user
+
class Iface:
def addFiles(self, pid, links):
pass
- def addPackage(self, name, links, dest):
+ def addPackage(self, name, links, dest, password):
pass
def call(self, info):
pass
diff --git a/module/remote/thriftbackend/pyload.thrift b/module/remote/thriftbackend/pyload.thrift
index 414a1ebf2..a6c0a259c 100644
--- a/module/remote/thriftbackend/pyload.thrift
+++ b/module/remote/thriftbackend/pyload.thrift
@@ -183,8 +183,7 @@ struct AccountInfo {
struct ServiceCall {
1: PluginName plugin,
2: string func,
- 3: optional list<string> arguments,
- 4: optional bool parseArguments, //default False
+ 3: string arguments, // empty string or json encoded list
}
struct OnlineStatus {
@@ -211,6 +210,10 @@ exception FileDoesNotExists{
1: FileID fid
}
+exception UserDoesNotExists{
+ 1: string user
+}
+
exception ServiceDoesNotExists{
1: string plugin
2: string func
@@ -271,7 +274,7 @@ service Pyload {
// downloads - adding/deleting
list<PackageID> generateAndAddPackages(1: LinkList links, 2: Destination dest),
- PackageID addPackage(1: string name, 2: LinkList links, 3: Destination dest),
+ PackageID addPackage(1: string name, 2: LinkList links, 3: Destination dest, 4: string password),
void addFiles(1: PackageID pid, 2: LinkList links),
void uploadContainer(1: string filename, 2: binary data),
void deleteFiles(1: list<FileID> fids),
@@ -305,7 +308,7 @@ service Pyload {
//auth
bool login(1: string username, 2: string password),
- UserData getUserData(1: string username, 2:string password),
+ UserData getUserData(1: string username, 2:string password) throws (1: UserDoesNotExists ex),
map<string, UserData> getAllUserData(),
//services
diff --git a/module/remote/thriftbackend/thriftgen/pyload/Pyload-remote b/module/remote/thriftbackend/thriftgen/pyload/Pyload-remote
index f8bcc2863..6ee40092d 100755
--- a/module/remote/thriftbackend/thriftgen/pyload/Pyload-remote
+++ b/module/remote/thriftbackend/thriftgen/pyload/Pyload-remote
@@ -57,7 +57,7 @@ if len(sys.argv) <= 1 or sys.argv[1] == '--help':
print ' getPackageOrder(Destination destination)'
print ' getFileOrder(PackageID pid)'
print ' generateAndAddPackages(LinkList links, Destination dest)'
- print ' PackageID addPackage(string name, LinkList links, Destination dest)'
+ print ' PackageID addPackage(string name, LinkList links, Destination dest, string password)'
print ' void addFiles(PackageID pid, LinkList links)'
print ' void uploadContainer(string filename, string data)'
print ' void deleteFiles( fids)'
@@ -350,10 +350,10 @@ elif cmd == 'generateAndAddPackages':
pp.pprint(client.generateAndAddPackages(eval(args[0]),eval(args[1]),))
elif cmd == 'addPackage':
- if len(args) != 3:
- print 'addPackage requires 3 args'
+ if len(args) != 4:
+ print 'addPackage requires 4 args'
sys.exit(1)
- pp.pprint(client.addPackage(args[0],eval(args[1]),eval(args[2]),))
+ pp.pprint(client.addPackage(args[0],eval(args[1]),eval(args[2]),args[3],))
elif cmd == 'addFiles':
if len(args) != 2:
diff --git a/module/remote/thriftbackend/thriftgen/pyload/Pyload.py b/module/remote/thriftbackend/thriftgen/pyload/Pyload.py
index 1e2f78b66..3328fb3fc 100644
--- a/module/remote/thriftbackend/thriftgen/pyload/Pyload.py
+++ b/module/remote/thriftbackend/thriftgen/pyload/Pyload.py
@@ -186,12 +186,13 @@ class Iface(object):
"""
pass
- def addPackage(self, name, links, dest):
+ def addPackage(self, name, links, dest, password):
"""
Parameters:
- name
- links
- dest
+ - password
"""
pass
@@ -1379,22 +1380,24 @@ class Client(Iface):
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "generateAndAddPackages failed: unknown result");
- def addPackage(self, name, links, dest):
+ def addPackage(self, name, links, dest, password):
"""
Parameters:
- name
- links
- dest
+ - password
"""
- self.send_addPackage(name, links, dest)
+ self.send_addPackage(name, links, dest, password)
return self.recv_addPackage()
- def send_addPackage(self, name, links, dest):
+ def send_addPackage(self, name, links, dest, password):
self._oprot.writeMessageBegin('addPackage', TMessageType.CALL, self._seqid)
args = addPackage_args()
args.name = name
args.links = links
args.dest = dest
+ args.password = password
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
@@ -2161,6 +2164,8 @@ class Client(Iface):
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
+ if result.ex is not None:
+ raise result.ex
raise TApplicationException(TApplicationException.MISSING_RESULT, "getUserData failed: unknown result");
def getAllUserData(self, ):
@@ -2929,7 +2934,7 @@ class Processor(Iface, TProcessor):
args.read(iprot)
iprot.readMessageEnd()
result = addPackage_result()
- result.success = self._handler.addPackage(args.name, args.links, args.dest)
+ result.success = self._handler.addPackage(args.name, args.links, args.dest, args.password)
oprot.writeMessageBegin("addPackage", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
@@ -3218,7 +3223,10 @@ class Processor(Iface, TProcessor):
args.read(iprot)
iprot.readMessageEnd()
result = getUserData_result()
- result.success = self._handler.getUserData(args.username, args.password)
+ try:
+ result.success = self._handler.getUserData(args.username, args.password)
+ except UserDoesNotExists, ex:
+ result.ex = ex
oprot.writeMessageBegin("getUserData", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
@@ -4421,12 +4429,14 @@ class addPackage_args(TBase):
- name
- links
- dest
+ - password
"""
__slots__ = [
'name',
'links',
'dest',
+ 'password',
]
thrift_spec = (
@@ -4434,12 +4444,14 @@ class addPackage_args(TBase):
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.LIST, 'links', (TType.STRING,None), None, ), # 2
(3, TType.I32, 'dest', None, None, ), # 3
+ (4, TType.STRING, 'password', None, None, ), # 4
)
- def __init__(self, name=None, links=None, dest=None,):
+ def __init__(self, name=None, links=None, dest=None, password=None,):
self.name = name
self.links = links
self.dest = dest
+ self.password = password
class addPackage_result(TBase):
@@ -5254,18 +5266,22 @@ class getUserData_result(TBase):
"""
Attributes:
- success
+ - ex
"""
__slots__ = [
'success',
+ 'ex',
]
thrift_spec = (
(0, TType.STRUCT, 'success', (UserData, UserData.thrift_spec), None, ), # 0
+ (1, TType.STRUCT, 'ex', (UserDoesNotExists, UserDoesNotExists.thrift_spec), None, ), # 1
)
- def __init__(self, success=None,):
+ def __init__(self, success=None, ex=None,):
self.success = success
+ self.ex = ex
class getAllUserData_args(TBase):
diff --git a/module/threads/BaseThread.py b/module/threads/BaseThread.py
new file mode 100644
index 000000000..b5856c856
--- /dev/null
+++ b/module/threads/BaseThread.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from threading import Thread
+from time import strftime, gmtime
+from sys import exc_info
+from types import MethodType
+from pprint import pformat
+from traceback import format_exc
+
+from module.utils.fs import listdir, join, save_join, stat
+
+class BaseThread(Thread):
+ """abstract base class for thread types"""
+
+ def __init__(self, manager):
+ """Constructor"""
+ Thread.__init__(self)
+ self.setDaemon(True)
+ self.m = manager #thread manager
+ self.log = manager.core.log
+
+
+ def writeDebugReport(self, pyfile):
+ """ writes a debug report to disk """
+
+ dump_name = "debug_%s_%s.zip" % (pyfile.pluginname, strftime("%d-%m-%Y_%H-%M-%S"))
+ dump = self.getDebugDump(pyfile)
+
+ try:
+ import zipfile
+
+ zip = zipfile.ZipFile(dump_name, "w")
+
+ for f in listdir(join("tmp", pyfile.pluginname)):
+ try:
+ # avoid encoding errors
+ zip.write(join("tmp", pyfile.pluginname, f), save_join(pyfile.pluginname, f))
+ except:
+ pass
+
+ info = zipfile.ZipInfo(save_join(pyfile.pluginname, "debug_Report.txt"), gmtime())
+ info.external_attr = 0644 << 16L # change permissions
+
+ zip.writestr(info, dump)
+ zip.close()
+
+ if not stat(dump_name).st_size:
+ raise Exception("Empty Zipfile")
+
+ except Exception, e:
+ self.log.debug("Error creating zip file: %s" % e)
+
+ dump_name = dump_name.replace(".zip", ".txt")
+ f = open(dump_name, "wb")
+ f.write(dump)
+ f.close()
+
+ self.log.info("Debug Report written to %s" % dump_name)
+
+ def getDebugDump(self, pyfile):
+ dump = "pyLoad %s Debug Report of %s %s \n\nTRACEBACK:\n %s \n\nFRAMESTACK:\n" % (
+ self.m.core.api.getServerVersion(), pyfile.pluginname, pyfile.plugin.__version__, format_exc())
+
+ tb = exc_info()[2]
+ stack = []
+ while tb:
+ stack.append(tb.tb_frame)
+ tb = tb.tb_next
+
+ for frame in stack[1:]:
+ dump += "\nFrame %s in %s at line %s\n" % (frame.f_code.co_name,
+ frame.f_code.co_filename,
+ frame.f_lineno)
+
+ for key, value in frame.f_locals.items():
+ dump += "\t%20s = " % key
+ try:
+ dump += pformat(value) + "\n"
+ except Exception, e:
+ dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+ del frame
+
+ del stack #delete it just to be sure...
+
+ dump += "\n\nPLUGIN OBJECT DUMP: \n\n"
+
+ for name in dir(pyfile.plugin):
+ attr = getattr(pyfile.plugin, name)
+ if not name.endswith("__") and type(attr) != MethodType:
+ dump += "\t%20s = " % name
+ try:
+ dump += pformat(attr) + "\n"
+ except Exception, e:
+ dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+ dump += "\nPYFILE OBJECT DUMP: \n\n"
+
+ for name in dir(pyfile):
+ attr = getattr(pyfile, name)
+ if not name.endswith("__") and type(attr) != MethodType:
+ dump += "\t%20s = " % name
+ try:
+ dump += pformat(attr) + "\n"
+ except Exception, e:
+ dump += "<ERROR WHILE PRINTING VALUE> " + str(e) + "\n"
+
+ dump += "\n\nCONFIG: \n\n"
+ dump += pformat(self.m.core.config.values) + "\n"
+
+ return dump
+
+ def clean(self, pyfile):
+        """ set thread inactive and release pyfile """
+ self.active = False
+ pyfile.release()
diff --git a/module/threads/DecrypterThread.py b/module/threads/DecrypterThread.py
new file mode 100644
index 000000000..5ce59a65e
--- /dev/null
+++ b/module/threads/DecrypterThread.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from BaseThread import BaseThread
+
+class DecrypterThread(BaseThread):
+ """thread for decrypting"""
+
+ def __init__(self, manager, data, package):
+ """constructor"""
+ BaseThread.__init__(self, manager)
+ self.queue = data
+ self.package = package
+
+ self.m.log.debug("Starting Decrypt thread")
+
+ self.start()
+
+ def add(self, data):
+ self.queue.extend(data)
+
+ def run(self):
+ plugin_map = {}
+ for plugin, url in self.queue:
+ if plugin in plugin_map:
+ plugin_map[plugin].append(url)
+ else:
+ plugin_map[plugin] = [url]
+
+
+ self.decrypt(plugin_map)
+
+ def decrypt(self, plugin_map):
+ for name, urls in plugin_map.iteritems():
+ p = self.m.core.pluginManager.loadClass("crypter", name)
diff --git a/module/threads/DownloadThread.py b/module/threads/DownloadThread.py
new file mode 100644
index 000000000..3d444686b
--- /dev/null
+++ b/module/threads/DownloadThread.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License,
+ or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+ @author: RaNaN
+"""
+
+from Queue import Queue
+from time import sleep, time
+from traceback import print_exc
+from sys import exc_clear
+from pycurl import error
+
+from module.plugins.Base import Fail, Retry
+from module.plugins.Hoster import Abort, Reconnect, SkipDownload
+
+from BaseThread import BaseThread
+
+class DownloadThread(BaseThread):
+ """thread for downloading files from 'real' hoster plugins"""
+
+ def __init__(self, manager):
+ """Constructor"""
+ BaseThread.__init__(self, manager)
+
+ self.queue = Queue() # job queue
+ self.active = False
+
+ self.start()
+
+ def run(self):
+ """run method"""
+ pyfile = None
+
+ while True:
+ del pyfile
+ self.active = self.queue.get()
+ pyfile = self.active
+
+ if self.active == "quit":
+ self.active = False
+ self.m.threads.remove(self)
+ return True
+
+ try:
+ if not pyfile.hasPlugin(): continue
+ #this pyfile was deleted while queueing
+
+ pyfile.plugin.checkForSameFiles(starting=True)
+                self.m.log.info(_("Download starts: %s") % pyfile.name)
+
+ # start download
+ self.m.core.hookManager.downloadPreparing(pyfile)
+ pyfile.plugin.preprocessing(self)
+
+ self.m.log.info(_("Download finished: %s") % pyfile.name)
+ self.m.core.hookManager.downloadFinished(pyfile)
+ self.m.core.files.checkPackageFinished(pyfile)
+
+ except NotImplementedError:
+ self.m.log.error(_("Plugin %s is missing a function.") % pyfile.pluginname)
+ pyfile.setStatus("failed")
+ pyfile.error = "Plugin does not work"
+ self.clean(pyfile)
+ continue
+
+ except Abort:
+ try:
+ self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ except:
+ pass
+
+ pyfile.setStatus("aborted")
+
+ self.clean(pyfile)
+ continue
+
+ except Reconnect:
+ self.queue.put(pyfile)
+ #pyfile.req.clearCookies()
+
+ while self.m.reconnecting.isSet():
+ sleep(0.5)
+
+ continue
+
+ except Retry, e:
+ reason = e.args[0]
+ self.m.log.info(_("Download restarted: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": reason})
+ self.queue.put(pyfile)
+ continue
+
+ except Fail, e:
+ msg = e.args[0]
+
+ if msg == "offline":
+ pyfile.setStatus("offline")
+ self.m.log.warning(_("Download is offline: %s") % pyfile.name)
+ elif msg == "temp. offline":
+ pyfile.setStatus("temp. offline")
+                    self.m.log.warning(_("Download is temporarily offline: %s") % pyfile.name)
+ else:
+ pyfile.setStatus("failed")
+ self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": msg})
+ pyfile.error = msg
+
+ self.m.core.hookManager.downloadFailed(pyfile)
+ self.clean(pyfile)
+ continue
+
+ except error, e:
+ if len(e.args) == 2:
+ code, msg = e.args
+ else:
+ code = 0
+ msg = e.args
+
+ self.m.log.debug("pycurl exception %s: %s" % (code, msg))
+
+ if code in (7, 18, 28, 52, 56):
+                    self.m.log.warning(_("Couldn't connect to host or connection was reset, waiting 1 minute and retrying."))
+ wait = time() + 60
+
+ pyfile.waitUntil = wait
+ pyfile.setStatus("waiting")
+ while time() < wait:
+ sleep(1)
+ if pyfile.abort:
+ break
+
+ if pyfile.abort:
+ self.m.log.info(_("Download aborted: %s") % pyfile.name)
+ pyfile.setStatus("aborted")
+
+ self.clean(pyfile)
+ else:
+ self.queue.put(pyfile)
+
+ continue
+
+ else:
+ pyfile.setStatus("failed")
+ self.m.log.error("pycurl error %s: %s" % (code, msg))
+ if self.m.core.debug:
+ print_exc()
+ self.writeDebugReport(pyfile)
+
+ self.m.core.hookManager.downloadFailed(pyfile)
+
+ self.clean(pyfile)
+ continue
+
+ except SkipDownload, e:
+ pyfile.setStatus("skipped")
+
+ self.m.log.info(
+ _("Download skipped: %(name)s due to %(plugin)s") % {"name": pyfile.name, "plugin": e.message})
+
+ self.clean(pyfile)
+
+ self.m.core.files.checkPackageFinished(pyfile)
+
+ self.active = False
+ self.m.core.files.save()
+
+ continue
+
+
+ except Exception, e:
+ pyfile.setStatus("failed")
+ self.m.log.warning(_("Download failed: %(name)s | %(msg)s") % {"name": pyfile.name, "msg": str(e)})
+ pyfile.error = str(e)
+
+ if self.m.core.debug:
+ print_exc()
+ self.writeDebugReport(pyfile)
+
+ self.m.core.hookManager.downloadFailed(pyfile)
+ self.clean(pyfile)
+ continue
+
+ finally:
+ self.m.core.files.save()
+ pyfile.checkIfProcessed()
+ exc_clear()
+
+
+ #pyfile.plugin.req.clean()
+
+ self.active = False
+ pyfile.finishIfDone()
+ self.m.core.files.save()
+
+
+ def put(self, job):
+        """assign job to thread"""
+ self.queue.put(job)
+
+
+ def stop(self):
+ """stops the thread"""
+ self.put("quit") \ No newline at end of file
diff --git a/module/threads/HookThread.py b/module/threads/HookThread.py
new file mode 100644
index 000000000..fe4a2a651
--- /dev/null
+++ b/module/threads/HookThread.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from copy import copy
+
+from BaseThread import BaseThread
+
+class HookThread(BaseThread):
+ """thread for hooks"""
+
+ def __init__(self, m, function, args, kwargs):
+ """Constructor"""
+ BaseThread.__init__(self, m)
+
+ self.f = function
+ self.args = args
+ self.kwargs = kwargs
+
+ self.active = []
+
+ m.localThreads.append(self)
+
+ self.start()
+
+ def getActiveFiles(self):
+ return self.active
+
+ def addActive(self, pyfile):
+        """ Adds a pyfile to the active list so it will be displayed in the overview"""
+ if pyfile not in self.active:
+ self.active.append(pyfile)
+
+ def finishFile(self, pyfile):
+ if pyfile in self.active:
+ self.active.remove(pyfile)
+
+ pyfile.finishIfDone()
+
+ def run(self):
+ try:
+ try:
+ self.kwargs["thread"] = self
+ self.f(*self.args, **self.kwargs)
+ except TypeError, e:
+ #dirty method to filter out exceptions
+ if "unexpected keyword argument 'thread'" not in e.args[0]:
+ raise
+
+ del self.kwargs["thread"]
+ self.f(*self.args, **self.kwargs)
+ finally:
+ local = copy(self.active)
+ for x in local:
+ self.finishFile(x)
+
+ self.m.localThreads.remove(self) \ No newline at end of file
diff --git a/module/threads/InfoThread.py b/module/threads/InfoThread.py
new file mode 100644
index 000000000..4cba7da38
--- /dev/null
+++ b/module/threads/InfoThread.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from time import time
+from traceback import print_exc
+
+from module.Api import OnlineStatus
+from module.PyFile import PyFile
+from module.common.packagetools import parseNames
+
+from BaseThread import BaseThread
+
+class InfoThread(BaseThread):
+ def __init__(self, manager, data, pid=-1, rid=-1, add=False):
+ """Constructor"""
+ BaseThread.__init__(self, manager)
+
+ self.data = data
+ self.pid = pid # package id
+ # [ .. (name, plugin) .. ]
+
+ self.rid = rid #result id
+ self.add = add #add packages instead of return result
+
+ self.cache = [] #accumulated data
+
+ self.start()
+
+ def run(self):
+ """run method"""
+
+ plugins = {}
+ container = []
+
+ for url, plugin in self.data:
+ if plugin in plugins:
+ plugins[plugin].append(url)
+ else:
+ plugins[plugin] = [url]
+
+
+ # filter out container plugins
+ for name in self.m.core.pluginManager.getPlugins("container"):
+ if name in plugins:
+ container.extend([(name, url) for url in plugins[name]])
+
+ del plugins[name]
+
+ #directly write to database
+ if self.pid > -1:
+ for pluginname, urls in plugins.iteritems():
+ plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
+ if hasattr(plugin, "getInfo"):
+ self.fetchForPlugin(pluginname, plugin, urls, self.updateDB)
+ self.m.core.files.save()
+
+ elif self.add:
+ for pluginname, urls in plugins.iteritems():
+ plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
+ if hasattr(plugin, "getInfo"):
+ self.fetchForPlugin(pluginname, plugin, urls, self.updateCache, True)
+
+ else:
+ #generate default result
+ result = [(url, 0, 3, url) for url in urls]
+
+ self.updateCache(pluginname, result)
+
+ packs = parseNames([(name, url) for name, x, y, url in self.cache])
+
+ self.m.log.debug("Fetched and generated %d packages" % len(packs))
+
+            for k, v in packs.iteritems():
+ self.m.core.api.addPackage(k, v)
+
+ #empty cache
+ del self.cache[:]
+
+ else: #post the results
+
+
+ for name, url in container:
+ #attach container content
+ try:
+ data = self.decryptContainer(name, url)
+ except:
+ print_exc()
+ self.m.log.error("Could not decrypt container.")
+ data = []
+
+ for url, plugin in data:
+ if plugin in plugins:
+ plugins[plugin].append(url)
+ else:
+ plugins[plugin] = [url]
+
+ self.m.infoResults[self.rid] = {}
+
+ for pluginname, urls in plugins.iteritems():
+ plugin = self.m.core.pluginManager.getPlugin(pluginname, True)
+ if hasattr(plugin, "getInfo"):
+ self.fetchForPlugin(pluginname, plugin, urls, self.updateResult, True)
+
+ #force to process cache
+ if self.cache:
+ self.updateResult(pluginname, [], True)
+
+ else:
+ #generate default result
+ result = [(url, 0, 3, url) for url in urls]
+
+ self.updateResult(pluginname, result, True)
+
+ self.m.infoResults[self.rid]["ALL_INFO_FETCHED"] = {}
+
+ self.m.timestamp = time() + 5 * 60
+
+
+ def updateDB(self, plugin, result):
+ self.m.core.files.updateFileInfo(result, self.pid)
+
+ def updateResult(self, plugin, result, force=False):
+ #parse package name and generate result
+ #accumulate results
+
+ self.cache.extend(result)
+
+ if len(self.cache) >= 20 or force:
+ #used for package generating
+ tmp = [(name, (url, OnlineStatus(name, plugin, "unknown", status, int(size))))
+ for name, size, status, url in self.cache]
+
+ data = parseNames(tmp)
+ result = {}
+ for k, v in data.iteritems():
+ for url, status in v:
+ status.packagename = k
+ result[url] = status
+
+ self.m.setInfoResults(self.rid, result)
+
+ self.cache = []
+
+ def updateCache(self, plugin, result):
+ self.cache.extend(result)
+
+ def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
+ try:
+ result = [] #result loaded from cache
+ process = [] #urls to process
+ for url in urls:
+ if url in self.m.infoCache:
+ result.append(self.m.infoCache[url])
+ else:
+ process.append(url)
+
+ if result:
+ self.m.log.debug("Fetched %d values from cache for %s" % (len(result), pluginname))
+ cb(pluginname, result)
+
+ if process:
+ self.m.log.debug("Run Info Fetching for %s" % pluginname)
+ for result in plugin.getInfo(process):
+ #result = [ .. (name, size, status, url) .. ]
+ if not type(result) == list: result = [result]
+
+ for res in result:
+ self.m.infoCache[res[3]] = res
+
+ cb(pluginname, result)
+
+ self.m.log.debug("Finished Info Fetching for %s" % pluginname)
+ except Exception, e:
+ self.m.log.warning(_("Info Fetching for %(name)s failed | %(err)s") %
+ {"name": pluginname, "err": str(e)})
+ if self.m.core.debug:
+ print_exc()
+
+ # generate default results
+ if err:
+ result = [(url, 0, 3, url) for url in urls]
+ cb(pluginname, result)
+
+
+ def decryptContainer(self, plugin, url):
+ data = []
+ # only works on container plugins
+
+ self.m.log.debug("Pre decrypting %s with %s" % (url, plugin))
+
+ # dummy pyfile
+ pyfile = PyFile(self.m.core.files, -1, url, url, 0, 0, "", plugin, -1, -1)
+
+ pyfile.initPlugin()
+
+ # little plugin lifecycle
+ try:
+ pyfile.plugin.setup()
+ pyfile.plugin.loadToDisk()
+ pyfile.plugin.decrypt(pyfile)
+ pyfile.plugin.deleteTmp()
+
+ for pack in pyfile.plugin.packages:
+ pyfile.plugin.urls.extend(pack[1])
+
+ data, crypter = self.m.core.pluginManager.parseUrls(pyfile.plugin.urls)
+
+ self.m.log.debug("Got %d links." % len(data))
+
+ except Exception, e:
+ self.m.log.debug("Pre decrypting error: %s" % str(e))
+ finally:
+ pyfile.release()
+
+ return data
diff --git a/module/ThreadManager.py b/module/threads/ThreadManager.py
index 033d80fdc..c32286eb9 100644
--- a/module/ThreadManager.py
+++ b/module/threads/ThreadManager.py
@@ -28,11 +28,14 @@ from random import choice
import pycurl
-import PluginThread
from module.PyFile import PyFile
from module.network.RequestFactory import getURL
-from module.utils import freeSpace, lock
+from module.utils import lock
+from module.utils.fs import free_space
+from DecrypterThread import DecrypterThread
+from DownloadThread import DownloadThread
+from InfoThread import InfoThread
class ThreadManager:
"""manages the download threads, assign jobs, reconnect etc"""
@@ -63,7 +66,7 @@ class ThreadManager:
# threads which are fetching hoster results
self.infoResults = {}
- #timeout for cache purge
+ # timeout for cache purge
self.timestamp = 0
pycurl.global_init(pycurl.GLOBAL_DEFAULT)
@@ -75,17 +78,14 @@ class ThreadManager:
def createThread(self):
"""create a download thread"""
- thread = PluginThread.DownloadThread(self)
+ thread = DownloadThread(self)
self.threads.append(thread)
def createInfoThread(self, data, pid):
- """
- start a thread whichs fetches online status and other infos
- data = [ .. () .. ]
- """
+        """ start a thread which fetches online status and other info """
self.timestamp = time() + 5 * 60
- PluginThread.InfoThread(self, data, pid)
+ InfoThread(self, data, pid)
@lock
def createResultThread(self, data, add=False):
@@ -95,10 +95,15 @@ class ThreadManager:
rid = self.resultIDs
self.resultIDs += 1
- PluginThread.InfoThread(self, data, rid=rid, add=add)
+ InfoThread(self, data, rid=rid, add=add)
return rid
+ @lock
+ def createDecryptThread(self, data, pid):
+        """ Start decrypting of the given data; all links of one package are accumulated in one thread."""
+ DecrypterThread(self, data, pid)
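+        # data is a list of (pluginname, url) tuples, e.g. [("SomeCrypter", "http://folder.tld/xyz")],
+        # grouped by plugin inside the thread (names here are illustrative)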
+
@lock
def getInfoResult(self, rid):
@@ -156,7 +161,6 @@ class ThreadManager:
self.infoResults.clear()
self.log.debug("Cleared Result cache")
- #----------------------------------------------------------------------
def tryReconnect(self):
"""checks if reconnect needed"""
@@ -227,7 +231,6 @@ class ThreadManager:
return ip
- #----------------------------------------------------------------------
def checkThreadCount(self):
"""checks if there are need for increasing or reducing thread count"""
@@ -251,7 +254,7 @@ class ThreadManager:
self.log.debug("Cleaned up pycurl")
return True
- #----------------------------------------------------------------------
+
def assignJob(self):
"""assing a job to a thread if possible"""
@@ -264,7 +267,7 @@ class ThreadManager:
inuse = set([(x.active.pluginname,self.getLimit(x)) for x in self.threads if x.active and x.active.hasPlugin() and x.active.plugin.account])
inuse = map(lambda x : (x[0], x[1], len([y for y in self.threads if y.active and y.active.pluginname == x[0]])) ,inuse)
- onlimit = [x[0] for x in inuse if x[1] > 0 and x[2] >= x[1]]
+ onlimit = [x[0] for x in inuse if 0 < x[1] <= x[2]]
occ = [x.active.pluginname for x in self.threads if x.active and x.active.hasPlugin() and not x.active.plugin.multiDL] + onlimit
@@ -282,38 +285,27 @@ class ThreadManager:
job.release()
return
- if job.plugin.__type__ == "hoster":
- spaceLeft = freeSpace(self.core.config["general"]["download_folder"]) / 1024 / 1024
- if spaceLeft < self.core.config["general"]["min_free_space"]:
- self.log.warning(_("Not enough space left on device"))
- self.pause = True
-
- if free and not self.pause:
- thread = free[0]
- #self.downloaded += 1
-
- thread.put(job)
- else:
- #put job back
- if occ not in self.core.files.jobCache:
- self.core.files.jobCache[occ] = []
- self.core.files.jobCache[occ].append(job.id)
-
- #check for decrypt jobs
- job = self.core.files.getDecryptJob()
- if job:
- job.initPlugin()
- thread = PluginThread.DecrypterThread(self, job)
-
+ spaceLeft = free_space(self.core.config["general"]["download_folder"]) / 1024 / 1024
+ if spaceLeft < self.core.config["general"]["min_free_space"]:
+ self.log.warning(_("Not enough space left on device"))
+ self.pause = True
+ if free and not self.pause:
+ thread = free[0]
+ #self.downloaded += 1
+ thread.put(job)
else:
- thread = PluginThread.DecrypterThread(self, job)
+ #put job back
+ if occ not in self.core.files.jobCache:
+ self.core.files.jobCache[occ] = []
+ self.core.files.jobCache[occ].append(job.id)
def getLimit(self, thread):
limit = thread.active.plugin.account.options.get("limitDL","0")
if limit == "": limit = "0"
return int(limit)
+
def cleanup(self):
"""do global cleanup, should be called when finished with pycurl"""
pycurl.global_cleanup()
diff --git a/module/threads/__init__.py b/module/threads/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/module/threads/__init__.py
diff --git a/module/unescape.py b/module/unescape.py
deleted file mode 100644
index d8999e077..000000000
--- a/module/unescape.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from module.utils import html_unescape
-#deprecated
-unescape = html_unescape \ No newline at end of file
diff --git a/module/Utils.py b/module/utils/__init__.py
index 86fd67558..0d68448cb 100644
--- a/module/Utils.py
+++ b/module/utils/__init__.py
@@ -3,21 +3,12 @@
""" Store all usefull functions here """
import os
-import sys
import time
import re
-from os.path import join
from string import maketrans
from itertools import islice
from htmlentitydefs import name2codepoint
-def chmod(*args):
- try:
- os.chmod(*args)
- except:
- pass
-
-
def decode(string):
""" decode string with utf if possible """
try:
@@ -28,7 +19,6 @@ def decode(string):
except:
return string
-
def remove_chars(string, repl):
""" removes all chars in repl from string"""
if type(string) == str:
@@ -37,34 +27,6 @@ def remove_chars(string, repl):
return string.translate(dict([(ord(s), None) for s in repl]))
-def save_path(name):
- #remove some chars
- if os.name == 'nt':
- return remove_chars(name, '/\\?%*:|"<>')
- else:
- return remove_chars(name, '/\\"')
-
-
-def save_join(*args):
- """ joins a path, encoding aware """
- return fs_encode(join(*[x if type(x) == unicode else decode(x) for x in args]))
-
-
-# File System Encoding functions:
-# Use fs_encode before accesing files on disk, it will encode the string properly
-
-if sys.getfilesystemencoding().startswith('ANSI'):
- def fs_encode(string):
- try:
- string = string.encode('utf-8')
- finally:
- return string
-
- fs_decode = decode #decode utf8
-
-else:
- fs_encode = fs_decode = lambda x: x # do nothing
-
def get_console_encoding(enc):
if os.name == "nt":
if enc == "cp65001": # aka UTF-8
@@ -87,6 +49,8 @@ def compare_time(start, end):
elif start < now > end < start: return True
else: return False
+def to_list(value):
+ return value if type(value) == list else [value]
def formatSize(size):
"""formats size of bytes"""
@@ -104,19 +68,8 @@ def formatSpeed(speed):
def freeSpace(folder):
- folder = fs_encode(folder)
-
- if os.name == "nt":
- import ctypes
-
- free_bytes = ctypes.c_ulonglong(0)
- ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(folder), None, None, ctypes.pointer(free_bytes))
- return free_bytes.value
- else:
- from os import statvfs
-
- s = statvfs(folder)
- return s.f_bsize * s.f_bavail
+ print "Deprecated freeSpace"
+ return free_space(folder)
def uniqify(seq, idfun=None):
@@ -212,3 +165,7 @@ if __name__ == "__main__":
print freeSpace(".")
print remove_chars("ab'cdgdsf''ds'", "'ghd")
+
+
+# TODO: Legacy import
+from fs import chmod, save_path, save_join, fs_decode, fs_encode, free_space \ No newline at end of file
diff --git a/module/utils/fs.py b/module/utils/fs.py
new file mode 100644
index 000000000..23f87a326
--- /dev/null
+++ b/module/utils/fs.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+from os.path import join
+from . import decode, remove_chars
+
+# File System Encoding functions:
+# Use fs_encode before accessing files on disk, it will encode the string properly
+
+if sys.getfilesystemencoding().startswith('ANSI'):
+    def fs_encode(string):
+        # encode unicode to utf8, return byte strings unchanged (avoids returning None)
+        if type(string) == unicode:
+            return string.encode('utf8')
+        return string
+
+ fs_decode = decode #decode utf8
+
+else:
+ fs_encode = fs_decode = lambda x: x # do nothing
+
+# FS utilities
+def chmod(path, mode):
+ return os.chmod(fs_encode(path), mode)
+
+def chown(path, uid, gid):
+ return os.chown(fs_encode(path), uid, gid)
+
+def remove(path):
+ return os.remove(fs_encode(path))
+
+def exists(path):
+ return os.path.exists(fs_encode(path))
+
+def makedirs(path, mode=0660):
+ return os.makedirs(fs_encode(path), mode)
+
+def listdir(path):
+ return os.listdir(fs_encode(path))
+
+def save_path(name):
+ #remove some chars
+ if os.name == 'nt':
+ return remove_chars(name, '/\\?%*:|"<>')
+ else:
+ return remove_chars(name, '/\\"')
+
+def stat(name):
+ return os.stat(fs_encode(name))
+
+def save_join(*args):
+ """ joins a path, encoding aware """
+ return fs_encode(join(*[x if type(x) == unicode else decode(x) for x in args]))
+
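+# Usage sketch (paths are illustrative): save_join(u"downloads", "package", "file.txt")
+# returns an encoded, joined path that can be passed directly to the fs helpers above.
+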
+def free_space(folder):
+ folder = fs_encode(folder)
+
+ if os.name == "nt":
+ import ctypes
+
+ free_bytes = ctypes.c_ulonglong(0)
+ ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(folder), None, None, ctypes.pointer(free_bytes))
+ return free_bytes.value
+ else:
+ from os import statvfs
+
+ s = statvfs(folder)
+ return s.f_bsize * s.f_bavail \ No newline at end of file
diff --git a/module/web/json_app.py b/module/web/json_app.py
index e02aa0707..5acafe153 100644
--- a/module/web/json_app.py
+++ b/module/web/json_app.py
@@ -179,11 +179,7 @@ def add_package():
links = map(lambda x: x.strip(), links)
links = filter(lambda x: x != "", links)
- pack = PYLOAD.addPackage(name, links, queue)
- if pw:
- pw = pw.decode("utf8", "ignore")
- data = {"password": pw}
- PYLOAD.setPackageData(pack, data)
+ PYLOAD.addPackage(name, links, queue, pw.decode("utf8", "ignore"))
@route("/json/move_package/<dest:int>/<id:int>")
diff --git a/pyLoadCore.py b/pyLoadCore.py
index b8856accf..b5b4add9e 100755
--- a/pyLoadCore.py
+++ b/pyLoadCore.py
@@ -29,8 +29,7 @@ from imp import find_module
import logging
import logging.handlers
import os
-from os import _exit, execl, getcwd, makedirs, remove, sep, walk, chdir, close
-from os.path import exists, join
+from os import _exit, execl, getcwd, remove, walk, chdir, close
import signal
import sys
from sys import argv, executable, exit
@@ -58,7 +57,8 @@ from module.remote.RemoteManager import RemoteManager
from module.database import DatabaseBackend, FileHandler
import module.common.pylgettext as gettext
-from module.utils import freeSpace, formatSize, get_console_encoding, fs_encode
+from module.utils import formatSize, get_console_encoding
+from module.utils.fs import free_space, exists, makedirs, join
from codecs import getwriter
@@ -376,7 +376,7 @@ class Core(object):
# later imported because they would trigger api import, and remote value not set correctly
from module import Api
from module.HookManager import HookManager
- from module.ThreadManager import ThreadManager
+ from module.threads.ThreadManager import ThreadManager
if Api.activated != self.remote:
self.log.warning("Import error: API remote status not correct.")
@@ -387,13 +387,15 @@ class Core(object):
#hell yeah, so many important managers :D
self.pluginManager = PluginManager(self)
- self.pullManager = EventManager(self)
+ self.eventManager = EventManager(self)
self.accountManager = AccountManager(self)
self.threadManager = ThreadManager(self)
self.captchaManager = CaptchaManager(self)
self.hookManager = HookManager(self)
self.remoteManager = RemoteManager(self)
+ self.files.ev = self.eventManager
+
self.js = JsEngine()
self.log.info(_("Downloadtime: %s") % self.api.isTimeDownload())
@@ -404,12 +406,12 @@ class Core(object):
if web:
self.init_webserver()
- dl_folder = fs_encode(self.config["general"]["download_folder"])
+ dl_folder = self.config["general"]["download_folder"]
if not exists(dl_folder):
makedirs(dl_folder)
- spaceLeft = freeSpace(dl_folder)
+ spaceLeft = free_space(dl_folder)
self.log.info(_("Free space: %s") % formatSize(spaceLeft))