From 6750a9481f44c55252d72b3c791f5efbcaeae71c Mon Sep 17 00:00:00 2001
From: RaNaN
Date: Thu, 10 Feb 2011 17:53:25 +0100
Subject: cleanup

---
 module/DatabaseBackend.py | 324 -------
 module/FileDatabase.py | 882 -------------------
 module/PluginThread.py | 2 +-
 module/StorageDatabase.py | 49 --
 module/UserDatabase.py | 72 --
 module/database/DatabaseBackend.py | 324 +++
 module/database/FileDatabase.py | 879 +++++++++++++++
 module/database/StorageDatabase.py | 49 ++
 module/database/UserDatabase.py | 72 ++
 module/lib/thrift/protocol/fastbinary.c | 1203 --------------------
 module/remote/pyload.thrift | 26 +-
 module/remote/thriftgen/pyload/Pyload-remote | 2 +-
 module/remote/thriftgen/pyload/Pyload.py | 10 +-
 module/remote/thriftgen/pyload/ttypes.py | 78 +-
 module/web/ajax/__init__.py | 0
 module/web/ajax/models.py | 2 -
 module/web/ajax/tests.py | 23 -
 module/web/ajax/urls.py | 36 -
 module/web/ajax/views.py | 321 -------
 module/web/cnl/__init__.py | 0
 module/web/cnl/models.py | 3 -
 module/web/cnl/tests.py | 23 -
 module/web/cnl/urls.py | 21 -
 module/web/cnl/views.py | 166 ----
 module/web/pyload/__init__.py | 0
 module/web/pyload/admin.py | 15 -
 module/web/pyload/models.py | 31 -
 module/web/pyload/templatetags/__init__.py | 0
 module/web/pyload/templatetags/contains.py | 14 -
 module/web/pyload/templatetags/quotepath.py | 68 --
 module/web/pyload/templatetags/token.py | 17 -
 module/web/pyload/templatetags/truncate.py | 13 -
 module/web/pyload/tests.py | 23 -
 module/web/pyload/urls.py | 31 -
 module/web/pyload/views.py | 492 -----------
 module/web/pyload_app.py | 47 +-
 36 files changed, 1376 insertions(+), 3942 deletions(-)
 delete mode 100644 module/DatabaseBackend.py
 delete mode 100644 module/FileDatabase.py
 delete mode 100644 module/StorageDatabase.py
 delete mode 100644 module/UserDatabase.py
 create mode 100644 module/database/DatabaseBackend.py
 create mode 100644 module/database/FileDatabase.py
 create mode 100644 module/database/StorageDatabase.py
 create mode 100644 module/database/UserDatabase.py
 delete mode 100644 module/lib/thrift/protocol/fastbinary.c
 delete mode 100644 module/web/ajax/__init__.py
 delete mode 100644 module/web/ajax/models.py
 delete mode 100644 module/web/ajax/tests.py
 delete mode 100644 module/web/ajax/urls.py
 delete mode 100644 module/web/ajax/views.py
 delete mode 100644 module/web/cnl/__init__.py
 delete mode 100644 module/web/cnl/models.py
 delete mode 100644 module/web/cnl/tests.py
 delete mode 100644 module/web/cnl/urls.py
 delete mode 100644 module/web/cnl/views.py
 delete mode 100644 module/web/pyload/__init__.py
 delete mode 100644 module/web/pyload/admin.py
 delete mode 100644 module/web/pyload/models.py
 delete mode 100644 module/web/pyload/templatetags/__init__.py
 delete mode 100644 module/web/pyload/templatetags/contains.py
 delete mode 100644 module/web/pyload/templatetags/quotepath.py
 delete mode 100644 module/web/pyload/templatetags/token.py
 delete mode 100644 module/web/pyload/templatetags/truncate.py
 delete mode 100644 module/web/pyload/tests.py
 delete mode 100644 module/web/pyload/urls.py
 delete mode 100644 module/web/pyload/views.py
(limited to 'module')

diff --git a/module/DatabaseBackend.py b/module/DatabaseBackend.py
deleted file mode 100644
index 1c40f270b..000000000
--- a/module/DatabaseBackend.py
+++ /dev/null
@@ -1,324 +0,0 @@
-#!/usr/bin/env python
-"""
-    This program is free software; you can redistribute it and/or modify
-    it under the terms of the GNU General Public License as published by
-    the Free Software Foundation; either version 3 of the
License, - or (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - See the GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, see . - - @author: RaNaN - @author: mkaay -""" - -from threading import Lock -from threading import Thread -from threading import Event -from os import remove -from os.path import exists -from shutil import move - -from Queue import Queue -from traceback import print_exc - -from utils import chmod - -try: - from pysqlite2 import dbapi2 as sqlite3 -except: - import sqlite3 - -DB_VERSION = 4 - -class style(): - db = None - - @classmethod - def setDB(cls, db): - cls.db = db - - @classmethod - def inner(cls, f): - @staticmethod - def x(*args, **kwargs): - if cls.db: - return f(cls.db, *args, **kwargs) - return x - - @classmethod - def queue(cls, f): - @staticmethod - def x(*args, **kwargs): - if cls.db: - return cls.db.queue(f, *args, **kwargs) - return x - - @classmethod - def async(cls, f): - @staticmethod - def x(*args, **kwargs): - if cls.db: - return cls.db.async(f, *args, **kwargs) - return x - -class DatabaseJob(): - def __init__(self, f, *args, **kwargs): - self.done = Event() - - self.f = f - self.args = args - self.kwargs = kwargs - - self.result = None - self.exception = False - - def processJob(self): - try: - self.result = self.f(*self.args, **self.kwargs) - except Exception, e: - print "Database Error @", self.f.__name__, self.args[1:], self.kwargs, e - print_exc() - self.exception = e - self.done.set() - - def wait(self): - self.done.wait() - -class DatabaseBackend(Thread): - subs = [] - def __init__(self, core): - Thread.__init__(self) - self.setDaemon(True) - self.core = core - - self.transactionLock = Lock() - self.jobs = Queue() - - self.setuplock = Event() - - style.setDB(self) - - def setup(self): - self.start() - self.setuplock.wait() - - def run(self): - """main loop, which executes commands""" - convert = self._checkVersion() #returns None or current version - - self.conn = sqlite3.connect("files.db") - chmod("files.db", 0600) - - self.c = self.conn.cursor() #compatibility - - if convert is not None: - self._convertDB(convert) - - self._createTables() - self.conn.commit() - - self.setuplock.set() - - while True: - j = self.jobs.get() - self.transactionLock.acquire() - if j == "quit": - self.c.close() - self.conn.close() - self.transactionLock.release() - break - j.processJob() - if j.exception: - self.conn.rollback() - else: - self.conn.commit() - self.transactionLock.release() - - @style.queue - def shutdown(self): - self.conn.commit() - self.jobs.put("quit") - - def _checkVersion(self): - """ check db version and delete it if needed""" - if not exists("files.version"): - f = open("files.version", "wb") - f.write(str(DB_VERSION)) - f.close() - return - - f = open("files.version", "rb") - v = int(f.read().strip()) - f.close() - if v < DB_VERSION: - if v < 2: - try: - self.manager.core.log.warning(_("Filedatabase was deleted due to incompatible version.")) - except: - print "Filedatabase was deleted due to incompatible version." 
- remove("files.version") - move("files.db", "files.backup.db") - f = open("files.version", "wb") - f.write(str(DB_VERSION)) - f.close() - return v - - def _convertDB(self, v): - try: - getattr(self, "_convertV%i" % v)() - except: - try: - self.core.log.error(_("Filedatabase could NOT be converted.")) - except: - print "Filedatabase could NOT be converted." - - #--convert scripts start - - def _convertV2(self): - self.c.execute('CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")') - try: - self.manager.core.log.info(_("Database was converted from v2 to v3.")) - except: - print "Database was converted from v2 to v3." - self._convertV3() - - def _convertV3(self): - self.c.execute('CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)') - try: - self.manager.core.log.info(_("Database was converted from v3 to v4.")) - except: - print "Database was converted from v3 to v4." - - #--convert scripts end - - def _createTables(self): - """create tables for database""" - - self.c.execute('CREATE TABLE IF NOT EXISTS "packages" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "folder" TEXT, "password" TEXT DEFAULT "", "site" TEXT DEFAULT "", "queue" INTEGER DEFAULT 0 NOT NULL, "packageorder" INTEGER DEFAULT 0 NOT NULL, "priority" INTEGER DEFAULT 0 NOT NULL)') - self.c.execute('CREATE TABLE IF NOT EXISTS "links" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "url" TEXT NOT NULL, "name" TEXT, "size" INTEGER DEFAULT 0 NOT NULL, "status" INTEGER DEFAULT 3 NOT NULL, "plugin" TEXT DEFAULT "BasePlugin" NOT NULL, "error" TEXT DEFAULT "", "linkorder" INTEGER DEFAULT 0 NOT NULL, "package" INTEGER DEFAULT 0 NOT NULL, FOREIGN KEY(package) REFERENCES packages(id))') - self.c.execute('CREATE INDEX IF NOT EXISTS "pIdIndex" ON links(package)') - self.c.execute('CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")') - self.c.execute('CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)') - - if exists("pyload.db"): - try: - self.core.log.info(_("Converting old Django DB")) - except: - print "Converting old Django DB" - conn = sqlite3.connect('pyload.db') - c = conn.cursor() - c.execute("SELECT username, password, email from auth_user WHERE is_superuser") - users = [] - for r in c: - pw = r[1].split("$") - users.append((r[0], pw[1] + pw[2], r[2])) - c.close() - conn.close() - - self.c.executemany("INSERT INTO users(name, password, email) VALUES (?, ?, ?)", users) - move("pyload.db", "pyload.old.db") - if exists("web.db"): - try: - self.core.log.info(_("Moving users")) - except: - print "Moving users" - conn = sqlite3.connect('web.db') - c = conn.cursor() - c.execute("SELECT name, password, email, role, permission FROM users") - for r in c: - self.c.execute('SELECT name FROM users WHERE name=?', (r[0], )) - if self.c.fetchone() is None: - self.c.executemany("INSERT INTO users (name, password, email, role, permission) VALUES (?, ?, ?, ?, ?)", r) - c.close() - conn.close() - - 
move("web.db", "web.old.db") - self.c.execute('VACUUM') - - def createCursor(self): - return self.conn.cursor() - - @style.async - def commit(self): - self.conn.commit() - - @style.async - def rollback(self): - self.conn.rollback() - - def async(self, f, *args, **kwargs): - args = (self, ) + args - job = DatabaseJob(f, *args, **kwargs) - self.jobs.put(job) - - def queue(self, f, *args, **kwargs): - args = (self, ) + args - job = DatabaseJob(f, *args, **kwargs) - self.jobs.put(job) - job.wait() - return job.result - - @classmethod - def registerSub(cls, klass): - cls.subs.append(klass) - - @classmethod - def unregisterSub(cls, klass): - cls.subs.remove(klass) - - def __getattr__(self, attr): - for sub in DatabaseBackend.subs: - if hasattr(sub, attr): - return getattr(sub, attr) - -if __name__ == "__main__": - db = DatabaseBackend() - db.setup() - - class Test(): - @style.queue - def insert(db): - c = db.createCursor() - for i in range(1000): - c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", ("foo", i, "bar")) - @style.async - def insert2(db): - c = db.createCursor() - for i in range(1000*1000): - c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", ("foo", i, "bar")) - - @style.queue - def select(db): - c = db.createCursor() - for i in range(10): - res = c.execute("SELECT value FROM storage WHERE identifier=? AND key=?", ("foo", i)) - print res.fetchone() - - @style.queue - def error(db): - c = db.createCursor() - print "a" - c.execute("SELECT myerror FROM storage WHERE identifier=? AND key=?", ("foo", i)) - print "e" - - db.registerSub(Test) - from time import time - start = time() - for i in range(100): - db.insert() - end = time() - print end-start - - start = time() - db.insert2() - end = time() - print end-start - - db.error() - diff --git a/module/FileDatabase.py b/module/FileDatabase.py deleted file mode 100644 index 20548c136..000000000 --- a/module/FileDatabase.py +++ /dev/null @@ -1,882 +0,0 @@ -#!/usr/bin/env python -""" - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 3 of the License, - or (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - See the GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, see . 
- - @author: RaNaN - @author: mkaay -""" - -from Queue import Queue -from threading import Lock -from threading import RLock -from threading import Thread -from time import time -import traceback - -from module.PullEvents import InsertEvent -from module.PullEvents import ReloadAllEvent -from module.PullEvents import RemoveEvent -from module.PullEvents import UpdateEvent - -from module.PyPackage import PyPackage -from module.PyFile import PyFile -from module.PyFile import formatSize - -from module.DatabaseBackend import style -from module.DatabaseBackend import DatabaseBackend - -try: - from pysqlite2 import dbapi2 as sqlite3 -except: - import sqlite3 - -######################################################################## -class FileHandler: - """Handles all request made to obtain information, - modify status or other request for links or packages""" - - - #---------------------------------------------------------------------- - def __init__(self, core): - """Constructor""" - self.core = core - - # translations - self.statusMsg = [_("finished"), _("offline"), _("online"), _("queued"), _("checking"), _("waiting"), _("reconnected"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), _("downloading"), _("processing"), _("unknown")] - - self.cache = {} #holds instances for files - self.packageCache = {} # same for packages - #@TODO: purge the cache - - self.jobCache = {} - - self.lock = RLock() #@TODO should be a Lock w/o R - - self.filecount = -1 # if an invalid value is set get current value from db - self.unchanged = False #determines if any changes was made since last call - - self.db = self.core.db - - def change(func): - def new(*args): - args[0].unchanged = False - args[0].filecount = -1 - args[0].jobCache = {} - return func(*args) - return new - - def lock(func): - def new(*args): - #print "Handler: %s args: %s" % (func,args[1:]) - args[0].lock.acquire() - res = func(*args) - args[0].lock.release() - #print "Handler: %s return: %s" % (func, res) - return res - return new - - #---------------------------------------------------------------------- - def save(self): - """saves all data to backend""" - self.db.commit() - - #---------------------------------------------------------------------- - def syncSave(self): - """saves all data to backend and waits until all data are written""" - pyfiles = self.cache.values() - for pyfile in pyfiles: - pyfile.sync() - - pypacks = self.packageCache.values() - for pypack in pypacks: - pypack.sync() - - self.db.syncSave() - - #---------------------------------------------------------------------- - def getCompleteData(self, queue=1): - """gets a complete data representation""" - - data = self.db.getAllLinks(queue) - packs = self.db.getAllPackages(queue) - - data.update([(str(x.id), x.toDbDict()[x.id]) for x in self.cache.itervalues()]) - packs.update([(str(x.id), x.toDict()[x.id]) for x in self.packageCache.itervalues() if x.queue == queue]) - - for key, value in data.iteritems(): - if packs.has_key(str(value["package"])): - packs[str(value["package"])]["links"][key] = value - - return packs - - #---------------------------------------------------------------------- - def getInfoData(self, queue=1): - """gets a data representation without links""" - - packs = self.db.getAllPackages(queue) - packs.update([(str(x.id), x.toDict()[x.id]) for x in self.packageCache.itervalues() if x.queue == queue]) - - return packs - - @lock - @change - def addLinks(self, urls, package): - """adds links""" - - data = 
self.core.pluginManager.parseUrls(urls) - - self.db.addLinks(data, package) - self.core.threadManager.createInfoThread(data, package) - - #@TODO change from reloadAll event to package update event - self.core.pullManager.addEvent(ReloadAllEvent("collector")) - - #---------------------------------------------------------------------- - @lock - @change - def addPackage(self, name, folder, queue=0): - """adds a package, default to link collector""" - lastID = self.db.addPackage(name, folder, queue) - p = self.db.getPackage(lastID) - e = InsertEvent("pack", lastID, p.order, "collector" if not queue else "queue") - self.core.pullManager.addEvent(e) - return lastID - - #---------------------------------------------------------------------- - @lock - @change - def deletePackage(self, id): - """delete package and all contained links""" - - p = self.getPackage(id) - - if not p: - if self.packageCache.has_key(id): del self.packageCache[id] - return - - e = RemoveEvent("pack", id, "collector" if not p.queue else "queue") - - pyfiles = self.cache.values() - - for pyfile in pyfiles: - if pyfile.packageid == id: - pyfile.abortDownload() - pyfile.release() - - self.db.deletePackage(p) - self.core.pullManager.addEvent(e) - - if self.packageCache.has_key(id): - del self.packageCache[id] - - #---------------------------------------------------------------------- - @lock - @change - def deleteLink(self, id): - """deletes links""" - - f = self.getFile(id) - pid = f.packageid - - if not f: - return None - - e = RemoveEvent("file", id, "collector" if not f.package().queue else "queue") - - - if id in self.core.threadManager.processingIds(): - self.cache[id].abortDownload() - - if self.cache.has_key(id): - del self.cache[id] - - self.db.deleteLink(f) - - self.core.pullManager.addEvent(e) - - p = self.getPackage(pid) - if not len(p.getChildren()): - p.delete() - - #---------------------------------------------------------------------- - def releaseLink(self, id): - """removes pyfile from cache""" - if self.cache.has_key(id): - del self.cache[id] - - #---------------------------------------------------------------------- - def releasePackage(self, id): - """removes package from cache""" - if self.packageCache.has_key(id): - del self.packageCache[id] - - #---------------------------------------------------------------------- - def updateLink(self, pyfile): - """updates link""" - self.db.updateLink(pyfile) - - e = UpdateEvent("file", pyfile.id, "collector" if not pyfile.package().queue else "queue") - self.core.pullManager.addEvent(e) - - #---------------------------------------------------------------------- - def updatePackage(self, pypack): - """updates a package""" - self.db.updatePackage(pypack) - - e = UpdateEvent("pack", pypack.id, "collector" if not pypack.queue else "queue") - self.core.pullManager.addEvent(e) - - #---------------------------------------------------------------------- - def getPackage(self, id): - """return package instance""" - - if self.packageCache.has_key(id): - return self.packageCache[id] - else: - return self.db.getPackage(id) - - #---------------------------------------------------------------------- - def getPackageData(self, id): - """returns dict with package information""" - pack = self.getPackage(id) - - if not pack: - return None - - pack = pack.toDict()[id] - - data = self.db.getPackageData(id) - - tmplist = [] - - cache = self.cache.values() - for x in cache: - if int(x.toDbDict()[x.id]["package"]) == int(id): - tmplist.append((str(x.id), x.toDbDict()[x.id])) - 
data.update(tmplist) - - pack["links"] = data - - return pack - - #---------------------------------------------------------------------- - def getFileData(self, id): - """returns dict with file information""" - if self.cache.has_key(id): - return self.cache[id].toDbDict() - - return self.db.getLinkData(id) - - #---------------------------------------------------------------------- - def getFile(self, id): - """returns pyfile instance""" - if self.cache.has_key(id): - return self.cache[id] - else: - return self.db.getFile(id) - - #---------------------------------------------------------------------- - @lock - def getJob(self, occ): - """get suitable job""" - - #@TODO clean mess - #@TODO improve selection of valid jobs - - if self.jobCache.has_key(occ): - if self.jobCache[occ]: - id = self.jobCache[occ].pop() - if id == "empty": - pyfile = None - self.jobCache[occ].append("empty") - else: - pyfile = self.getFile(id) - else: - jobs = self.db.getJob(occ) - jobs.reverse() - if not jobs: - self.jobCache[occ].append("empty") - pyfile = None - else: - self.jobCache[occ].extend(jobs) - pyfile = self.getFile(self.jobCache[occ].pop()) - - else: - self.jobCache = {} #better not caching to much - jobs = self.db.getJob(occ) - jobs.reverse() - self.jobCache[occ] = jobs - - if not jobs: - self.jobCache[occ].append("empty") - pyfile = None - else: - pyfile = self.getFile(self.jobCache[occ].pop()) - - #@TODO: maybe the new job has to be approved... - - - #pyfile = self.getFile(self.jobCache[occ].pop()) - return pyfile - - @lock - def getDecryptJob(self): - """return job for decrypting""" - if self.jobCache.has_key("decrypt"): - return None - - plugins = self.core.pluginManager.crypterPlugins.keys() + self.core.pluginManager.containerPlugins.keys() - plugins = str(tuple(plugins)) - - jobs = self.db.getPluginJob(plugins) - if jobs: - return self.getFile(jobs[0]) - else: - self.jobCache["decrypt"] = "empty" - return None - - def getFileCount(self): - """returns number of files""" - - if self.filecount == -1: - self.filecount = self.db.filecount(1) - - return self.filecount - - #---------------------------------------------------------------------- - def getQueueCount(self): - """number of files that have to be processed""" - pass - - #---------------------------------------------------------------------- - @lock - @change - def restartPackage(self, id): - """restart package""" - pyfiles = self.cache.values() - for pyfile in pyfiles: - if pyfile.packageid == id: - self.restartFile(pyfile.id) - - self.db.restartPackage(id) - - if self.packageCache.has_key(id): - self.packageCache[id].setFinished = False - - e = UpdateEvent("pack", id, "collector" if not self.getPackage(id).queue else "queue") - self.core.pullManager.addEvent(e) - - @lock - @change - def restartFile(self, id): - """ restart file""" - if self.cache.has_key(id): - self.cache[id].status = 3 - self.cache[id].name = self.cache[id].url - self.cache[id].error = "" - self.cache[id].abortDownload() - - - self.db.restartFile(id) - - e = UpdateEvent("file", id, "collector" if not self.getFile(id).package().queue else "queue") - self.core.pullManager.addEvent(e) - - @lock - @change - def setPackageLocation(self, id, queue): - """push package to queue""" - - pack = self.db.getPackage(id) - - e = RemoveEvent("pack", id, "collector" if not pack.queue else "queue") - self.core.pullManager.addEvent(e) - - self.db.clearPackageOrder(pack) - - pack = self.db.getPackage(id) - - pack.queue = queue - self.db.updatePackage(pack) - - self.db.reorderPackage(pack, -1, 
True) - - self.db.commit() - self.releasePackage(id) - pack = self.getPackage(id) - e = InsertEvent("pack", id, pack.order, "collector" if not pack.queue else "queue") - self.core.pullManager.addEvent(e) - - @lock - @change - def reorderPackage(self, id, position): - p = self.getPackage(id) - - e = RemoveEvent("pack", id, "collector" if not p.queue else "queue") - self.core.pullManager.addEvent(e) - self.db.reorderPackage(p, position) - - packs = self.packageCache.values() - for pack in packs: - if pack.queue != p.queue or pack.order < 0 or pack == p: continue - if p.order > position: - if pack.order >= position and pack.order < p.order: - pack.order += 1 - elif p.order < position: - if pack.order <= position and pack.order > p.order: - pack.order -= 1 - - p.order = position - self.db.commit() - - e = ReloadAllEvent("collector" if not p.queue else "queue") - self.core.pullManager.addEvent(e) - - @lock - @change - def reorderFile(self, id, position): - f = self.getFileData(id) - f = f[str(id)] - - e = RemoveEvent("file", id, "collector" if not self.getPackage(f["package"]).queue else "queue") - self.core.pullManager.addEvent(e) - - self.db.reorderLink(f, position) - - pyfiles = self.cache.values() - for pyfile in pyfiles: - if pyfile.packageid != f["package"] or pyfile.order < 0: continue - if f["order"] > position: - if pyfile.order >= position and pyfile.order < f["order"]: - pyfile.order += 1 - elif f["order"] < position: - if pyfile.order <= position and pyfile.order > f["order"]: - pyfile.order -= 1 - - if self.cache.has_key(id): - self.cache[id].order = position - - self.db.commit() - - e = ReloadAllEvent("collector" if not self.getPackage(f["package"]).queue else "queue") - - self.core.pullManager.addEvent(e) - - @change - def updateFileInfo(self, data, pid): - """ updates file info (name, size, status, url)""" - ids = self.db.updateLinkInfo(data) - - for fid in ids: - e = UpdateEvent("file", fid, "collector" if not self.getFile(fid).package().queue else "queue") - self.core.pullManager.addEvent(e) - - def checkPackageFinished(self, pyfile): - """ checks if package is finished and calls hookmanager """ - - ids = self.db.getUnfinished(pyfile.packageid) - if not ids or (pyfile.id in ids and len(ids) == 1): - if not pyfile.package().setFinished: - self.core.log.info(_("Package finished: %s") % pyfile.package().name) - self.core.hookManager.packageFinished(pyfile.package()) - pyfile.package().setFinished = True - - - def reCheckPackage(self, pid): - """ recheck links in package """ - data = self.db.getPackageData(pid) - - urls = [] - - for pyfile in data.itervalues(): - if pyfile["status"] not in (0, 12, 13): - urls.append((pyfile["url"], pyfile["plugin"])) - - self.core.threadManager.createInfoThread(urls, pid) - - @lock - @change - def deleteFinishedLinks(self): - """ deletes finished links and packages, return deleted packages """ - - old_packs = self.getInfoData(0) - old_packs.update(self.getInfoData(1)) - - self.db.deleteFinished() - - new_packs = self.db.getAllPackages(0) - new_packs.update(self.db.getAllPackages(1)) - #get new packages only from db - - deleted = [] - for id in old_packs.iterkeys(): - if not new_packs.has_key(str(id)): - deleted.append(id) - self.deletePackage(int(id)) - - return deleted - - @lock - @change - def restartFailed(self): - """ restart all failed links """ - self.db.restartFailed() - -class FileMethods(): - @style.queue - def filecount(self, queue): - """returns number of files in queue""" - self.c.execute("SELECT l.id FROM links as l INNER JOIN 
packages as p ON l.package=p.id WHERE p.queue=? ORDER BY l.id", (queue, )) - r = self.c.fetchall() - return len(r) - - @style.inner - def _nextPackageOrder(self, queue=0): - self.c.execute('SELECT packageorder FROM packages WHERE queue=?', (queue,)) - o = -1 - for r in self.c: - if r[0] > o: o = r[0] - return o + 1 - - @style.inner - def _nextFileOrder(self, package): - self.c.execute('SELECT linkorder FROM links WHERE package=?', (package,)) - o = -1 - for r in self.c: - if r[0] > o: o = r[0] - return o + 1 - - @style.queue - def addLink(self, url, name, plugin, package): - order = self._nextFileOrder(package) - self.c.execute('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', (url, name, plugin, package, order)) - return self.c.lastrowid - - @style.queue - def addLinks(self, links, package): - """ links is a list of tupels (url,plugin)""" - order = self._nextFileOrder(package) - orders = [order + x for x in range(len(links))] - links = [(x[0], x[0], x[1], package, o) for x, o in zip(links, orders)] - self.c.executemany('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', links) - - @style.queue - def addPackage(self, name, folder, queue): - order = self._nextPackageOrder(queue) - self.c.execute('INSERT INTO packages(name, folder, queue, packageorder) VALUES(?,?,?,?)', (name, folder, queue, order)) - return self.c.lastrowid - - @style.queue - def deletePackage(self, p): - - self.c.execute('DELETE FROM links WHERE package=?', (str(p.id),)) - self.c.execute('DELETE FROM packages WHERE id=?', (str(p.id),)) - self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=?', (p.order, p.queue)) - - @style.queue - def deleteLink(self, f): - - self.c.execute('DELETE FROM links WHERE id=?', (str(f.id),)) - self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder > ? AND package=?', (f.order, str(f.packageid))) - - - @style.queue - def getAllLinks(self, q): - """return information about all links in queue q - - q0 queue - q1 collector - - format: - - { - id: {'name': name, ... 'package': id }, ... - } - - """ - self.c.execute('SELECT l.id,l.url,l.name,l.size,l.status,l.error,l.plugin,l.package,l.linkorder FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? ORDER BY l.linkorder', (q,)) - data = {} - for r in self.c: - data[str(r[0])] = { - 'id': r[0], - 'url': r[1], - 'name': r[2], - 'size': r[3], - 'format_size': formatSize(r[3]), - 'status': r[4], - 'statusmsg': self.manager.statusMsg[r[4]], - 'error': r[5], - 'plugin': r[6], - 'package': r[7], - 'order': r[8], - 'progress': 100 if r[4] in (0, 4) else 0 - } - - return data - - @style.queue - def getAllPackages(self, q): - """return information about packages in queue q - (only useful in get all data) - - q0 queue - q1 collector - - format: - - { - id: {'name': name ... 'links': {} }, ... - } - """ - self.c.execute('SELECT id,name,folder,site,password,queue,packageorder,priority FROM packages WHERE queue=? 
ORDER BY packageorder', str(q)) - - data = {} - for r in self.c: - data[str(r[0])] = { - 'id': r[0], - 'name': r[1], - 'folder': r[2], - 'site': r[3], - 'password': r[4], - 'queue': r[5], - 'order': r[6], - 'priority': r[7], - 'links': {} - } - - return data - - @style.queue - def getLinkData(self, id): - """get link information as dict""" - self.c.execute('SELECT id,url,name,size,status,error,plugin,package,linkorder FROM links WHERE id=?', (str(id), )) - data = {} - r = self.c.fetchone() - if not r: - return None - data[str(r[0])] = { - 'id': r[0], - 'url': r[1], - 'name': r[2], - 'size': r[3], - 'format_size': formatSize(r[3]), - 'status': r[4], - 'statusmsg': self.manager.statusMsg[r[4]], - 'error': r[5], - 'plugin': r[6], - 'package': r[7], - 'order': r[8], - 'progress': 100 if r[4] in (0, 4) else 0 - } - - return data - - @style.queue - def getPackageData(self, id): - """get package data""" - self.c.execute('SELECT id,url,name,size,status,error,plugin,package,linkorder FROM links WHERE package=? ORDER BY linkorder', (str(id), )) - - data = {} - for r in self.c: - data[str(r[0])] = { - 'id': r[0], - 'url': r[1], - 'name': r[2], - 'size': r[3], - 'format_size': formatSize(r[3]), - 'status': r[4], - 'statusmsg': self.manager.statusMsg[r[4]], - 'error': r[5], - 'plugin': r[6], - 'package': r[7], - 'order': r[8] - } - - return data - - - @style.async - def updateLink(self, f): - self.c.execute('UPDATE links SET url=?,name=?,size=?,status=?,error=?,package=? WHERE id=?', (f.url, f.name, f.size, f.status, f.error, str(f.packageid), str(f.id))) - - @style.queue - def updatePackage(self, p): - self.c.execute('UPDATE packages SET name=?,folder=?,site=?,password=?,queue=?,priority=? WHERE id=?', (p.name, p.folder, p.site, p.password, p.queue, p.priority, str(p.id))) - - @style.queue - def updateLinkInfo(self, data): - """ data is list of tupels (name, size, status, url) """ - self.c.executemany('UPDATE links SET name=?, size=?, status=? WHERE url=? AND status NOT IN (0,8,12,13)', data) - ids = [] - self.c.execute('SELECT id FROM links WHERE url IN (\'%s\')' % "','".join([x[3] for x in data])) - for r in self.c: - ids.append(int(r[0])) - return ids - - @style.queue - def reorderPackage(self, p, position, noMove=False): - if position == -1: - position = self._nextPackageOrder(p.queue) - if not noMove: - if p.order > position: - self.c.execute('UPDATE packages SET packageorder=packageorder+1 WHERE packageorder >= ? AND packageorder < ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue)) - elif p.order < position: - self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder <= ? AND packageorder > ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue)) - - self.c.execute('UPDATE packages SET packageorder=? WHERE id=?', (position, str(p.id))) - - @style.queue - def reorderLink(self, f, position): - """ reorder link with f as dict for pyfile """ - if f["order"] > position: - self.c.execute('UPDATE links SET linkorder=linkorder+1 WHERE linkorder >= ? AND linkorder < ? AND package=?', (position, f["order"], f["package"])) - elif f["order"] < position: - self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder <= ? AND linkorder > ? AND package=?', (position, f["order"], f["package"])) - - self.c.execute('UPDATE links SET linkorder=? WHERE id=?', (position, f["id"])) - - - @style.queue - def clearPackageOrder(self, p): - self.c.execute('UPDATE packages SET packageorder=? 
WHERE id=?', (-1, str(p.id))) - self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=? AND id != ?', (p.order, p.queue, str(p.id))) - - @style.async - def restartFile(self, id): - self.c.execute('UPDATE links SET status=3,error="" WHERE id=?', (str(id),)) - - @style.async - def restartPackage(self, id): - self.c.execute('UPDATE links SET status=3 WHERE package=?', (str(id),)) - - @style.async - def syncSave(self): - self.commit() - - @style.queue - def getPackage(self, id): - """return package instance from id""" - self.c.execute("SELECT name,folder,site,password,queue,packageorder,priority FROM packages WHERE id=?", (str(id), )) - r = self.c.fetchone() - if not r: return None - return PyPackage(self.manager, id, * r) - - #---------------------------------------------------------------------- - @style.queue - def getFile(self, id): - """return link instance from id""" - self.c.execute("SELECT url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?", (str(id), )) - r = self.c.fetchone() - if not r: return None - return PyFile(self.manager, id, * r) - - - @style.queue - def getJob(self, occ): - """return pyfile ids, which are suitable for download and dont use a occupied plugin""" - - #@TODO improve this hardcoded method - pre = "('DLC', 'LinkList', 'SerienjunkiesOrg', 'CCF', 'RSDF')" #plugins which are processed in collector - - cmd = "(" - for i, item in enumerate(occ): - if i: cmd += ", " - cmd += "'%s'" % item - - cmd += ")" - - cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE ((p.queue=1 AND l.plugin NOT IN %s) OR l.plugin IN %s) AND l.status IN (2,3,6,14) ORDER BY p.priority DESC, p.packageorder ASC, l.linkorder ASC LIMIT 5" % (cmd, pre) - - self.c.execute(cmd) # very bad! - - return [x[0] for x in self.c] - - @style.queue - def getPluginJob(self, plugins): - """returns pyfile ids with suited plugins""" - cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE l.plugin IN %s AND l.status IN (2,3,6,14) ORDER BY p.priority DESC, p.packageorder ASC, l.linkorder ASC LIMIT 5" % plugins - - self.c.execute(cmd) # very bad! - - return [x[0] for x in self.c] - - @style.queue - def getUnfinished(self, pid): - """return list of max length 3 ids with pyfiles in package not finished or processed""" - - self.c.execute("SELECT id FROM links WHERE package=? AND status NOT IN (0, 13) LIMIT 3", (str(pid),)) - return [r[0] for r in self.c] - - @style.queue - def deleteFinished(self): - self.c.execute("DELETE FROM links WHERE status=0") - self.c.execute("DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE packages.id=links.package)") - - - @style.queue - def restartFailed(self): - self.c.execute("UPDATE links SET status=3,error='' WHERE status IN (8, 9)") - -DatabaseBackend.registerSub(FileMethods) - -if __name__ == "__main__": - - pypath = "." 
- _ = lambda x: x - - db = FileHandler(None) - - #p = PyFile(db, 5) - #sleep(0.1) - - a = time() - - #print db.addPackage("package", "folder" , 1) - - pack = db.db.addPackage("package", "folder", 1) - - updates = [] - - - for x in range(0, 200): - x = str(x) - db.db.addLink("http://somehost.com/hoster/file/download?file_id=" + x, x, "BasePlugin", pack) - updates.append(("new name" + x, 0, 3, "http://somehost.com/hoster/file/download?file_id=" + x)) - - - for x in range(0, 100): - updates.append(("unimportant%s" % x, 0, 3, "a really long non existent url%s" % x)) - - db.db.commit() - - b = time() - print "adding 200 links, single sql execs, no commit", b-a - - print db.getCompleteData(1) - - c = time() - - - db.db.updateLinkInfo(updates) - - d = time() - - print "updates", d-c - - print db.getCompleteData(1) - - - e = time() - - print "complete data", e-d diff --git a/module/PluginThread.py b/module/PluginThread.py index 90ad9ec26..34cf46bc5 100644 --- a/module/PluginThread.py +++ b/module/PluginThread.py @@ -32,11 +32,11 @@ from os.path import exists from pycurl import error from utils import save_join +from module import PyFile from module.plugins.Plugin import Abort from module.plugins.Plugin import Fail from module.plugins.Plugin import Reconnect from module.plugins.Plugin import Retry -from module.FileDatabase import PyFile ######################################################################## class PluginThread(Thread): diff --git a/module/StorageDatabase.py b/module/StorageDatabase.py deleted file mode 100644 index 9d3587340..000000000 --- a/module/StorageDatabase.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -""" - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 3 of the License, - or (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - See the GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, see . - - @author: mkaay -""" - -from module.DatabaseBackend import style -from module.DatabaseBackend import DatabaseBackend - -class StorageMethods(): - @style.queue - def setStorage(db, identifier, key, value): - db.c.execute("SELECT id FROM storage WHERE identifier=? AND key=?", (identifier, key)) - if db.c.fetchone() is not None: - db.c.execute("UPDATE storage SET value=? WHERE identifier=? AND key=?", (value, identifier, key)) - else: - db.c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", (identifier, key, value)) - - @style.queue - def getStorage(db, identifier, key=None): - if key is not None: - db.c.execute("SELECT value FROM storage WHERE identifier=? AND key=?", (identifier, key)) - row = db.c.fetchone() - if row is not None: - return row[0] - else: - db.c.execute("SELECT key, value FROM storage WHERE identifier=?", (identifier, )) - d = {} - if row in db.c: - d[row[0]] = row[1] - return d - - @style.queue - def delStorage(db, identifier, key): - db.c.execute("DELETE FROM storage WHERE identifier=? 
AND key=?", (identifier, key)) - -DatabaseBackend.registerSub(StorageMethods) diff --git a/module/UserDatabase.py b/module/UserDatabase.py deleted file mode 100644 index a69dfff0e..000000000 --- a/module/UserDatabase.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -""" - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 3 of the License, - or (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - See the GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, see . - - @author: mkaay -""" - -from DatabaseBackend import DatabaseBackend -from DatabaseBackend import style - -from hashlib import sha1 -import random - -class UserMethods(): - @style.queue - def checkAuth(db, user, password): - c = db.c - c.execute('SELECT name, password, role, permission, template FROM "users" WHERE name=?', (user, )) - r = c.fetchone() - if not r: - return {} - - salt = r[1][:5] - pw = r[1][5:] - h = sha1(salt + password) - if h.hexdigest() == pw: - return {"name": r[0], "role": r[2], "permission": r[3], "template": r[4]} - else: - return {} - - @style.queue - def addUser(db, user, password): - salt = reduce(lambda x, y: x + y, [str(random.randint(0, 9)) for i in range(0, 5)]) - h = sha1(salt + password) - password = salt + h.hexdigest() - - c = db.c - c.execute('SELECT name FROM users WHERE name=?', (user, )) - if c.fetchone() is not None: - c.execute('UPDATE users SET password=? WHERE name=?', (password, user)) - else: - c.execute('INSERT INTO users (name, password) VALUES (?, ?)', (user, password)) - - @style.queue - def listUsers(db): - c = db.c - c.execute('SELECT name FROM users') - users = [] - for row in c.fetchall(): - users.append(row[0]) - return users - - @style.queue - def removeUser(db, user): - c = db.c - c.execute('SELECT name FROM users WHERE name=?', (user, )) - if c.fetchone() is not None: - c.execute('DELETE FROM users WHERE name=?', (user, )) - - -DatabaseBackend.registerSub(UserMethods) diff --git a/module/database/DatabaseBackend.py b/module/database/DatabaseBackend.py new file mode 100644 index 000000000..2bc6c0bb2 --- /dev/null +++ b/module/database/DatabaseBackend.py @@ -0,0 +1,324 @@ +#!/usr/bin/env python +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . 
+ + @author: RaNaN + @author: mkaay +""" + +from threading import Lock +from threading import Thread +from threading import Event +from os import remove +from os.path import exists +from shutil import move + +from Queue import Queue +from traceback import print_exc + +from module.utils import chmod + +try: + from pysqlite2 import dbapi2 as sqlite3 +except: + import sqlite3 + +DB_VERSION = 4 + +class style(): + db = None + + @classmethod + def setDB(cls, db): + cls.db = db + + @classmethod + def inner(cls, f): + @staticmethod + def x(*args, **kwargs): + if cls.db: + return f(cls.db, *args, **kwargs) + return x + + @classmethod + def queue(cls, f): + @staticmethod + def x(*args, **kwargs): + if cls.db: + return cls.db.queue(f, *args, **kwargs) + return x + + @classmethod + def async(cls, f): + @staticmethod + def x(*args, **kwargs): + if cls.db: + return cls.db.async(f, *args, **kwargs) + return x + +class DatabaseJob(): + def __init__(self, f, *args, **kwargs): + self.done = Event() + + self.f = f + self.args = args + self.kwargs = kwargs + + self.result = None + self.exception = False + + def processJob(self): + try: + self.result = self.f(*self.args, **self.kwargs) + except Exception, e: + print "Database Error @", self.f.__name__, self.args[1:], self.kwargs, e + print_exc() + self.exception = e + self.done.set() + + def wait(self): + self.done.wait() + +class DatabaseBackend(Thread): + subs = [] + def __init__(self, core): + Thread.__init__(self) + self.setDaemon(True) + self.core = core + + self.transactionLock = Lock() + self.jobs = Queue() + + self.setuplock = Event() + + style.setDB(self) + + def setup(self): + self.start() + self.setuplock.wait() + + def run(self): + """main loop, which executes commands""" + convert = self._checkVersion() #returns None or current version + + self.conn = sqlite3.connect("files.db") + chmod("files.db", 0600) + + self.c = self.conn.cursor() #compatibility + + if convert is not None: + self._convertDB(convert) + + self._createTables() + self.conn.commit() + + self.setuplock.set() + + while True: + j = self.jobs.get() + self.transactionLock.acquire() + if j == "quit": + self.c.close() + self.conn.close() + self.transactionLock.release() + break + j.processJob() + if j.exception: + self.conn.rollback() + else: + self.conn.commit() + self.transactionLock.release() + + @style.queue + def shutdown(self): + self.conn.commit() + self.jobs.put("quit") + + def _checkVersion(self): + """ check db version and delete it if needed""" + if not exists("files.version"): + f = open("files.version", "wb") + f.write(str(DB_VERSION)) + f.close() + return + + f = open("files.version", "rb") + v = int(f.read().strip()) + f.close() + if v < DB_VERSION: + if v < 2: + try: + self.manager.core.log.warning(_("Filedatabase was deleted due to incompatible version.")) + except: + print "Filedatabase was deleted due to incompatible version." + remove("files.version") + move("files.db", "files.backup.db") + f = open("files.version", "wb") + f.write(str(DB_VERSION)) + f.close() + return v + + def _convertDB(self, v): + try: + getattr(self, "_convertV%i" % v)() + except: + try: + self.core.log.error(_("Filedatabase could NOT be converted.")) + except: + print "Filedatabase could NOT be converted." 
+ + #--convert scripts start + + def _convertV2(self): + self.c.execute('CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")') + try: + self.manager.core.log.info(_("Database was converted from v2 to v3.")) + except: + print "Database was converted from v2 to v3." + self._convertV3() + + def _convertV3(self): + self.c.execute('CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)') + try: + self.manager.core.log.info(_("Database was converted from v3 to v4.")) + except: + print "Database was converted from v3 to v4." + + #--convert scripts end + + def _createTables(self): + """create tables for database""" + + self.c.execute('CREATE TABLE IF NOT EXISTS "packages" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "folder" TEXT, "password" TEXT DEFAULT "", "site" TEXT DEFAULT "", "queue" INTEGER DEFAULT 0 NOT NULL, "packageorder" INTEGER DEFAULT 0 NOT NULL, "priority" INTEGER DEFAULT 0 NOT NULL)') + self.c.execute('CREATE TABLE IF NOT EXISTS "links" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "url" TEXT NOT NULL, "name" TEXT, "size" INTEGER DEFAULT 0 NOT NULL, "status" INTEGER DEFAULT 3 NOT NULL, "plugin" TEXT DEFAULT "BasePlugin" NOT NULL, "error" TEXT DEFAULT "", "linkorder" INTEGER DEFAULT 0 NOT NULL, "package" INTEGER DEFAULT 0 NOT NULL, FOREIGN KEY(package) REFERENCES packages(id))') + self.c.execute('CREATE INDEX IF NOT EXISTS "pIdIndex" ON links(package)') + self.c.execute('CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")') + self.c.execute('CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)') + + if exists("pyload.db"): + try: + self.core.log.info(_("Converting old Django DB")) + except: + print "Converting old Django DB" + conn = sqlite3.connect('pyload.db') + c = conn.cursor() + c.execute("SELECT username, password, email from auth_user WHERE is_superuser") + users = [] + for r in c: + pw = r[1].split("$") + users.append((r[0], pw[1] + pw[2], r[2])) + c.close() + conn.close() + + self.c.executemany("INSERT INTO users(name, password, email) VALUES (?, ?, ?)", users) + move("pyload.db", "pyload.old.db") + if exists("web.db"): + try: + self.core.log.info(_("Moving users")) + except: + print "Moving users" + conn = sqlite3.connect('web.db') + c = conn.cursor() + c.execute("SELECT name, password, email, role, permission FROM users") + for r in c: + self.c.execute('SELECT name FROM users WHERE name=?', (r[0], )) + if self.c.fetchone() is None: + self.c.executemany("INSERT INTO users (name, password, email, role, permission) VALUES (?, ?, ?, ?, ?)", r) + c.close() + conn.close() + + move("web.db", "web.old.db") + self.c.execute('VACUUM') + + def createCursor(self): + return self.conn.cursor() + + @style.async + def commit(self): + self.conn.commit() + + @style.async + def rollback(self): + self.conn.rollback() + + def async(self, f, *args, **kwargs): + args = (self, ) + args + job = DatabaseJob(f, *args, **kwargs) + self.jobs.put(job) + 
+ def queue(self, f, *args, **kwargs): + args = (self, ) + args + job = DatabaseJob(f, *args, **kwargs) + self.jobs.put(job) + job.wait() + return job.result + + @classmethod + def registerSub(cls, klass): + cls.subs.append(klass) + + @classmethod + def unregisterSub(cls, klass): + cls.subs.remove(klass) + + def __getattr__(self, attr): + for sub in DatabaseBackend.subs: + if hasattr(sub, attr): + return getattr(sub, attr) + +if __name__ == "__main__": + db = DatabaseBackend() + db.setup() + + class Test(): + @style.queue + def insert(db): + c = db.createCursor() + for i in range(1000): + c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", ("foo", i, "bar")) + @style.async + def insert2(db): + c = db.createCursor() + for i in range(1000*1000): + c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", ("foo", i, "bar")) + + @style.queue + def select(db): + c = db.createCursor() + for i in range(10): + res = c.execute("SELECT value FROM storage WHERE identifier=? AND key=?", ("foo", i)) + print res.fetchone() + + @style.queue + def error(db): + c = db.createCursor() + print "a" + c.execute("SELECT myerror FROM storage WHERE identifier=? AND key=?", ("foo", i)) + print "e" + + db.registerSub(Test) + from time import time + start = time() + for i in range(100): + db.insert() + end = time() + print end-start + + start = time() + db.insert2() + end = time() + print end-start + + db.error() + diff --git a/module/database/FileDatabase.py b/module/database/FileDatabase.py new file mode 100644 index 000000000..46f23855e --- /dev/null +++ b/module/database/FileDatabase.py @@ -0,0 +1,879 @@ +#!/usr/bin/env python +""" + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 3 of the License, + or (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, see . 
+ + @author: RaNaN + @author: mkaay +""" + + +from threading import RLock +from time import time + +from module.PullEvents import InsertEvent +from module.PullEvents import ReloadAllEvent +from module.PullEvents import RemoveEvent +from module.PullEvents import UpdateEvent + +from module.PyPackage import PyPackage +from module.PyFile import PyFile +from module.PyFile import formatSize + +from module.database import style +from module.database import DatabaseBackend + +try: + from pysqlite2 import dbapi2 as sqlite3 +except: + import sqlite3 + +######################################################################## +class FileHandler: + """Handles all request made to obtain information, + modify status or other request for links or packages""" + + + #---------------------------------------------------------------------- + def __init__(self, core): + """Constructor""" + self.core = core + + # translations + self.statusMsg = [_("finished"), _("offline"), _("online"), _("queued"), _("checking"), _("waiting"), _("reconnected"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), _("downloading"), _("processing"), _("unknown")] + + self.cache = {} #holds instances for files + self.packageCache = {} # same for packages + #@TODO: purge the cache + + self.jobCache = {} + + self.lock = RLock() #@TODO should be a Lock w/o R + + self.filecount = -1 # if an invalid value is set get current value from db + self.unchanged = False #determines if any changes was made since last call + + self.db = self.core.db + + def change(func): + def new(*args): + args[0].unchanged = False + args[0].filecount = -1 + args[0].jobCache = {} + return func(*args) + return new + + def lock(func): + def new(*args): + #print "Handler: %s args: %s" % (func,args[1:]) + args[0].lock.acquire() + res = func(*args) + args[0].lock.release() + #print "Handler: %s return: %s" % (func, res) + return res + return new + + #---------------------------------------------------------------------- + def save(self): + """saves all data to backend""" + self.db.commit() + + #---------------------------------------------------------------------- + def syncSave(self): + """saves all data to backend and waits until all data are written""" + pyfiles = self.cache.values() + for pyfile in pyfiles: + pyfile.sync() + + pypacks = self.packageCache.values() + for pypack in pypacks: + pypack.sync() + + self.db.syncSave() + + #---------------------------------------------------------------------- + def getCompleteData(self, queue=1): + """gets a complete data representation""" + + data = self.db.getAllLinks(queue) + packs = self.db.getAllPackages(queue) + + data.update([(str(x.id), x.toDbDict()[x.id]) for x in self.cache.itervalues()]) + packs.update([(str(x.id), x.toDict()[x.id]) for x in self.packageCache.itervalues() if x.queue == queue]) + + for key, value in data.iteritems(): + if packs.has_key(str(value["package"])): + packs[str(value["package"])]["links"][key] = value + + return packs + + #---------------------------------------------------------------------- + def getInfoData(self, queue=1): + """gets a data representation without links""" + + packs = self.db.getAllPackages(queue) + packs.update([(str(x.id), x.toDict()[x.id]) for x in self.packageCache.itervalues() if x.queue == queue]) + + return packs + + @lock + @change + def addLinks(self, urls, package): + """adds links""" + + data = self.core.pluginManager.parseUrls(urls) + + self.db.addLinks(data, package) + self.core.threadManager.createInfoThread(data, package) + + #@TODO 
change from reloadAll event to package update event + self.core.pullManager.addEvent(ReloadAllEvent("collector")) + + #---------------------------------------------------------------------- + @lock + @change + def addPackage(self, name, folder, queue=0): + """adds a package, default to link collector""" + lastID = self.db.addPackage(name, folder, queue) + p = self.db.getPackage(lastID) + e = InsertEvent("pack", lastID, p.order, "collector" if not queue else "queue") + self.core.pullManager.addEvent(e) + return lastID + + #---------------------------------------------------------------------- + @lock + @change + def deletePackage(self, id): + """delete package and all contained links""" + + p = self.getPackage(id) + + if not p: + if self.packageCache.has_key(id): del self.packageCache[id] + return + + e = RemoveEvent("pack", id, "collector" if not p.queue else "queue") + + pyfiles = self.cache.values() + + for pyfile in pyfiles: + if pyfile.packageid == id: + pyfile.abortDownload() + pyfile.release() + + self.db.deletePackage(p) + self.core.pullManager.addEvent(e) + + if self.packageCache.has_key(id): + del self.packageCache[id] + + #---------------------------------------------------------------------- + @lock + @change + def deleteLink(self, id): + """deletes links""" + + f = self.getFile(id) + pid = f.packageid + + if not f: + return None + + e = RemoveEvent("file", id, "collector" if not f.package().queue else "queue") + + + if id in self.core.threadManager.processingIds(): + self.cache[id].abortDownload() + + if self.cache.has_key(id): + del self.cache[id] + + self.db.deleteLink(f) + + self.core.pullManager.addEvent(e) + + p = self.getPackage(pid) + if not len(p.getChildren()): + p.delete() + + #---------------------------------------------------------------------- + def releaseLink(self, id): + """removes pyfile from cache""" + if self.cache.has_key(id): + del self.cache[id] + + #---------------------------------------------------------------------- + def releasePackage(self, id): + """removes package from cache""" + if self.packageCache.has_key(id): + del self.packageCache[id] + + #---------------------------------------------------------------------- + def updateLink(self, pyfile): + """updates link""" + self.db.updateLink(pyfile) + + e = UpdateEvent("file", pyfile.id, "collector" if not pyfile.package().queue else "queue") + self.core.pullManager.addEvent(e) + + #---------------------------------------------------------------------- + def updatePackage(self, pypack): + """updates a package""" + self.db.updatePackage(pypack) + + e = UpdateEvent("pack", pypack.id, "collector" if not pypack.queue else "queue") + self.core.pullManager.addEvent(e) + + #---------------------------------------------------------------------- + def getPackage(self, id): + """return package instance""" + + if self.packageCache.has_key(id): + return self.packageCache[id] + else: + return self.db.getPackage(id) + + #---------------------------------------------------------------------- + def getPackageData(self, id): + """returns dict with package information""" + pack = self.getPackage(id) + + if not pack: + return None + + pack = pack.toDict()[id] + + data = self.db.getPackageData(id) + + tmplist = [] + + cache = self.cache.values() + for x in cache: + if int(x.toDbDict()[x.id]["package"]) == int(id): + tmplist.append((str(x.id), x.toDbDict()[x.id])) + data.update(tmplist) + + pack["links"] = data + + return pack + + #---------------------------------------------------------------------- + def 
getFileData(self, id): + """returns dict with file information""" + if self.cache.has_key(id): + return self.cache[id].toDbDict() + + return self.db.getLinkData(id) + + #---------------------------------------------------------------------- + def getFile(self, id): + """returns pyfile instance""" + if self.cache.has_key(id): + return self.cache[id] + else: + return self.db.getFile(id) + + #---------------------------------------------------------------------- + @lock + def getJob(self, occ): + """get suitable job""" + + #@TODO clean mess + #@TODO improve selection of valid jobs + + if self.jobCache.has_key(occ): + if self.jobCache[occ]: + id = self.jobCache[occ].pop() + if id == "empty": + pyfile = None + self.jobCache[occ].append("empty") + else: + pyfile = self.getFile(id) + else: + jobs = self.db.getJob(occ) + jobs.reverse() + if not jobs: + self.jobCache[occ].append("empty") + pyfile = None + else: + self.jobCache[occ].extend(jobs) + pyfile = self.getFile(self.jobCache[occ].pop()) + + else: + self.jobCache = {} #better not caching to much + jobs = self.db.getJob(occ) + jobs.reverse() + self.jobCache[occ] = jobs + + if not jobs: + self.jobCache[occ].append("empty") + pyfile = None + else: + pyfile = self.getFile(self.jobCache[occ].pop()) + + #@TODO: maybe the new job has to be approved... + + + #pyfile = self.getFile(self.jobCache[occ].pop()) + return pyfile + + @lock + def getDecryptJob(self): + """return job for decrypting""" + if self.jobCache.has_key("decrypt"): + return None + + plugins = self.core.pluginManager.crypterPlugins.keys() + self.core.pluginManager.containerPlugins.keys() + plugins = str(tuple(plugins)) + + jobs = self.db.getPluginJob(plugins) + if jobs: + return self.getFile(jobs[0]) + else: + self.jobCache["decrypt"] = "empty" + return None + + def getFileCount(self): + """returns number of files""" + + if self.filecount == -1: + self.filecount = self.db.filecount(1) + + return self.filecount + + #---------------------------------------------------------------------- + def getQueueCount(self): + """number of files that have to be processed""" + pass + + #---------------------------------------------------------------------- + @lock + @change + def restartPackage(self, id): + """restart package""" + pyfiles = self.cache.values() + for pyfile in pyfiles: + if pyfile.packageid == id: + self.restartFile(pyfile.id) + + self.db.restartPackage(id) + + if self.packageCache.has_key(id): + self.packageCache[id].setFinished = False + + e = UpdateEvent("pack", id, "collector" if not self.getPackage(id).queue else "queue") + self.core.pullManager.addEvent(e) + + @lock + @change + def restartFile(self, id): + """ restart file""" + if self.cache.has_key(id): + self.cache[id].status = 3 + self.cache[id].name = self.cache[id].url + self.cache[id].error = "" + self.cache[id].abortDownload() + + + self.db.restartFile(id) + + e = UpdateEvent("file", id, "collector" if not self.getFile(id).package().queue else "queue") + self.core.pullManager.addEvent(e) + + @lock + @change + def setPackageLocation(self, id, queue): + """push package to queue""" + + pack = self.db.getPackage(id) + + e = RemoveEvent("pack", id, "collector" if not pack.queue else "queue") + self.core.pullManager.addEvent(e) + + self.db.clearPackageOrder(pack) + + pack = self.db.getPackage(id) + + pack.queue = queue + self.db.updatePackage(pack) + + self.db.reorderPackage(pack, -1, True) + + self.db.commit() + self.releasePackage(id) + pack = self.getPackage(id) + e = InsertEvent("pack", id, pack.order, "collector" if not 
pack.queue else "queue") + self.core.pullManager.addEvent(e) + + @lock + @change + def reorderPackage(self, id, position): + p = self.getPackage(id) + + e = RemoveEvent("pack", id, "collector" if not p.queue else "queue") + self.core.pullManager.addEvent(e) + self.db.reorderPackage(p, position) + + packs = self.packageCache.values() + for pack in packs: + if pack.queue != p.queue or pack.order < 0 or pack == p: continue + if p.order > position: + if pack.order >= position and pack.order < p.order: + pack.order += 1 + elif p.order < position: + if pack.order <= position and pack.order > p.order: + pack.order -= 1 + + p.order = position + self.db.commit() + + e = ReloadAllEvent("collector" if not p.queue else "queue") + self.core.pullManager.addEvent(e) + + @lock + @change + def reorderFile(self, id, position): + f = self.getFileData(id) + f = f[str(id)] + + e = RemoveEvent("file", id, "collector" if not self.getPackage(f["package"]).queue else "queue") + self.core.pullManager.addEvent(e) + + self.db.reorderLink(f, position) + + pyfiles = self.cache.values() + for pyfile in pyfiles: + if pyfile.packageid != f["package"] or pyfile.order < 0: continue + if f["order"] > position: + if pyfile.order >= position and pyfile.order < f["order"]: + pyfile.order += 1 + elif f["order"] < position: + if pyfile.order <= position and pyfile.order > f["order"]: + pyfile.order -= 1 + + if self.cache.has_key(id): + self.cache[id].order = position + + self.db.commit() + + e = ReloadAllEvent("collector" if not self.getPackage(f["package"]).queue else "queue") + + self.core.pullManager.addEvent(e) + + @change + def updateFileInfo(self, data, pid): + """ updates file info (name, size, status, url)""" + ids = self.db.updateLinkInfo(data) + + for fid in ids: + e = UpdateEvent("file", fid, "collector" if not self.getFile(fid).package().queue else "queue") + self.core.pullManager.addEvent(e) + + def checkPackageFinished(self, pyfile): + """ checks if package is finished and calls hookmanager """ + + ids = self.db.getUnfinished(pyfile.packageid) + if not ids or (pyfile.id in ids and len(ids) == 1): + if not pyfile.package().setFinished: + self.core.log.info(_("Package finished: %s") % pyfile.package().name) + self.core.hookManager.packageFinished(pyfile.package()) + pyfile.package().setFinished = True + + + def reCheckPackage(self, pid): + """ recheck links in package """ + data = self.db.getPackageData(pid) + + urls = [] + + for pyfile in data.itervalues(): + if pyfile["status"] not in (0, 12, 13): + urls.append((pyfile["url"], pyfile["plugin"])) + + self.core.threadManager.createInfoThread(urls, pid) + + @lock + @change + def deleteFinishedLinks(self): + """ deletes finished links and packages, return deleted packages """ + + old_packs = self.getInfoData(0) + old_packs.update(self.getInfoData(1)) + + self.db.deleteFinished() + + new_packs = self.db.getAllPackages(0) + new_packs.update(self.db.getAllPackages(1)) + #get new packages only from db + + deleted = [] + for id in old_packs.iterkeys(): + if not new_packs.has_key(str(id)): + deleted.append(id) + self.deletePackage(int(id)) + + return deleted + + @lock + @change + def restartFailed(self): + """ restart all failed links """ + self.db.restartFailed() + +class FileMethods(): + @style.queue + def filecount(self, queue): + """returns number of files in queue""" + self.c.execute("SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? 
ORDER BY l.id", (queue, )) + r = self.c.fetchall() + return len(r) + + @style.inner + def _nextPackageOrder(self, queue=0): + self.c.execute('SELECT packageorder FROM packages WHERE queue=?', (queue,)) + o = -1 + for r in self.c: + if r[0] > o: o = r[0] + return o + 1 + + @style.inner + def _nextFileOrder(self, package): + self.c.execute('SELECT linkorder FROM links WHERE package=?', (package,)) + o = -1 + for r in self.c: + if r[0] > o: o = r[0] + return o + 1 + + @style.queue + def addLink(self, url, name, plugin, package): + order = self._nextFileOrder(package) + self.c.execute('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', (url, name, plugin, package, order)) + return self.c.lastrowid + + @style.queue + def addLinks(self, links, package): + """ links is a list of tupels (url,plugin)""" + order = self._nextFileOrder(package) + orders = [order + x for x in range(len(links))] + links = [(x[0], x[0], x[1], package, o) for x, o in zip(links, orders)] + self.c.executemany('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', links) + + @style.queue + def addPackage(self, name, folder, queue): + order = self._nextPackageOrder(queue) + self.c.execute('INSERT INTO packages(name, folder, queue, packageorder) VALUES(?,?,?,?)', (name, folder, queue, order)) + return self.c.lastrowid + + @style.queue + def deletePackage(self, p): + + self.c.execute('DELETE FROM links WHERE package=?', (str(p.id),)) + self.c.execute('DELETE FROM packages WHERE id=?', (str(p.id),)) + self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=?', (p.order, p.queue)) + + @style.queue + def deleteLink(self, f): + + self.c.execute('DELETE FROM links WHERE id=?', (str(f.id),)) + self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder > ? AND package=?', (f.order, str(f.packageid))) + + + @style.queue + def getAllLinks(self, q): + """return information about all links in queue q + + q0 queue + q1 collector + + format: + + { + id: {'name': name, ... 'package': id }, ... + } + + """ + self.c.execute('SELECT l.id,l.url,l.name,l.size,l.status,l.error,l.plugin,l.package,l.linkorder FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? ORDER BY l.linkorder', (q,)) + data = {} + for r in self.c: + data[str(r[0])] = { + 'id': r[0], + 'url': r[1], + 'name': r[2], + 'size': r[3], + 'format_size': formatSize(r[3]), + 'status': r[4], + 'statusmsg': self.manager.statusMsg[r[4]], + 'error': r[5], + 'plugin': r[6], + 'package': r[7], + 'order': r[8], + 'progress': 100 if r[4] in (0, 4) else 0 + } + + return data + + @style.queue + def getAllPackages(self, q): + """return information about packages in queue q + (only useful in get all data) + + q0 queue + q1 collector + + format: + + { + id: {'name': name ... 'links': {} }, ... + } + """ + self.c.execute('SELECT id,name,folder,site,password,queue,packageorder,priority FROM packages WHERE queue=? 
ORDER BY packageorder', str(q)) + + data = {} + for r in self.c: + data[str(r[0])] = { + 'id': r[0], + 'name': r[1], + 'folder': r[2], + 'site': r[3], + 'password': r[4], + 'queue': r[5], + 'order': r[6], + 'priority': r[7], + 'links': {} + } + + return data + + @style.queue + def getLinkData(self, id): + """get link information as dict""" + self.c.execute('SELECT id,url,name,size,status,error,plugin,package,linkorder FROM links WHERE id=?', (str(id), )) + data = {} + r = self.c.fetchone() + if not r: + return None + data[str(r[0])] = { + 'id': r[0], + 'url': r[1], + 'name': r[2], + 'size': r[3], + 'format_size': formatSize(r[3]), + 'status': r[4], + 'statusmsg': self.manager.statusMsg[r[4]], + 'error': r[5], + 'plugin': r[6], + 'package': r[7], + 'order': r[8], + 'progress': 100 if r[4] in (0, 4) else 0 + } + + return data + + @style.queue + def getPackageData(self, id): + """get package data""" + self.c.execute('SELECT id,url,name,size,status,error,plugin,package,linkorder FROM links WHERE package=? ORDER BY linkorder', (str(id), )) + + data = {} + for r in self.c: + data[str(r[0])] = { + 'id': r[0], + 'url': r[1], + 'name': r[2], + 'size': r[3], + 'format_size': formatSize(r[3]), + 'status': r[4], + 'statusmsg': self.manager.statusMsg[r[4]], + 'error': r[5], + 'plugin': r[6], + 'package': r[7], + 'order': r[8] + } + + return data + + + @style.async + def updateLink(self, f): + self.c.execute('UPDATE links SET url=?,name=?,size=?,status=?,error=?,package=? WHERE id=?', (f.url, f.name, f.size, f.status, f.error, str(f.packageid), str(f.id))) + + @style.queue + def updatePackage(self, p): + self.c.execute('UPDATE packages SET name=?,folder=?,site=?,password=?,queue=?,priority=? WHERE id=?', (p.name, p.folder, p.site, p.password, p.queue, p.priority, str(p.id))) + + @style.queue + def updateLinkInfo(self, data): + """ data is list of tupels (name, size, status, url) """ + self.c.executemany('UPDATE links SET name=?, size=?, status=? WHERE url=? AND status NOT IN (0,8,12,13)', data) + ids = [] + self.c.execute('SELECT id FROM links WHERE url IN (\'%s\')' % "','".join([x[3] for x in data])) + for r in self.c: + ids.append(int(r[0])) + return ids + + @style.queue + def reorderPackage(self, p, position, noMove=False): + if position == -1: + position = self._nextPackageOrder(p.queue) + if not noMove: + if p.order > position: + self.c.execute('UPDATE packages SET packageorder=packageorder+1 WHERE packageorder >= ? AND packageorder < ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue)) + elif p.order < position: + self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder <= ? AND packageorder > ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue)) + + self.c.execute('UPDATE packages SET packageorder=? WHERE id=?', (position, str(p.id))) + + @style.queue + def reorderLink(self, f, position): + """ reorder link with f as dict for pyfile """ + if f["order"] > position: + self.c.execute('UPDATE links SET linkorder=linkorder+1 WHERE linkorder >= ? AND linkorder < ? AND package=?', (position, f["order"], f["package"])) + elif f["order"] < position: + self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder <= ? AND linkorder > ? AND package=?', (position, f["order"], f["package"])) + + self.c.execute('UPDATE links SET linkorder=? WHERE id=?', (position, f["id"])) + + + @style.queue + def clearPackageOrder(self, p): + self.c.execute('UPDATE packages SET packageorder=? 
WHERE id=?', (-1, str(p.id))) + self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=? AND id != ?', (p.order, p.queue, str(p.id))) + + @style.async + def restartFile(self, id): + self.c.execute('UPDATE links SET status=3,error="" WHERE id=?', (str(id),)) + + @style.async + def restartPackage(self, id): + self.c.execute('UPDATE links SET status=3 WHERE package=?', (str(id),)) + + @style.async + def syncSave(self): + self.commit() + + @style.queue + def getPackage(self, id): + """return package instance from id""" + self.c.execute("SELECT name,folder,site,password,queue,packageorder,priority FROM packages WHERE id=?", (str(id), )) + r = self.c.fetchone() + if not r: return None + return PyPackage(self.manager, id, * r) + + #---------------------------------------------------------------------- + @style.queue + def getFile(self, id): + """return link instance from id""" + self.c.execute("SELECT url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?", (str(id), )) + r = self.c.fetchone() + if not r: return None + return PyFile(self.manager, id, * r) + + + @style.queue + def getJob(self, occ): + """return pyfile ids, which are suitable for download and dont use a occupied plugin""" + + #@TODO improve this hardcoded method + pre = "('DLC', 'LinkList', 'SerienjunkiesOrg', 'CCF', 'RSDF')" #plugins which are processed in collector + + cmd = "(" + for i, item in enumerate(occ): + if i: cmd += ", " + cmd += "'%s'" % item + + cmd += ")" + + cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE ((p.queue=1 AND l.plugin NOT IN %s) OR l.plugin IN %s) AND l.status IN (2,3,6,14) ORDER BY p.priority DESC, p.packageorder ASC, l.linkorder ASC LIMIT 5" % (cmd, pre) + + self.c.execute(cmd) # very bad! + + return [x[0] for x in self.c] + + @style.queue + def getPluginJob(self, plugins): + """returns pyfile ids with suited plugins""" + cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE l.plugin IN %s AND l.status IN (2,3,6,14) ORDER BY p.priority DESC, p.packageorder ASC, l.linkorder ASC LIMIT 5" % plugins + + self.c.execute(cmd) # very bad! + + return [x[0] for x in self.c] + + @style.queue + def getUnfinished(self, pid): + """return list of max length 3 ids with pyfiles in package not finished or processed""" + + self.c.execute("SELECT id FROM links WHERE package=? AND status NOT IN (0, 13) LIMIT 3", (str(pid),)) + return [r[0] for r in self.c] + + @style.queue + def deleteFinished(self): + self.c.execute("DELETE FROM links WHERE status=0") + self.c.execute("DELETE FROM packages WHERE NOT EXISTS(SELECT 1 FROM links WHERE packages.id=links.package)") + + + @style.queue + def restartFailed(self): + self.c.execute("UPDATE links SET status=3,error='' WHERE status IN (8, 9)") + +DatabaseBackend.registerSub(FileMethods) + +if __name__ == "__main__": + + pypath = "." 
+    _ = lambda x: x
+
+    db = FileHandler(None)
+
+    #p = PyFile(db, 5)
+    #sleep(0.1)
+
+    a = time()
+
+    #print db.addPackage("package", "folder" , 1)
+
+    pack = db.db.addPackage("package", "folder", 1)
+
+    updates = []
+
+
+    for x in range(0, 200):
+        x = str(x)
+        db.db.addLink("http://somehost.com/hoster/file/download?file_id=" + x, x, "BasePlugin", pack)
+        updates.append(("new name" + x, 0, 3, "http://somehost.com/hoster/file/download?file_id=" + x))
+
+
+    for x in range(0, 100):
+        updates.append(("unimportant%s" % x, 0, 3, "a really long non existent url%s" % x))
+
+    db.db.commit()
+
+    b = time()
+    print "adding 200 links, single sql execs, no commit", b-a
+
+    print db.getCompleteData(1)
+
+    c = time()
+
+
+    db.db.updateLinkInfo(updates)
+
+    d = time()
+
+    print "updates", d-c
+
+    print db.getCompleteData(1)
+
+
+    e = time()
+
+    print "complete data", e-d
diff --git a/module/database/StorageDatabase.py b/module/database/StorageDatabase.py
new file mode 100644
index 000000000..dc3135c30
--- /dev/null
+++ b/module/database/StorageDatabase.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+"""
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 3 of the License,
+    or (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+    See the GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, see .
+
+    @author: mkaay
+"""
+
+from module.database import style
+from module.database import DatabaseBackend
+
+class StorageMethods():
+    @style.queue
+    def setStorage(db, identifier, key, value):
+        db.c.execute("SELECT id FROM storage WHERE identifier=? AND key=?", (identifier, key))
+        if db.c.fetchone() is not None:
+            db.c.execute("UPDATE storage SET value=? WHERE identifier=? AND key=?", (value, identifier, key))
+        else:
+            db.c.execute("INSERT INTO storage (identifier, key, value) VALUES (?, ?, ?)", (identifier, key, value))
+
+    @style.queue
+    def getStorage(db, identifier, key=None):
+        if key is not None:
+            db.c.execute("SELECT value FROM storage WHERE identifier=? AND key=?", (identifier, key))
+            row = db.c.fetchone()
+            if row is not None:
+                return row[0]
+        else:
+            db.c.execute("SELECT key, value FROM storage WHERE identifier=?", (identifier, ))
+            d = {}
+            for row in db.c:
+                d[row[0]] = row[1]
+            return d
+
+    @style.queue
+    def delStorage(db, identifier, key):
+        db.c.execute("DELETE FROM storage WHERE identifier=? AND key=?", (identifier, key))
+
+DatabaseBackend.registerSub(StorageMethods)
diff --git a/module/database/UserDatabase.py b/module/database/UserDatabase.py
new file mode 100644
index 000000000..a69dfff0e
--- /dev/null
+++ b/module/database/UserDatabase.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+"""
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 3 of the License,
+    or (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+    See the GNU General Public License for more details.
+ + You should have received a copy of the GNU General Public License + along with this program; if not, see . + + @author: mkaay +""" + +from DatabaseBackend import DatabaseBackend +from DatabaseBackend import style + +from hashlib import sha1 +import random + +class UserMethods(): + @style.queue + def checkAuth(db, user, password): + c = db.c + c.execute('SELECT name, password, role, permission, template FROM "users" WHERE name=?', (user, )) + r = c.fetchone() + if not r: + return {} + + salt = r[1][:5] + pw = r[1][5:] + h = sha1(salt + password) + if h.hexdigest() == pw: + return {"name": r[0], "role": r[2], "permission": r[3], "template": r[4]} + else: + return {} + + @style.queue + def addUser(db, user, password): + salt = reduce(lambda x, y: x + y, [str(random.randint(0, 9)) for i in range(0, 5)]) + h = sha1(salt + password) + password = salt + h.hexdigest() + + c = db.c + c.execute('SELECT name FROM users WHERE name=?', (user, )) + if c.fetchone() is not None: + c.execute('UPDATE users SET password=? WHERE name=?', (password, user)) + else: + c.execute('INSERT INTO users (name, password) VALUES (?, ?)', (user, password)) + + @style.queue + def listUsers(db): + c = db.c + c.execute('SELECT name FROM users') + users = [] + for row in c.fetchall(): + users.append(row[0]) + return users + + @style.queue + def removeUser(db, user): + c = db.c + c.execute('SELECT name FROM users WHERE name=?', (user, )) + if c.fetchone() is not None: + c.execute('DELETE FROM users WHERE name=?', (user, )) + + +DatabaseBackend.registerSub(UserMethods) diff --git a/module/lib/thrift/protocol/fastbinary.c b/module/lib/thrift/protocol/fastbinary.c deleted file mode 100644 index 67b215a83..000000000 --- a/module/lib/thrift/protocol/fastbinary.c +++ /dev/null @@ -1,1203 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -#include -#include "cStringIO.h" -#include -#include -#include - -/* Fix endianness issues on Solaris */ -#if defined (__SVR4) && defined (__sun) - #if defined(__i386) && !defined(__i386__) - #define __i386__ - #endif - - #ifndef BIG_ENDIAN - #define BIG_ENDIAN (4321) - #endif - #ifndef LITTLE_ENDIAN - #define LITTLE_ENDIAN (1234) - #endif - - /* I386 is LE, even on Solaris */ - #if !defined(BYTE_ORDER) && defined(__i386__) - #define BYTE_ORDER LITTLE_ENDIAN - #endif -#endif - -// TODO(dreiss): defval appears to be unused. Look into removing it. -// TODO(dreiss): Make parse_spec_args recursive, and cache the output -// permanently in the object. (Malloc and orphan.) -// TODO(dreiss): Why do we need cStringIO for reading, why not just char*? -// Can cStringIO let us work with a BufferedTransport? -// TODO(dreiss): Don't ignore the rv from cwrite (maybe). 
- -/* ====== BEGIN UTILITIES ====== */ - -#define INIT_OUTBUF_SIZE 128 - -// Stolen out of TProtocol.h. -// It would be a huge pain to have both get this from one place. -typedef enum TType { - T_STOP = 0, - T_VOID = 1, - T_BOOL = 2, - T_BYTE = 3, - T_I08 = 3, - T_I16 = 6, - T_I32 = 8, - T_U64 = 9, - T_I64 = 10, - T_DOUBLE = 4, - T_STRING = 11, - T_UTF7 = 11, - T_STRUCT = 12, - T_MAP = 13, - T_SET = 14, - T_LIST = 15, - T_UTF8 = 16, - T_UTF16 = 17 -} TType; - -#ifndef __BYTE_ORDER -# if defined(BYTE_ORDER) && defined(LITTLE_ENDIAN) && defined(BIG_ENDIAN) -# define __BYTE_ORDER BYTE_ORDER -# define __LITTLE_ENDIAN LITTLE_ENDIAN -# define __BIG_ENDIAN BIG_ENDIAN -# else -# error "Cannot determine endianness" -# endif -#endif - -// Same comment as the enum. Sorry. -#if __BYTE_ORDER == __BIG_ENDIAN -# define ntohll(n) (n) -# define htonll(n) (n) -#elif __BYTE_ORDER == __LITTLE_ENDIAN -# if defined(__GNUC__) && defined(__GLIBC__) -# include -# define ntohll(n) bswap_64(n) -# define htonll(n) bswap_64(n) -# else /* GNUC & GLIBC */ -# define ntohll(n) ( (((unsigned long long)ntohl(n)) << 32) + ntohl(n >> 32) ) -# define htonll(n) ( (((unsigned long long)htonl(n)) << 32) + htonl(n >> 32) ) -# endif /* GNUC & GLIBC */ -#else /* __BYTE_ORDER */ -# error "Can't define htonll or ntohll!" -#endif - -// Doing a benchmark shows that interning actually makes a difference, amazingly. -#define INTERN_STRING(value) _intern_ ## value - -#define INT_CONV_ERROR_OCCURRED(v) ( ((v) == -1) && PyErr_Occurred() ) -#define CHECK_RANGE(v, min, max) ( ((v) <= (max)) && ((v) >= (min)) ) - -// Py_ssize_t was not defined before Python 2.5 -#if (PY_VERSION_HEX < 0x02050000) -typedef int Py_ssize_t; -#endif - -/** - * A cache of the spec_args for a set or list, - * so we don't have to keep calling PyTuple_GET_ITEM. - */ -typedef struct { - TType element_type; - PyObject* typeargs; -} SetListTypeArgs; - -/** - * A cache of the spec_args for a map, - * so we don't have to keep calling PyTuple_GET_ITEM. - */ -typedef struct { - TType ktag; - TType vtag; - PyObject* ktypeargs; - PyObject* vtypeargs; -} MapTypeArgs; - -/** - * A cache of the spec_args for a struct, - * so we don't have to keep calling PyTuple_GET_ITEM. - */ -typedef struct { - PyObject* klass; - PyObject* spec; -} StructTypeArgs; - -/** - * A cache of the item spec from a struct specification, - * so we don't have to keep calling PyTuple_GET_ITEM. - */ -typedef struct { - int tag; - TType type; - PyObject* attrname; - PyObject* typeargs; - PyObject* defval; -} StructItemSpec; - -/** - * A cache of the two key attributes of a CReadableTransport, - * so we don't have to keep calling PyObject_GetAttr. - */ -typedef struct { - PyObject* stringiobuf; - PyObject* refill_callable; -} DecodeBuffer; - -/** Pointer to interned string to speed up attribute lookup. */ -static PyObject* INTERN_STRING(cstringio_buf); -/** Pointer to interned string to speed up attribute lookup. 
*/ -static PyObject* INTERN_STRING(cstringio_refill); - -static inline bool -check_ssize_t_32(Py_ssize_t len) { - // error from getting the int - if (INT_CONV_ERROR_OCCURRED(len)) { - return false; - } - if (!CHECK_RANGE(len, 0, INT32_MAX)) { - PyErr_SetString(PyExc_OverflowError, "string size out of range"); - return false; - } - return true; -} - -static inline bool -parse_pyint(PyObject* o, int32_t* ret, int32_t min, int32_t max) { - long val = PyInt_AsLong(o); - - if (INT_CONV_ERROR_OCCURRED(val)) { - return false; - } - if (!CHECK_RANGE(val, min, max)) { - PyErr_SetString(PyExc_OverflowError, "int out of range"); - return false; - } - - *ret = (int32_t) val; - return true; -} - - -/* --- FUNCTIONS TO PARSE STRUCT SPECIFICATOINS --- */ - -static bool -parse_set_list_args(SetListTypeArgs* dest, PyObject* typeargs) { - if (PyTuple_Size(typeargs) != 2) { - PyErr_SetString(PyExc_TypeError, "expecting tuple of size 2 for list/set type args"); - return false; - } - - dest->element_type = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 0)); - if (INT_CONV_ERROR_OCCURRED(dest->element_type)) { - return false; - } - - dest->typeargs = PyTuple_GET_ITEM(typeargs, 1); - - return true; -} - -static bool -parse_map_args(MapTypeArgs* dest, PyObject* typeargs) { - if (PyTuple_Size(typeargs) != 4) { - PyErr_SetString(PyExc_TypeError, "expecting 4 arguments for typeargs to map"); - return false; - } - - dest->ktag = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 0)); - if (INT_CONV_ERROR_OCCURRED(dest->ktag)) { - return false; - } - - dest->vtag = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 2)); - if (INT_CONV_ERROR_OCCURRED(dest->vtag)) { - return false; - } - - dest->ktypeargs = PyTuple_GET_ITEM(typeargs, 1); - dest->vtypeargs = PyTuple_GET_ITEM(typeargs, 3); - - return true; -} - -static bool -parse_struct_args(StructTypeArgs* dest, PyObject* typeargs) { - if (PyTuple_Size(typeargs) != 2) { - PyErr_SetString(PyExc_TypeError, "expecting tuple of size 2 for struct args"); - return false; - } - - dest->klass = PyTuple_GET_ITEM(typeargs, 0); - dest->spec = PyTuple_GET_ITEM(typeargs, 1); - - return true; -} - -static int -parse_struct_item_spec(StructItemSpec* dest, PyObject* spec_tuple) { - - // i'd like to use ParseArgs here, but it seems to be a bottleneck. 
- if (PyTuple_Size(spec_tuple) != 5) { - PyErr_SetString(PyExc_TypeError, "expecting 5 arguments for spec tuple"); - return false; - } - - dest->tag = PyInt_AsLong(PyTuple_GET_ITEM(spec_tuple, 0)); - if (INT_CONV_ERROR_OCCURRED(dest->tag)) { - return false; - } - - dest->type = PyInt_AsLong(PyTuple_GET_ITEM(spec_tuple, 1)); - if (INT_CONV_ERROR_OCCURRED(dest->type)) { - return false; - } - - dest->attrname = PyTuple_GET_ITEM(spec_tuple, 2); - dest->typeargs = PyTuple_GET_ITEM(spec_tuple, 3); - dest->defval = PyTuple_GET_ITEM(spec_tuple, 4); - return true; -} - -/* ====== END UTILITIES ====== */ - - -/* ====== BEGIN WRITING FUNCTIONS ====== */ - -/* --- LOW-LEVEL WRITING FUNCTIONS --- */ - -static void writeByte(PyObject* outbuf, int8_t val) { - int8_t net = val; - PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int8_t)); -} - -static void writeI16(PyObject* outbuf, int16_t val) { - int16_t net = (int16_t)htons(val); - PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int16_t)); -} - -static void writeI32(PyObject* outbuf, int32_t val) { - int32_t net = (int32_t)htonl(val); - PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int32_t)); -} - -static void writeI64(PyObject* outbuf, int64_t val) { - int64_t net = (int64_t)htonll(val); - PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int64_t)); -} - -static void writeDouble(PyObject* outbuf, double dub) { - // Unfortunately, bitwise_cast doesn't work in C. Bad C! - union { - double f; - int64_t t; - } transfer; - transfer.f = dub; - writeI64(outbuf, transfer.t); -} - - -/* --- MAIN RECURSIVE OUTPUT FUCNTION -- */ - -static int -output_val(PyObject* output, PyObject* value, TType type, PyObject* typeargs) { - /* - * Refcounting Strategy: - * - * We assume that elements of the thrift_spec tuple are not going to be - * mutated, so we don't ref count those at all. Other than that, we try to - * keep a reference to all the user-created objects while we work with them. - * output_val assumes that a reference is already held. 
The *caller* is - * responsible for handling references - */ - - switch (type) { - - case T_BOOL: { - int v = PyObject_IsTrue(value); - if (v == -1) { - return false; - } - - writeByte(output, (int8_t) v); - break; - } - case T_I08: { - int32_t val; - - if (!parse_pyint(value, &val, INT8_MIN, INT8_MAX)) { - return false; - } - - writeByte(output, (int8_t) val); - break; - } - case T_I16: { - int32_t val; - - if (!parse_pyint(value, &val, INT16_MIN, INT16_MAX)) { - return false; - } - - writeI16(output, (int16_t) val); - break; - } - case T_I32: { - int32_t val; - - if (!parse_pyint(value, &val, INT32_MIN, INT32_MAX)) { - return false; - } - - writeI32(output, val); - break; - } - case T_I64: { - int64_t nval = PyLong_AsLongLong(value); - - if (INT_CONV_ERROR_OCCURRED(nval)) { - return false; - } - - if (!CHECK_RANGE(nval, INT64_MIN, INT64_MAX)) { - PyErr_SetString(PyExc_OverflowError, "int out of range"); - return false; - } - - writeI64(output, nval); - break; - } - - case T_DOUBLE: { - double nval = PyFloat_AsDouble(value); - if (nval == -1.0 && PyErr_Occurred()) { - return false; - } - - writeDouble(output, nval); - break; - } - - case T_STRING: { - Py_ssize_t len = PyString_Size(value); - - if (!check_ssize_t_32(len)) { - return false; - } - - writeI32(output, (int32_t) len); - PycStringIO->cwrite(output, PyString_AsString(value), (int32_t) len); - break; - } - - case T_LIST: - case T_SET: { - Py_ssize_t len; - SetListTypeArgs parsedargs; - PyObject *item; - PyObject *iterator; - - if (!parse_set_list_args(&parsedargs, typeargs)) { - return false; - } - - len = PyObject_Length(value); - - if (!check_ssize_t_32(len)) { - return false; - } - - writeByte(output, parsedargs.element_type); - writeI32(output, (int32_t) len); - - iterator = PyObject_GetIter(value); - if (iterator == NULL) { - return false; - } - - while ((item = PyIter_Next(iterator))) { - if (!output_val(output, item, parsedargs.element_type, parsedargs.typeargs)) { - Py_DECREF(item); - Py_DECREF(iterator); - return false; - } - Py_DECREF(item); - } - - Py_DECREF(iterator); - - if (PyErr_Occurred()) { - return false; - } - - break; - } - - case T_MAP: { - PyObject *k, *v; - Py_ssize_t pos = 0; - Py_ssize_t len; - - MapTypeArgs parsedargs; - - len = PyDict_Size(value); - if (!check_ssize_t_32(len)) { - return false; - } - - if (!parse_map_args(&parsedargs, typeargs)) { - return false; - } - - writeByte(output, parsedargs.ktag); - writeByte(output, parsedargs.vtag); - writeI32(output, len); - - // TODO(bmaurer): should support any mapping, not just dicts - while (PyDict_Next(value, &pos, &k, &v)) { - // TODO(dreiss): Think hard about whether these INCREFs actually - // turn any unsafe scenarios into safe scenarios. - Py_INCREF(k); - Py_INCREF(v); - - if (!output_val(output, k, parsedargs.ktag, parsedargs.ktypeargs) - || !output_val(output, v, parsedargs.vtag, parsedargs.vtypeargs)) { - Py_DECREF(k); - Py_DECREF(v); - return false; - } - Py_DECREF(k); - Py_DECREF(v); - } - break; - } - - // TODO(dreiss): Consider breaking this out as a function - // the way we did for decode_struct. 
- case T_STRUCT: { - StructTypeArgs parsedargs; - Py_ssize_t nspec; - Py_ssize_t i; - - if (!parse_struct_args(&parsedargs, typeargs)) { - return false; - } - - nspec = PyTuple_Size(parsedargs.spec); - - if (nspec == -1) { - return false; - } - - for (i = 0; i < nspec; i++) { - StructItemSpec parsedspec; - PyObject* spec_tuple; - PyObject* instval = NULL; - - spec_tuple = PyTuple_GET_ITEM(parsedargs.spec, i); - if (spec_tuple == Py_None) { - continue; - } - - if (!parse_struct_item_spec (&parsedspec, spec_tuple)) { - return false; - } - - instval = PyObject_GetAttr(value, parsedspec.attrname); - - if (!instval) { - return false; - } - - if (instval == Py_None) { - Py_DECREF(instval); - continue; - } - - writeByte(output, (int8_t) parsedspec.type); - writeI16(output, parsedspec.tag); - - if (!output_val(output, instval, parsedspec.type, parsedspec.typeargs)) { - Py_DECREF(instval); - return false; - } - - Py_DECREF(instval); - } - - writeByte(output, (int8_t)T_STOP); - break; - } - - case T_STOP: - case T_VOID: - case T_UTF16: - case T_UTF8: - case T_U64: - default: - PyErr_SetString(PyExc_TypeError, "Unexpected TType"); - return false; - - } - - return true; -} - - -/* --- TOP-LEVEL WRAPPER FOR OUTPUT -- */ - -static PyObject * -encode_binary(PyObject *self, PyObject *args) { - PyObject* enc_obj; - PyObject* type_args; - PyObject* buf; - PyObject* ret = NULL; - - if (!PyArg_ParseTuple(args, "OO", &enc_obj, &type_args)) { - return NULL; - } - - buf = PycStringIO->NewOutput(INIT_OUTBUF_SIZE); - if (output_val(buf, enc_obj, T_STRUCT, type_args)) { - ret = PycStringIO->cgetvalue(buf); - } - - Py_DECREF(buf); - return ret; -} - -/* ====== END WRITING FUNCTIONS ====== */ - - -/* ====== BEGIN READING FUNCTIONS ====== */ - -/* --- LOW-LEVEL READING FUNCTIONS --- */ - -static void -free_decodebuf(DecodeBuffer* d) { - Py_XDECREF(d->stringiobuf); - Py_XDECREF(d->refill_callable); -} - -static bool -decode_buffer_from_obj(DecodeBuffer* dest, PyObject* obj) { - dest->stringiobuf = PyObject_GetAttr(obj, INTERN_STRING(cstringio_buf)); - if (!dest->stringiobuf) { - return false; - } - - if (!PycStringIO_InputCheck(dest->stringiobuf)) { - free_decodebuf(dest); - PyErr_SetString(PyExc_TypeError, "expecting stringio input"); - return false; - } - - dest->refill_callable = PyObject_GetAttr(obj, INTERN_STRING(cstringio_refill)); - - if(!dest->refill_callable) { - free_decodebuf(dest); - return false; - } - - if (!PyCallable_Check(dest->refill_callable)) { - free_decodebuf(dest); - PyErr_SetString(PyExc_TypeError, "expecting callable"); - return false; - } - - return true; -} - -static bool readBytes(DecodeBuffer* input, char** output, int len) { - int read; - - // TODO(dreiss): Don't fear the malloc. Think about taking a copy of - // the partial read instead of forcing the transport - // to prepend it to its buffer. 
- - read = PycStringIO->cread(input->stringiobuf, output, len); - - if (read == len) { - return true; - } else if (read == -1) { - return false; - } else { - PyObject* newiobuf; - - // using building functions as this is a rare codepath - newiobuf = PyObject_CallFunction( - input->refill_callable, "s#i", *output, read, len, NULL); - if (newiobuf == NULL) { - return false; - } - - // must do this *AFTER* the call so that we don't deref the io buffer - Py_CLEAR(input->stringiobuf); - input->stringiobuf = newiobuf; - - read = PycStringIO->cread(input->stringiobuf, output, len); - - if (read == len) { - return true; - } else if (read == -1) { - return false; - } else { - // TODO(dreiss): This could be a valid code path for big binary blobs. - PyErr_SetString(PyExc_TypeError, - "refill claimed to have refilled the buffer, but didn't!!"); - return false; - } - } -} - -static int8_t readByte(DecodeBuffer* input) { - char* buf; - if (!readBytes(input, &buf, sizeof(int8_t))) { - return -1; - } - - return *(int8_t*) buf; -} - -static int16_t readI16(DecodeBuffer* input) { - char* buf; - if (!readBytes(input, &buf, sizeof(int16_t))) { - return -1; - } - - return (int16_t) ntohs(*(int16_t*) buf); -} - -static int32_t readI32(DecodeBuffer* input) { - char* buf; - if (!readBytes(input, &buf, sizeof(int32_t))) { - return -1; - } - return (int32_t) ntohl(*(int32_t*) buf); -} - - -static int64_t readI64(DecodeBuffer* input) { - char* buf; - if (!readBytes(input, &buf, sizeof(int64_t))) { - return -1; - } - - return (int64_t) ntohll(*(int64_t*) buf); -} - -static double readDouble(DecodeBuffer* input) { - union { - int64_t f; - double t; - } transfer; - - transfer.f = readI64(input); - if (transfer.f == -1) { - return -1; - } - return transfer.t; -} - -static bool -checkTypeByte(DecodeBuffer* input, TType expected) { - TType got = readByte(input); - if (INT_CONV_ERROR_OCCURRED(got)) { - return false; - } - - if (expected != got) { - PyErr_SetString(PyExc_TypeError, "got wrong ttype while reading field"); - return false; - } - return true; -} - -static bool -skip(DecodeBuffer* input, TType type) { -#define SKIPBYTES(n) \ - do { \ - if (!readBytes(input, &dummy_buf, (n))) { \ - return false; \ - } \ - } while(0) - - char* dummy_buf; - - switch (type) { - - case T_BOOL: - case T_I08: SKIPBYTES(1); break; - case T_I16: SKIPBYTES(2); break; - case T_I32: SKIPBYTES(4); break; - case T_I64: - case T_DOUBLE: SKIPBYTES(8); break; - - case T_STRING: { - // TODO(dreiss): Find out if these check_ssize_t32s are really necessary. 
- int len = readI32(input); - if (!check_ssize_t_32(len)) { - return false; - } - SKIPBYTES(len); - break; - } - - case T_LIST: - case T_SET: { - TType etype; - int len, i; - - etype = readByte(input); - if (etype == -1) { - return false; - } - - len = readI32(input); - if (!check_ssize_t_32(len)) { - return false; - } - - for (i = 0; i < len; i++) { - if (!skip(input, etype)) { - return false; - } - } - break; - } - - case T_MAP: { - TType ktype, vtype; - int len, i; - - ktype = readByte(input); - if (ktype == -1) { - return false; - } - - vtype = readByte(input); - if (vtype == -1) { - return false; - } - - len = readI32(input); - if (!check_ssize_t_32(len)) { - return false; - } - - for (i = 0; i < len; i++) { - if (!(skip(input, ktype) && skip(input, vtype))) { - return false; - } - } - break; - } - - case T_STRUCT: { - while (true) { - TType type; - - type = readByte(input); - if (type == -1) { - return false; - } - - if (type == T_STOP) - break; - - SKIPBYTES(2); // tag - if (!skip(input, type)) { - return false; - } - } - break; - } - - case T_STOP: - case T_VOID: - case T_UTF16: - case T_UTF8: - case T_U64: - default: - PyErr_SetString(PyExc_TypeError, "Unexpected TType"); - return false; - - } - - return true; - -#undef SKIPBYTES -} - - -/* --- HELPER FUNCTION FOR DECODE_VAL --- */ - -static PyObject* -decode_val(DecodeBuffer* input, TType type, PyObject* typeargs); - -static bool -decode_struct(DecodeBuffer* input, PyObject* output, PyObject* spec_seq) { - int spec_seq_len = PyTuple_Size(spec_seq); - if (spec_seq_len == -1) { - return false; - } - - while (true) { - TType type; - int16_t tag; - PyObject* item_spec; - PyObject* fieldval = NULL; - StructItemSpec parsedspec; - - type = readByte(input); - if (type == -1) { - return false; - } - if (type == T_STOP) { - break; - } - tag = readI16(input); - if (INT_CONV_ERROR_OCCURRED(tag)) { - return false; - } - if (tag >= 0 && tag < spec_seq_len) { - item_spec = PyTuple_GET_ITEM(spec_seq, tag); - } else { - item_spec = Py_None; - } - - if (item_spec == Py_None) { - if (!skip(input, type)) { - return false; - } else { - continue; - } - } - - if (!parse_struct_item_spec(&parsedspec, item_spec)) { - return false; - } - if (parsedspec.type != type) { - if (!skip(input, type)) { - PyErr_SetString(PyExc_TypeError, "struct field had wrong type while reading and can't be skipped"); - return false; - } else { - continue; - } - } - - fieldval = decode_val(input, parsedspec.type, parsedspec.typeargs); - if (fieldval == NULL) { - return false; - } - - if (PyObject_SetAttr(output, parsedspec.attrname, fieldval) == -1) { - Py_DECREF(fieldval); - return false; - } - Py_DECREF(fieldval); - } - return true; -} - - -/* --- MAIN RECURSIVE INPUT FUCNTION --- */ - -// Returns a new reference. -static PyObject* -decode_val(DecodeBuffer* input, TType type, PyObject* typeargs) { - switch (type) { - - case T_BOOL: { - int8_t v = readByte(input); - if (INT_CONV_ERROR_OCCURRED(v)) { - return NULL; - } - - switch (v) { - case 0: Py_RETURN_FALSE; - case 1: Py_RETURN_TRUE; - // Don't laugh. This is a potentially serious issue. 
- default: PyErr_SetString(PyExc_TypeError, "boolean out of range"); return NULL; - } - break; - } - case T_I08: { - int8_t v = readByte(input); - if (INT_CONV_ERROR_OCCURRED(v)) { - return NULL; - } - - return PyInt_FromLong(v); - } - case T_I16: { - int16_t v = readI16(input); - if (INT_CONV_ERROR_OCCURRED(v)) { - return NULL; - } - return PyInt_FromLong(v); - } - case T_I32: { - int32_t v = readI32(input); - if (INT_CONV_ERROR_OCCURRED(v)) { - return NULL; - } - return PyInt_FromLong(v); - } - - case T_I64: { - int64_t v = readI64(input); - if (INT_CONV_ERROR_OCCURRED(v)) { - return NULL; - } - // TODO(dreiss): Find out if we can take this fastpath always when - // sizeof(long) == sizeof(long long). - if (CHECK_RANGE(v, LONG_MIN, LONG_MAX)) { - return PyInt_FromLong((long) v); - } - - return PyLong_FromLongLong(v); - } - - case T_DOUBLE: { - double v = readDouble(input); - if (v == -1.0 && PyErr_Occurred()) { - return false; - } - return PyFloat_FromDouble(v); - } - - case T_STRING: { - Py_ssize_t len = readI32(input); - char* buf; - if (!readBytes(input, &buf, len)) { - return NULL; - } - - return PyString_FromStringAndSize(buf, len); - } - - case T_LIST: - case T_SET: { - SetListTypeArgs parsedargs; - int32_t len; - PyObject* ret = NULL; - int i; - - if (!parse_set_list_args(&parsedargs, typeargs)) { - return NULL; - } - - if (!checkTypeByte(input, parsedargs.element_type)) { - return NULL; - } - - len = readI32(input); - if (!check_ssize_t_32(len)) { - return NULL; - } - - ret = PyList_New(len); - if (!ret) { - return NULL; - } - - for (i = 0; i < len; i++) { - PyObject* item = decode_val(input, parsedargs.element_type, parsedargs.typeargs); - if (!item) { - Py_DECREF(ret); - return NULL; - } - PyList_SET_ITEM(ret, i, item); - } - - // TODO(dreiss): Consider biting the bullet and making two separate cases - // for list and set, avoiding this post facto conversion. - if (type == T_SET) { - PyObject* setret; -#if (PY_VERSION_HEX < 0x02050000) - // hack needed for older versions - setret = PyObject_CallFunctionObjArgs((PyObject*)&PySet_Type, ret, NULL); -#else - // official version - setret = PySet_New(ret); -#endif - Py_DECREF(ret); - return setret; - } - return ret; - } - - case T_MAP: { - int32_t len; - int i; - MapTypeArgs parsedargs; - PyObject* ret = NULL; - - if (!parse_map_args(&parsedargs, typeargs)) { - return NULL; - } - - if (!checkTypeByte(input, parsedargs.ktag)) { - return NULL; - } - if (!checkTypeByte(input, parsedargs.vtag)) { - return NULL; - } - - len = readI32(input); - if (!check_ssize_t_32(len)) { - return false; - } - - ret = PyDict_New(); - if (!ret) { - goto error; - } - - for (i = 0; i < len; i++) { - PyObject* k = NULL; - PyObject* v = NULL; - k = decode_val(input, parsedargs.ktag, parsedargs.ktypeargs); - if (k == NULL) { - goto loop_error; - } - v = decode_val(input, parsedargs.vtag, parsedargs.vtypeargs); - if (v == NULL) { - goto loop_error; - } - if (PyDict_SetItem(ret, k, v) == -1) { - goto loop_error; - } - - Py_DECREF(k); - Py_DECREF(v); - continue; - - // Yuck! Destructors, anyone? 
- loop_error: - Py_XDECREF(k); - Py_XDECREF(v); - goto error; - } - - return ret; - - error: - Py_XDECREF(ret); - return NULL; - } - - case T_STRUCT: { - StructTypeArgs parsedargs; - if (!parse_struct_args(&parsedargs, typeargs)) { - return NULL; - } - - PyObject* ret = PyObject_CallObject(parsedargs.klass, NULL); - if (!ret) { - return NULL; - } - - if (!decode_struct(input, ret, parsedargs.spec)) { - Py_DECREF(ret); - return NULL; - } - - return ret; - } - - case T_STOP: - case T_VOID: - case T_UTF16: - case T_UTF8: - case T_U64: - default: - PyErr_SetString(PyExc_TypeError, "Unexpected TType"); - return NULL; - } -} - - -/* --- TOP-LEVEL WRAPPER FOR INPUT -- */ - -static PyObject* -decode_binary(PyObject *self, PyObject *args) { - PyObject* output_obj = NULL; - PyObject* transport = NULL; - PyObject* typeargs = NULL; - StructTypeArgs parsedargs; - DecodeBuffer input = {}; - - if (!PyArg_ParseTuple(args, "OOO", &output_obj, &transport, &typeargs)) { - return NULL; - } - - if (!parse_struct_args(&parsedargs, typeargs)) { - return NULL; - } - - if (!decode_buffer_from_obj(&input, transport)) { - return NULL; - } - - if (!decode_struct(&input, output_obj, parsedargs.spec)) { - free_decodebuf(&input); - return NULL; - } - - free_decodebuf(&input); - - Py_RETURN_NONE; -} - -/* ====== END READING FUNCTIONS ====== */ - - -/* -- PYTHON MODULE SETUP STUFF --- */ - -static PyMethodDef ThriftFastBinaryMethods[] = { - - {"encode_binary", encode_binary, METH_VARARGS, ""}, - {"decode_binary", decode_binary, METH_VARARGS, ""}, - - {NULL, NULL, 0, NULL} /* Sentinel */ -}; - -PyMODINIT_FUNC -initfastbinary(void) { -#define INIT_INTERN_STRING(value) \ - do { \ - INTERN_STRING(value) = PyString_InternFromString(#value); \ - if(!INTERN_STRING(value)) return; \ - } while(0) - - INIT_INTERN_STRING(cstringio_buf); - INIT_INTERN_STRING(cstringio_refill); -#undef INIT_INTERN_STRING - - PycString_IMPORT; - if (PycStringIO == NULL) return; - - (void) Py_InitModule("thrift.protocol.fastbinary", ThriftFastBinaryMethods); -} diff --git a/module/remote/pyload.thrift b/module/remote/pyload.thrift index 413a841d7..4fb054cd1 100644 --- a/module/remote/pyload.thrift +++ b/module/remote/pyload.thrift @@ -28,26 +28,6 @@ enum Destination { Collector } -enum CaptchaStatus { - Init, - Waiting, - User, - SharedUser, - Done -} - -enum ConfigItemType { - String, - Password, - Choice, - Bool, - Integer, - IP, - File, - Folder, - Time -} - enum ElementType { Package, File @@ -95,7 +75,7 @@ struct ConfigItem { 1: string name, 2: string description, 3: string value, - 4: ConfigItemType type, + 4: string type, 5: optional set choice } @@ -133,7 +113,7 @@ struct PackageData { } struct CaptchaTask { - 1: TaskID tid, + 1: i16 tid, 2: binary data, 3: string type } @@ -222,7 +202,7 @@ service Pyload { //captcha bool isCaptchaWaiting(), CaptchaTask getCaptchaTask(1: bool exclusive), - CaptchaStatus getCaptchaTaskStatus(1: TaskID tid), + string getCaptchaTaskStatus(1: TaskID tid), void setCaptchaResult(1: TaskID tid, 2: string result), //events diff --git a/module/remote/thriftgen/pyload/Pyload-remote b/module/remote/thriftgen/pyload/Pyload-remote index 67edb2b71..cd8a1d703 100755 --- a/module/remote/thriftgen/pyload/Pyload-remote +++ b/module/remote/thriftgen/pyload/Pyload-remote @@ -66,7 +66,7 @@ if len(sys.argv) <= 1 or sys.argv[1] == '--help': print ' getFileOrder(PackageID pid)' print ' bool isCaptchaWaiting()' print ' CaptchaTask getCaptchaTask(bool exclusive)' - print ' CaptchaStatus getCaptchaTaskStatus(TaskID tid)' + print ' string 
getCaptchaTaskStatus(TaskID tid)' print ' void setCaptchaResult(TaskID tid, string result)' print ' getEvents()' print ' getAccounts()' diff --git a/module/remote/thriftgen/pyload/Pyload.py b/module/remote/thriftgen/pyload/Pyload.py index b4912a580..032c1baaa 100644 --- a/module/remote/thriftgen/pyload/Pyload.py +++ b/module/remote/thriftgen/pyload/Pyload.py @@ -7344,7 +7344,7 @@ class getCaptchaTaskStatus_result: """ thrift_spec = ( - (0, TType.I32, 'success', None, None, ), # 0 + (0, TType.STRING, 'success', None, None, ), # 0 ) def __init__(self, success=None,): @@ -7360,8 +7360,8 @@ class getCaptchaTaskStatus_result: if ftype == TType.STOP: break if fid == 0: - if ftype == TType.I32: - self.success = iprot.readI32(); + if ftype == TType.STRING: + self.success = iprot.readString(); else: iprot.skip(ftype) else: @@ -7375,8 +7375,8 @@ class getCaptchaTaskStatus_result: return oprot.writeStructBegin('getCaptchaTaskStatus_result') if self.success != None: - oprot.writeFieldBegin('success', TType.I32, 0) - oprot.writeI32(self.success) + oprot.writeFieldBegin('success', TType.STRING, 0) + oprot.writeString(self.success) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() diff --git a/module/remote/thriftgen/pyload/ttypes.py b/module/remote/thriftgen/pyload/ttypes.py index 96d2fd7aa..6c5642e60 100644 --- a/module/remote/thriftgen/pyload/ttypes.py +++ b/module/remote/thriftgen/pyload/ttypes.py @@ -81,64 +81,6 @@ class Destination: "Collector": 1, } -class CaptchaStatus: - Init = 0 - Waiting = 1 - User = 2 - SharedUser = 3 - Done = 4 - - _VALUES_TO_NAMES = { - 0: "Init", - 1: "Waiting", - 2: "User", - 3: "SharedUser", - 4: "Done", - } - - _NAMES_TO_VALUES = { - "Init": 0, - "Waiting": 1, - "User": 2, - "SharedUser": 3, - "Done": 4, - } - -class ConfigItemType: - String = 0 - Password = 1 - Choice = 2 - Bool = 3 - Integer = 4 - IP = 5 - File = 6 - Folder = 7 - Time = 8 - - _VALUES_TO_NAMES = { - 0: "String", - 1: "Password", - 2: "Choice", - 3: "Bool", - 4: "Integer", - 5: "IP", - 6: "File", - 7: "Folder", - 8: "Time", - } - - _NAMES_TO_VALUES = { - "String": 0, - "Password": 1, - "Choice": 2, - "Bool": 3, - "Integer": 4, - "IP": 5, - "File": 6, - "Folder": 7, - "Time": 8, - } - class ElementType: Package = 0 File = 1 @@ -556,7 +498,7 @@ class ConfigItem: (1, TType.STRING, 'name', None, None, ), # 1 (2, TType.STRING, 'description', None, None, ), # 2 (3, TType.STRING, 'value', None, None, ), # 3 - (4, TType.I32, 'type', None, None, ), # 4 + (4, TType.STRING, 'type', None, None, ), # 4 (5, TType.SET, 'choice', (TType.STRING,None), None, ), # 5 ) @@ -592,8 +534,8 @@ class ConfigItem: else: iprot.skip(ftype) elif fid == 4: - if ftype == TType.I32: - self.type = iprot.readI32(); + if ftype == TType.STRING: + self.type = iprot.readString(); else: iprot.skip(ftype) elif fid == 5: @@ -629,8 +571,8 @@ class ConfigItem: oprot.writeString(self.value) oprot.writeFieldEnd() if self.type != None: - oprot.writeFieldBegin('type', TType.I32, 4) - oprot.writeI32(self.type) + oprot.writeFieldBegin('type', TType.STRING, 4) + oprot.writeString(self.type) oprot.writeFieldEnd() if self.choice != None: oprot.writeFieldBegin('choice', TType.SET, 5) @@ -1113,7 +1055,7 @@ class CaptchaTask: thrift_spec = ( None, # 0 - (1, TType.I32, 'tid', None, None, ), # 1 + (1, TType.I16, 'tid', None, None, ), # 1 (2, TType.STRING, 'data', None, None, ), # 2 (3, TType.STRING, 'type', None, None, ), # 3 ) @@ -1133,8 +1075,8 @@ class CaptchaTask: if ftype == TType.STOP: break if fid == 1: - if ftype == TType.I32: - 
self.tid = iprot.readI32(); + if ftype == TType.I16: + self.tid = iprot.readI16(); else: iprot.skip(ftype) elif fid == 2: @@ -1158,8 +1100,8 @@ class CaptchaTask: return oprot.writeStructBegin('CaptchaTask') if self.tid != None: - oprot.writeFieldBegin('tid', TType.I32, 1) - oprot.writeI32(self.tid) + oprot.writeFieldBegin('tid', TType.I16, 1) + oprot.writeI16(self.tid) oprot.writeFieldEnd() if self.data != None: oprot.writeFieldBegin('data', TType.STRING, 2) diff --git a/module/web/ajax/__init__.py b/module/web/ajax/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/module/web/ajax/models.py b/module/web/ajax/models.py deleted file mode 100644 index 35e0d6486..000000000 --- a/module/web/ajax/models.py +++ /dev/null @@ -1,2 +0,0 @@ - -# Create your models here. diff --git a/module/web/ajax/tests.py b/module/web/ajax/tests.py deleted file mode 100644 index 2247054b3..000000000 --- a/module/web/ajax/tests.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -This file demonstrates two different styles of tests (one doctest and one -unittest). These will both pass when you run "manage.py test". - -Replace these with more appropriate tests for your application. -""" - -from django.test import TestCase - -class SimpleTest(TestCase): - def test_basic_addition(self): - """ - Tests that 1 + 1 always equals 2. - """ - self.failUnlessEqual(1 + 1, 2) - -__test__ = {"doctest": """ -Another way to test that 1 + 1 is equal to 2. - ->>> 1 + 1 == 2 -True -"""} - diff --git a/module/web/ajax/urls.py b/module/web/ajax/urls.py deleted file mode 100644 index ebbf401af..000000000 --- a/module/web/ajax/urls.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -from django.conf.urls.defaults import * - - -urlpatterns = patterns('ajax', - # Example: - # (r'^pyload/', include('pyload.foo.urls')), - - # Uncomment the admin/doc line below and add 'django.contrib.admindocs' - # to INSTALLED_APPS to enable admin documentation: - # (r'^admin/doc/', include('django.contrib.admindocs.urls')), - - # Uncomment the next line to enable the admin: - (r'^add_package$', 'views.add_package'), - (r'^abort_link/(\d+)$', 'views.abort_link'), - (r'^status$', 'views.status'), - (r'^links$', 'views.links'), #currently active links - (r'^queue$', 'views.queue'), - (r'^pause$', 'views.pause'), - (r'^unpause$', 'views.unpause'), - (r'^cancel$', 'views.cancel'), - (r'^packages$', 'views.packages'), - (r'^package/(\d+)$', 'views.package'), - (r'^link/(\d+)$', 'views.link'), - (r'^remove_package/(\d+)$', 'views.remove_package'), - (r'^restart_package/(\d+)$', 'views.restart_package'), - (r'^remove_link/(\d+)$', 'views.remove_link'), - (r'^restart_link/(\d+)$', 'views.restart_link'), - (r'^move_package/(\d+)/(\d+)$', 'views.move_package'), - (r'^set_captcha$', 'views.set_captcha'), - (r'^package_order/([0-9|]+)$', 'views.package_order'), - (r'^link_order/([0-9|]+)$', 'views.link_order'), - (r'edit_package$', 'views.edit_package'), - (r'restart_failed$', 'views.restart_failed'), - (r'delete_finished$', 'views.delete_finished'), - ) \ No newline at end of file diff --git a/module/web/ajax/views.py b/module/web/ajax/views.py deleted file mode 100644 index acceac585..000000000 --- a/module/web/ajax/views.py +++ /dev/null @@ -1,321 +0,0 @@ -# Create your views here. 
-from os.path import join -import time - -from django.conf import settings -from django.core.serializers import json -from django.http import HttpResponse -from django.http import HttpResponseForbidden -from django.http import HttpResponseServerError -from django.utils import simplejson -from django.utils.translation import ugettext as _ -import base64 - -from traceback import print_exc - -def format_time(seconds): - seconds = int(seconds) - - hours, seconds = divmod(seconds, 3600) - minutes, seconds = divmod(seconds, 60) - return "%.2i:%.2i:%.2i" % (hours, minutes, seconds) - -def get_sort_key(item): - return item["order"] - -def permission(perm): - def _dec(view_func): - def _view(request, * args, ** kwargs): - if request.user.has_perm(perm) and request.user.is_authenticated(): - return view_func(request, * args, ** kwargs) - else: - return HttpResponseForbidden() - - _view.__name__ = view_func.__name__ - _view.__dict__ = view_func.__dict__ - _view.__doc__ = view_func.__doc__ - - return _view - - return _dec - -class JsonResponse(HttpResponse): - def __init__(self, object): - content = simplejson.dumps( - object, indent=2, cls=json.DjangoJSONEncoder, - ensure_ascii=False) - super(JsonResponse, self).__init__( - content)#, content_type='application/json') #@TODO uncomment - self['Cache-Control'] = 'no-cache, must-revalidate' - - -@permission('pyload.can_add') -def add_package(request): - - name = request.POST['add_name'] - queue = int(request.POST['add_dest']) - links = request.POST['add_links'].split("\n") - pw = request.POST.get("add_password", "").strip("\n\r") - - try: - f = request.FILES['add_file'] - - if name is None or name == "": - name = f.name - - fpath = join(settings.PYLOAD.get_conf_val("general","download_folder"), "tmp_"+ f.name) - destination = open(fpath, 'wb') - for chunk in f.chunks(): - destination.write(chunk) - destination.close() - links.insert(0, fpath) - except: - pass - - if name is None or name == "": - return HttpResponseServerError() - - links = map(lambda x: x.strip(), links) - links = filter(lambda x: x != "", links) - - pack = settings.PYLOAD.add_package(name, links, queue) - if pw: - data = {"password": pw} - settings.PYLOAD.set_package_data(pack, data) - - return JsonResponse("success") - -@permission('pyload.can_delete') -def remove_link(request, id): - try: - settings.PYLOAD.del_links([int(id)]) - return JsonResponse("sucess") - except Exception, e: - return HttpResponseServerError() - -@permission('pyload.can_see_dl') -def status(request): - try: - status = settings.PYLOAD.status_server() - status['captcha'] = settings.PYLOAD.is_captcha_waiting() - return JsonResponse(status) - except: - return HttpResponseServerError() - -@permission('pyload.can_see_dl') -def links(request): - try: - links = settings.PYLOAD.status_downloads() - ids = [] - for link in links: - ids.append(link['id']) - - if link['status'] == 12: - link['info'] = "%s @ %s kb/s" % (link['format_eta'], round(link['speed'], 2)) - elif link['status'] == 5: - link['percent'] = 0 - link['size'] = 0 - link['kbleft'] = 0 - link['info'] = _("waiting %s") % link['format_wait'] - else: - link['info'] = "" - - - data = {} - data['links'] = links - data['ids'] = ids - return JsonResponse(data) - except Exception, e: - return HttpResponseServerError() - -@permission('pyload.can_see_dl') -def queue(request): - try: - return JsonResponse(settings.PYLOAD.get_queue()) - - except: - return HttpResponseServerError() - - -@permission('pyload.can_change_satus') -def pause(request): - try: - return 
JsonResponse(settings.PYLOAD.pause_server()) - - except: - return HttpResponseServerError() - - -@permission('pyload.can_change_status') -def unpause(request): - try: - return JsonResponse(settings.PYLOAD.unpause_server()) - - except: - return HttpResponseServerError() - - -@permission('pyload.can_change_status') -def cancel(request): - try: - return JsonResponse(settings.PYLOAD.stop_downloads()) - except: - return HttpResponseServerError() - -@permission('pyload.can_see_dl') -def packages(request): - try: - data = settings.PYLOAD.get_queue() - - for package in data: - package['links'] = [] - for file in settings.PYLOAD.get_package_files(package['id']): - package['links'].append(settings.PYLOAD.get_file_info(file)) - - return JsonResponse(data) - - except: - return HttpResponseServerError() - -@permission('pyload.can_see_dl') -def package(request, id): - try: - data = settings.PYLOAD.get_package_data(int(id)) - - for pyfile in data["links"].itervalues(): - if pyfile["status"] == 0: - pyfile["icon"] = "status_finished.png" - elif pyfile["status"] in (2,3): - pyfile["icon"] = "status_queue.png" - elif pyfile["status"] in (9,1): - pyfile["icon"] = "status_offline.png" - elif pyfile["status"] == 5: - pyfile["icon"] = "status_waiting.png" - elif pyfile["status"] == 8: - pyfile["icon"] = "status_failed.png" - elif pyfile["status"] in (11,13): - pyfile["icon"] = "status_proc.png" - else: - pyfile["icon"] = "status_downloading.png" - - tmp = data["links"].values() - tmp.sort(key=get_sort_key) - data["links"] = tmp - - return JsonResponse(data) - - except: - print_exc() - return HttpResponseServerError() - -@permission('pyload.can_add') -def package_order(request, ids): - try: - pid, pos = ids.split("|") - settings.PYLOAD.order_package(int(pid), int(pos)) - return JsonResponse("success") - except: - print_exc() - return HttpResponseServerError() - -@permission('pyload.can_add') -def link_order(request, ids): - try: - pid, pos = ids.split("|") - settings.PYLOAD.order_file(int(pid), int(pos)) - return JsonResponse("success") - except: - print_exc() - return HttpResponseServerError() - -@permission('pyload.can_see_dl') -def link(request, id): - try: - data = settings.PYLOAD.get_file_info(int(id)) - return JsonResponse(data) - - except: - return HttpResponseServerError() - -@permission('pyload.can_delete') -def remove_package(request, id): - try: - settings.PYLOAD.del_packages([int(id)]) - return JsonResponse("success") - except Exception, e: - print_exc() - return HttpResponseServerError() - -@permission('pyload.can_add') -def restart_package(request, id): - try: - settings.PYLOAD.restart_package(int(id)) - return JsonResponse("success") - except Exception: - return HttpResponseServerError() - -@permission('pyload.can_add') -def restart_link(request, id): - try: - settings.PYLOAD.restart_file(int(id)) - return JsonResponse("success") - except Exception: - return HttpResponseServerError() - -@permission('pyload.can_delete') -def abort_link(request, id): - try: - settings.PYLOAD.stop_download("link", int(id)) - return JsonResponse("success") - except: - return HttpResponseServerError() - -@permission('pyload.can_add') -def move_package(request, dest, id): - try: - settings.PYLOAD.move_package(int(dest), int(id)) - return JsonResponse("success") - except: - return HttpResponseServerError() - -@permission('pyload.can_add') -def edit_package(request): - try: - id = int(request.POST.get("pack_id")) - data = {"name": request.POST.get("pack_name"), - "folder": request.POST.get("pack_folder"), - "priority": 
request.POST.get("pack_prio"), - "password": request.POST.get("pack_pws")} - - settings.PYLOAD.set_package_data(id, data) - return JsonResponse("success") - - except: - return HttpResponseServerError() - -@permission('pyload.can_add') -def set_captcha(request): - if request.META['REQUEST_METHOD'] == "POST": - try: - settings.PYLOAD.set_captcha_result(request.POST["cap_id"], request.POST["cap_text"]) - except: - pass - - id, binary, typ = settings.PYLOAD.get_captcha_task() - - if id: - binary = base64.standard_b64encode(str(binary)) - src = "data:image/%s;base64,%s" % (typ, binary) - - return JsonResponse({'captcha': True, 'src': src, 'id': id}) - else: - return JsonResponse({'captcha': False}) - - -@permission('pyload.can_delete') -def delete_finished(request): - return JsonResponse({"del":settings.PYLOAD.delete_finished()}) - -@permission('pyload.can_delete') -def restart_failed(request): - return JsonResponse(settings.PYLOAD.restart_failed()) \ No newline at end of file diff --git a/module/web/cnl/__init__.py b/module/web/cnl/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/module/web/cnl/models.py b/module/web/cnl/models.py deleted file mode 100644 index 71a836239..000000000 --- a/module/web/cnl/models.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.db import models - -# Create your models here. diff --git a/module/web/cnl/tests.py b/module/web/cnl/tests.py deleted file mode 100644 index 2247054b3..000000000 --- a/module/web/cnl/tests.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -This file demonstrates two different styles of tests (one doctest and one -unittest). These will both pass when you run "manage.py test". - -Replace these with more appropriate tests for your application. -""" - -from django.test import TestCase - -class SimpleTest(TestCase): - def test_basic_addition(self): - """ - Tests that 1 + 1 always equals 2. - """ - self.failUnlessEqual(1 + 1, 2) - -__test__ = {"doctest": """ -Another way to test that 1 + 1 is equal to 2. - ->>> 1 + 1 == 2 -True -"""} - diff --git a/module/web/cnl/urls.py b/module/web/cnl/urls.py deleted file mode 100644 index 635e751ca..000000000 --- a/module/web/cnl/urls.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -from django.conf.urls.defaults import * - - -urlpatterns = patterns('cnl', - # Example: - # (r'^pyload/', include('pyload.foo.urls')), - - # Uncomment the admin/doc line below and add 'django.contrib.admindocs' - # to INSTALLED_APPS to enable admin documentation: - # (r'^admin/doc/', include('django.contrib.admindocs.urls')), - - # Uncomment the next line to enable the admin: - (r'^add$', 'views.add'), - (r'^addcrypted$', 'views.addcrypted'), - (r'^addcrypted2$', 'views.addcrypted2'), - (r'^crossdomain\.xml', 'views.crossdomain'), - (r'^jdcheck\.js', 'views.jdcheck'), - (r'^checkSupportForUrl', 'views.checksupport'), - (r'', 'views.flash') - ) diff --git a/module/web/cnl/views.py b/module/web/cnl/views.py deleted file mode 100644 index 8b43bdccc..000000000 --- a/module/web/cnl/views.py +++ /dev/null @@ -1,166 +0,0 @@ -# Create your views here. 
- - -from os.path import join -import re -from urllib import unquote -from base64 import standard_b64decode -from binascii import unhexlify - -from django.conf import settings -from django.http import HttpResponse -from django.http import HttpResponseServerError - -from django.core.serializers import json -from django.utils import simplejson - -try: - from Crypto.Cipher import AES -except: - pass - -def local_check(function): - def _dec(view_func): - def _view(request, * args, ** kwargs): - if request.META.get('REMOTE_ADDR', "0") in ('127.0.0.1','localhost') or request.META.get('HTTP_HOST','0') == '127.0.0.1:9666': - return view_func(request, * args, ** kwargs) - else: - return HttpResponseServerError() - - _view.__name__ = view_func.__name__ - _view.__dict__ = view_func.__dict__ - _view.__doc__ = view_func.__doc__ - - return _view - - if function is None: - return _dec - else: - return _dec(function) - -class JsonResponse(HttpResponse): - def __init__(self, obj, request): - cb = request.GET.get("callback") - if cb: - obj = {"content": obj} - content = simplejson.dumps(obj, indent=2, cls=json.DjangoJSONEncoder, ensure_ascii=False) - content = "%s(%s)\r\n" % (cb, content) - HttpResponse.__init__(self, content, content_type="application/json") - else: - content = "%s\r\n" % obj - HttpResponse.__init__(self, content, content_type="text/html") - self["Cache-Control"] = "no-cache, must-revalidate" - -@local_check -def flash(request): - return HttpResponse("JDownloader") - -@local_check -def add(request): - package = request.POST.get('referer', 'ClickAndLoad Package') - urls = filter(lambda x: x != "", request.POST['urls'].split("\n")) - - settings.PYLOAD.add_package(package, urls, False) - - return HttpResponse() - -@local_check -def addcrypted(request): - - package = request.POST.get('referer', 'ClickAndLoad Package') - dlc = request.POST['crypted'].replace(" ", "+") - - dlc_path = join(settings.DL_ROOT, package.replace("/", "").replace("\\", "").replace(":", "") + ".dlc") - dlc_file = file(dlc_path, "wb") - dlc_file.write(dlc) - dlc_file.close() - - try: - settings.PYLOAD.add_package(package, [dlc_path], False) - except: - return JsonResponse("", request) - else: - return JsonResponse("success", request) - -@local_check -def addcrypted2(request): - - package = request.POST.get("source", "ClickAndLoad Package") - crypted = request.POST["crypted"] - jk = request.POST["jk"] - - crypted = standard_b64decode(unquote(crypted.replace(" ", "+"))) - if settings.JS: - jk = "%s f()" % jk - jk = settings.JS.eval(jk) - - else: - try: - jk = re.findall(r"return ('|\")(.+)('|\")", jk)[0][1] - except: - ## Test for some known js functions to decode - if jk.find("dec") > -1 and jk.find("org") > -1: - org = re.findall(r"var org = ('|\")([^\"']+)", jk)[0][1] - jk = list(org) - jk.reverse() - jk = "".join(jk) - else: - print "Could not decrypt key, please install py-spidermonkey or ossp-js" - - try: - Key = unhexlify(jk) - except: - print "Could not decrypt key, please install py-spidermonkey or ossp-js" - return JsonResponse("failed", request) - - IV = Key - - obj = AES.new(Key, AES.MODE_CBC, IV) - result = obj.decrypt(crypted).replace("\x00", "").replace("\r","").split("\n") - - result = filter(lambda x: x != "", result) - - try: - settings.PYLOAD.add_package(package, result, False) - except: - return JsonResponse("failed can't add", request) - else: - return JsonResponse("success", request) - -@local_check -def flashgot(request): - if request.META['HTTP_REFERER'] != "http://localhost:9666/flashgot" and 
request.META['HTTP_REFERER'] != "http://127.0.0.1:9666/flashgot": - return HttpResponseServerError() - - autostart = int(request.POST.get('autostart', 0)) - package = request.POST.get('package', "FlashGot") - urls = filter(lambda x: x != "", request.POST['urls'].split("\n")) - folder = request.POST.get('dir', None) - - settings.PYLOAD.add_package(package, urls, autostart) - - return HttpResponse("") - -@local_check -def crossdomain(request): - rep = "\n" - rep += "\n" - rep += "\n" - rep += "\n" - rep += "" - return HttpResponse(rep) - -@local_check -def checksupport(request): - - url = request.GET.get("url") - res = settings.PYLOAD.checkURLs([url]) - supported = (not res[0][1] is None) - - return JsonResponse(str(supported).lower(), request) - -@local_check -def jdcheck(request): - rep = "jdownloader=true;\n" - rep += "var version='10629';\n" - return HttpResponse(rep) diff --git a/module/web/pyload/__init__.py b/module/web/pyload/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/module/web/pyload/admin.py b/module/web/pyload/admin.py deleted file mode 100644 index 99cb28836..000000000 --- a/module/web/pyload/admin.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -from django.contrib import admin -from models import Prefs -from django.contrib.auth.models import User -from django.contrib.auth.admin import UserAdmin as RealUserAdmin - - -class UserProfileInline(admin.StackedInline): - model = Prefs - -class UserAdmin(RealUserAdmin): - inlines = [ UserProfileInline ] - -admin.site.unregister(User) -admin.site.register(User, UserAdmin) \ No newline at end of file diff --git a/module/web/pyload/models.py b/module/web/pyload/models.py deleted file mode 100644 index 86962f23c..000000000 --- a/module/web/pyload/models.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -from django.db import models -from django.contrib.auth.models import User -# Create your models here. - -class Prefs(models.Model): - """ Permissions setting """ - - user = models.ForeignKey(User, unique=True) - template = models.CharField(max_length=30, default='default', null=False, blank=False) #@TODO: currently unused - - class Meta: - permissions = ( - ('can_see_dl', 'User can see Downloads'), - ('can_change_status', 'User can change Status'), - ('can_download', 'User can download'), - ('can_add', 'User can add Links'), - ('can_delete', 'User can delete Links'), - ('can_see_logs', 'User can see Logs'), - ) - verbose_name = "Preferences" - verbose_name_plural = "Preferences" - - def __unicode__(self): - return "Preferences for %s" % self.user - - -def user_post_save(sender, instance, **kwargs): - profile, new = Prefs.objects.get_or_create(user=instance) - -models.signals.post_save.connect(user_post_save, User) \ No newline at end of file diff --git a/module/web/pyload/templatetags/__init__.py b/module/web/pyload/templatetags/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/module/web/pyload/templatetags/contains.py b/module/web/pyload/templatetags/contains.py deleted file mode 100644 index ed6225a95..000000000 --- a/module/web/pyload/templatetags/contains.py +++ /dev/null @@ -1,14 +0,0 @@ -from django import template -register = template.Library() - -@register.filter() -def contains(value, arg): - """ - Usage: - {% if text|contains:" http://" %} - This is a link. - {% else %} - Not a link. 
- {% endif %} - """ - return arg in value diff --git a/module/web/pyload/templatetags/quotepath.py b/module/web/pyload/templatetags/quotepath.py deleted file mode 100644 index 8483a1553..000000000 --- a/module/web/pyload/templatetags/quotepath.py +++ /dev/null @@ -1,68 +0,0 @@ -import os - -from django.template.defaultfilters import stringfilter -from django import template - -try: - from os.path import relpath -except: - from posixpath import curdir, sep, pardir, join - def relpath(path, start=curdir): - """Return a relative version of a path""" - if not path: - raise ValueError("no path specified") - start_list = os.path.abspath(start).split(sep) - path_list = os.path.abspath(path).split(sep) - # Work out how much of the filepath is shared by start and path. - i = len(os.path.commonprefix([start_list, path_list])) - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - -register = template.Library() - -quotechar = "::/" - -@stringfilter -def quotepath(path): - try: - return path.replace("../", quotechar) - except AttributeError: - return path - except: - return "" - - -register.filter(quotepath) - -@stringfilter -def unquotepath(path): - try: - return path.replace(quotechar, "../") - except AttributeError: - return path - except: - return "" - -register.filter(unquotepath) - -def path_make_absolute(path): - p = os.path.abspath(path) - if p[-1] == os.path.sep: - return p - else: - return p + os.path.sep - - -register.filter(path_make_absolute) - -def path_make_relative(path): - p = relpath(path) - if p[-1] == os.path.sep: - return p - else: - return p + os.path.sep - -register.filter(path_make_relative) - diff --git a/module/web/pyload/templatetags/token.py b/module/web/pyload/templatetags/token.py deleted file mode 100644 index e6117b839..000000000 --- a/module/web/pyload/templatetags/token.py +++ /dev/null @@ -1,17 +0,0 @@ - -from django import VERSION -from django import template -register = template.Library() - -if VERSION[:3] < (1,1,2): - - class TokenNode(template.Node): - def render(self, content): - return "" - - @register.tag() - def csrf_token(parser, token): - """ - Return nothing, since csrf is deactivated in django 1.1 - """ - return TokenNode() diff --git a/module/web/pyload/templatetags/truncate.py b/module/web/pyload/templatetags/truncate.py deleted file mode 100644 index ed35ea998..000000000 --- a/module/web/pyload/templatetags/truncate.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.template.defaultfilters import stringfilter -from django import template - -register = template.Library() - - -@stringfilter -def truncate(value, n): - if (n - len(value)) < 3: - return value[:n]+"..." - return value - -register.filter(truncate) diff --git a/module/web/pyload/tests.py b/module/web/pyload/tests.py deleted file mode 100644 index 2247054b3..000000000 --- a/module/web/pyload/tests.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -This file demonstrates two different styles of tests (one doctest and one -unittest). These will both pass when you run "manage.py test". - -Replace these with more appropriate tests for your application. -""" - -from django.test import TestCase - -class SimpleTest(TestCase): - def test_basic_addition(self): - """ - Tests that 1 + 1 always equals 2. - """ - self.failUnlessEqual(1 + 1, 2) - -__test__ = {"doctest": """ -Another way to test that 1 + 1 is equal to 2. 
- ->>> 1 + 1 == 2 -True -"""} - diff --git a/module/web/pyload/urls.py b/module/web/pyload/urls.py deleted file mode 100644 index fe85c1096..000000000 --- a/module/web/pyload/urls.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- - -from os.path import join - -from django.conf import settings -from django.conf.urls.defaults import * - - -urlpatterns = patterns('pyload', - (r'^home/$', 'views.home'), - (r'^downloads/$', 'views.downloads',{},'downloads'), - (r'^download/(?P[a-zA-z\.0-9\-/_% "\\]+)$', 'views.download',{},'download'), - (r'^queue/$', 'views.queue',{}, 'queue'), - (r'^collector/$', 'views.collector',{}, 'collector'), - (r'^settings/$', 'views.config',{}, 'config'), - (r'^logs/$', 'views.logs',{}, 'logs'), - (r'^logs/(?P\d+)$', 'views.logs',{}, 'logs'), - (r'^package_ui.js$', 'views.package_ui', {}, 'package_ui'), - (r'^$', 'views.home',{}, 'home'), - url(r'^pathchooser/(?P.*)', 'views.path', {'type':'folder'}, name='path'), - url(r'^pathchooser/$', 'views.root', {'type':'folder'}, name='pathroot'), - url(r'^filechooser/(?P.*)', 'views.path', {'type':'file'}, name='file'), - url(r'^filechooser/$', 'views.root', {'type':'file'}, name='fileroot'), - ) - -urlpatterns += patterns('django.contrib.auth', - (r'^login/$', 'views.login', {'template_name': join(settings.TEMPLATE, 'login.html')}), - (r'^logout/$', 'views.logout', {'template_name': join(settings.TEMPLATE, 'logout.html')}, 'logout'), -) - - diff --git a/module/web/pyload/views.py b/module/web/pyload/views.py deleted file mode 100644 index 8e72402a4..000000000 --- a/module/web/pyload/views.py +++ /dev/null @@ -1,492 +0,0 @@ -# -*- coding: utf-8 -*- - -# Create your views here. -import mimetypes -import os -from os import listdir -from os import stat -from os.path import isdir -from os.path import isfile -from os.path import join -from sys import getfilesystemencoding -from urllib import unquote -from itertools import chain -from datetime import datetime -from time import localtime, strftime -from copy import deepcopy -from operator import itemgetter -from pyload.templatetags import quotepath - -try: - from os.path import relpath -except: - from posixpath import curdir, sep, pardir - def relpath(path, start=curdir): - """Return a relative version of a path""" - if not path: - raise ValueError("no path specified") - start_list = os.path.abspath(start).split(sep) - path_list = os.path.abspath(path).split(sep) - # Work out how much of the filepath is shared by start and path. 
- i = len(os.path.commonprefix([start_list, path_list])) - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - -from django.conf import settings -from django.contrib.auth.decorators import login_required -from django.http import HttpResponse -from django.http import HttpResponseNotFound -from django.http import HttpResponseRedirect -from django.shortcuts import render_to_response -from django.template import RequestContext -from django.utils.translation import ugettext as _ -from django.core.urlresolvers import reverse - - -def get_sort_key(item): - return item[1]["order"] - -def formatSize(size): - """formats size of bytes""" - size = int(size) - steps = 0 - sizes = ["KB", "MB", "GB", "TB"] - - while size > 1000: - size /= 1024.0 - steps += 1 - - return "%.2f %s" % (size, sizes[steps]) - -def check_server(function): - def _dec(view_func): - def _view(request, * args, ** kwargs): - try: - version = settings.PYLOAD.get_server_version() - except Exception, e: - return base(request, messages=[_('Can\'t connect to pyLoad. Please check your configuration and make sure pyLoad is running.'), str(e)]) - return view_func(request, * args, ** kwargs) - - _view.__name__ = view_func.__name__ - _view.__dict__ = view_func.__dict__ - _view.__doc__ = view_func.__doc__ - - return _view - - if function is None: - return _dec - else: - return _dec(function) - - -def permission(perm): - def _dec(view_func): - def _view(request, * args, ** kwargs): - if request.user.has_perm(perm) and request.user.is_authenticated(): - return view_func(request, * args, ** kwargs) - else: - return base(request, messages=[_('You don\'t have permission to view this page.')]) - - _view.__name__ = view_func.__name__ - _view.__dict__ = view_func.__dict__ - _view.__doc__ = view_func.__doc__ - - return _view - - return _dec - - - -def status_proc(request): - return {'status': settings.PYLOAD.status_server(), 'captcha': settings.PYLOAD.is_captcha_waiting()} - - -def base(request, messages): - return render_to_response(join(settings.TEMPLATE, 'base.html'), {'messages': messages}, RequestContext(request)) - -@login_required -@permission('pyload.can_see_dl') -@check_server -def home(request): - res = settings.PYLOAD.status_downloads() - - for link in res: - if link["status"] == 12: - link["information"] = "%s kB @ %s kB/s" % (link["size"] - link["kbleft"], link["speed"]) - - return render_to_response(join(settings.TEMPLATE, 'home.html'), RequestContext(request, {'content': res}, [status_proc])) - - -@login_required -@permission('pyload.can_see_dl') -@check_server -def queue(request): - queue = settings.PYLOAD.get_queue_info() - - data = zip(queue.keys(), queue.values()) - data.sort(key=get_sort_key) - - return render_to_response(join(settings.TEMPLATE, 'queue.html'), RequestContext(request, {'content': data}, [status_proc])) - - -@login_required -@permission('pyload.can_download') -@check_server -def downloads(request): - - root = settings.PYLOAD.get_conf_val("general", "download_folder") - - if not isdir(root): - return base(request, [_('Download directory not found.')]) - data = { - 'folder': [], - 'files': [] - } - - for item in sorted(listdir(root)): - if isdir(join(root, item)): - folder = { - 'name': item, - 'path': item, - 'files': [] - } - for file in sorted(listdir(join(root, item))): - try: - if isfile(join(root, item, file)): - folder['files'].append(file) - except: - pass - - data['folder'].append(folder) - elif isfile(join(root, item)): - 
data['files'].append(item) - - - return render_to_response(join(settings.TEMPLATE, 'downloads.html'), RequestContext(request, {'files': data}, [status_proc])) - -@login_required -@permission('pyload.can_download') -@check_server -def download(request, path): - path = unquote(path) - path = path.split("/") - - root = settings.PYLOAD.get_conf_val("general", "download_folder") - - dir = join(root, path[1].replace('..', '')) - if isdir(dir) or isfile(dir): - if isdir(dir): filepath = join(dir, path[2]) - elif isfile(dir): filepath = dir - - if isfile(filepath): - try: - type, encoding = mimetypes.guess_type(filepath) - if type is None: - type = 'application/octet-stream' - - response = HttpResponse(mimetype=type) - response['Content-Length'] = str(stat(filepath).st_size) - - if encoding is not None: - response['Content-Encoding'] = encoding - - response.write(file(filepath, "rb").read()) - return response - - except Exception, e: - return HttpResponseNotFound("File not Found. %s" % str(e)) - - return HttpResponseNotFound("File not Found.") - -@login_required -@permission('pyload.can_see_logs') -@check_server -def logs(request, item=-1): - - perpage = request.session.get('perpage', 34) - reversed = request.session.get('reversed', False) - - warning = "" - conf = settings.PYLOAD.get_config() - if not conf['log']['file_log']['value']: - warning = "Warning: File log is disabled, see settings page." - - perpage_p = ((20,20), (34, 34), (40, 40), (100, 100), (0,'all')) - fro = None - - if request.method == 'POST': - try: - fro = datetime.strptime(request.POST['from'], '%d.%m.%Y %H:%M:%S') - except: - pass - try: - perpage = int(request.POST['perpage']) - request.session['perpage'] = perpage - - reversed = bool(request.POST.get('reversed', False)) - request.session['reversed'] = reversed - except: - pass - - try: - item = int(item) - except: - pass - - log = settings.PYLOAD.get_log() - if perpage == 0: - item = 0 - - if item < 1 or type(item) is not int: - item = 1 if len(log) - perpage + 1 < 1 else len(log) - perpage + 1 - - if type(fro) is datetime: # we will search for datetime - item = -1 - - data = [] - counter = 0 - perpagecheck = 0 - for l in log: - counter = counter+1 - - if counter >= item: - try: - date,time,level,message = l.split(" ", 3) - dtime = datetime.strptime(date+' '+time, '%d.%m.%Y %H:%M:%S') - except: - dtime = None - date = '?' - time = ' ' - level = '?' - message = l - if item == -1 and dtime is not None and fro <= dtime: - item = counter #found our datetime - if item >= 0: - data.append({'line': counter, 'date': date+" "+time, 'level':level, 'message': message}) - perpagecheck += 1 - if fro is None and dtime is not None: #if fro not set set it to first showed line - fro = dtime - if perpagecheck >= perpage > 0: - break - - if fro is None: #still not set, empty log? 
- fro = datetime.now() - if reversed: - data.reverse() - return render_to_response(join(settings.TEMPLATE, 'logs.html'), RequestContext(request, {'warning': warning, 'log': data, 'from': fro.strftime('%d.%m.%Y %H:%M:%S'), 'reversed': reversed, 'perpage':perpage, 'perpage_p':sorted(perpage_p), 'iprev': 1 if item - perpage < 1 else item - perpage, 'inext': (item + perpage) if item+perpage < len(log) else item}, [status_proc])) - -@login_required -@permission('pyload.can_add_dl') -@check_server -def collector(request): - queue = settings.PYLOAD.get_collector_info() - - data = zip(queue.keys(), queue.values()) - data.sort(key=get_sort_key) - - return render_to_response(join(settings.TEMPLATE, 'collector.html'), RequestContext(request, {'content': data}, [status_proc])) - - -@login_required -@permission('pyload.can_change_status') -@check_server -def config(request): - conf = settings.PYLOAD.get_config() - plugin = settings.PYLOAD.get_plugin_config() - accs = settings.PYLOAD.get_accounts(False, False) - messages = [] - - for section in chain(conf.itervalues(), plugin.itervalues()): - for key, option in section.iteritems(): - if key == "desc": continue - - if ";" in option["type"]: - option["list"] = option["type"].split(";") - - if request.META.get('REQUEST_METHOD', "GET") == "POST": - - errors = [] - - for key, value in request.POST.iteritems(): - if not "|" in key: continue - sec, skey, okey = key.split("|")[:] - - if sec == "General": - - if conf.has_key(skey): - if conf[skey].has_key(okey): - try: - if str(conf[skey][okey]['value']) != value: - settings.PYLOAD.set_conf_val(skey, okey, value) - except Exception, e: - errors.append("%s | %s : %s" % (skey, okey, e)) - else: - continue - else: - continue - - elif sec == "Plugin": - if plugin.has_key(skey): - if plugin[skey].has_key(okey): - try: - if str(plugin[skey][okey]['value']) != value: - settings.PYLOAD.set_conf_val(skey, okey, value, "plugin") - except Exception, e: - errors.append("%s | %s : %s" % (skey, okey, e)) - else: - continue - else: - continue - elif sec == "Accounts": - if ";" in okey: - action, name = okey.split(";") - if action == "delete": - settings.PYLOAD.remove_account(skey, name) - - if okey == "newacc" and value: - # add account - - pw = request.POST.get("Accounts|%s|newpw" % skey) - - settings.PYLOAD.update_account(skey, value, pw) - - for pluginname, accdata in accs.iteritems(): - for data in accdata: - newpw = request.POST.get("Accounts|%s|password;%s" % (pluginname, data["login"]), "").strip() - time = request.POST.get("Accounts|%s|time;%s" % (pluginname, data["login"]), "").strip() - - if newpw or (time and (not data["options"].has_key("time") or [time] != data["options"]["time"])): - settings.PYLOAD.update_account(pluginname, data["login"], newpw, {"time": [time]}) - - - if errors: - messages.append(_("Error occured when setting the following options:")) - messages.append("") - messages += errors - else: - messages.append(_("All options were set correctly.")) - - accs = deepcopy(settings.PYLOAD.get_accounts(False, False)) - for accounts in accs.itervalues(): - for data in accounts: - if data["trafficleft"] == -1: - data["trafficleft"] = _("unlimited") - elif not data["trafficleft"]: - data["trafficleft"] = _("not available") - else: - data["trafficleft"] = formatSize(data["trafficleft"]) - - if data["validuntil"] == -1: - data["validuntil"] = _("unlimited") - elif not data["validuntil"]: - data["validuntil"] = _("not available") - else: - t = localtime(data["validuntil"]) - data["validuntil"] = 
strftime("%d.%m.%Y",t) - - if data["options"].has_key("time"): - try: - data["time"] = data["options"]["time"][0] - except: - data["time"] = "invalid" - - return render_to_response(join(settings.TEMPLATE, 'settings.html'), RequestContext(request, {'conf': {'Plugin':plugin, 'General':conf, 'Accounts': accs}, 'errors': messages}, [status_proc])) - -@login_required -@permission('pyload.can_change_status') -@check_server -def package_ui(request): - return render_to_response(join(settings.TEMPLATE, 'package_ui.js'), RequestContext(request, {}, )) - - -@login_required -@permission('pyload.can_change_status') -@check_server -def root(request, type): - cwd = os.getcwd() - return HttpResponseRedirect(reverse('path', args=[cwd[1:], type])) - -@login_required -@permission('pyload.can_change_status') -@check_server -def path(request, path, type): - - path = os.path.normpath(quotepath.unquotepath(path)) - - if os.path.isfile(path): - oldfile = path - path = os.path.dirname(path) - else: - oldfile = '' - - abs = False - - if os.path.isdir(path): - if os.path.isabs(path): - cwd = os.path.abspath(path) - abs = True - else: - cwd = relpath(path) - else: - cwd = os.getcwd() - - try: - cwd = cwd.encode("utf8") - except: - pass - - cwd = os.path.normpath(os.path.abspath(cwd)) - parentdir = os.path.dirname(cwd) - if not abs: - if os.path.abspath(cwd) == "/": - cwd = relpath(cwd) - else: - cwd = relpath(cwd) + os.path.sep - parentdir = relpath(parentdir) + os.path.sep - - if os.path.abspath(cwd) == "/": - parentdir = "" - - try: - folders = os.listdir(cwd) - except: - folders = [] - - files = [] - - for f in folders: - try: - f = f.decode(getfilesystemencoding()) - data = {} - data['name'] = f - data['fullpath'] = os.path.join(cwd, f) - data['sort'] = data['fullpath'].lower() - data['modified'] = datetime.fromtimestamp(int(os.path.getmtime(os.path.join(cwd, f)))) - data['ext'] = os.path.splitext(f)[1] - except: - continue - - if os.path.isdir(os.path.join(cwd, f)): - data['type'] = 'dir' - else: - data['type'] = 'file' - - if os.path.isfile(os.path.join(cwd, f)): - data['size'] = os.path.getsize(os.path.join(cwd, f)) - - power = 0 - while (data['size']/1024) > 0.3: - power += 1 - data['size'] = data['size'] / 1024. - units = ('', 'K','M','G','T') - data['unit'] = units[power]+'Byte' - else: - data['size'] = '' - - files.append(data) - - files = sorted(files, key=itemgetter('type', 'sort')) - - return render_to_response(join(settings.TEMPLATE, 'pathchooser.html'), {'cwd': cwd, 'files': files, 'parentdir': parentdir, 'type': type, 'oldfile': oldfile, 'absolute': abs}, RequestContext(request)) \ No newline at end of file diff --git a/module/web/pyload_app.py b/module/web/pyload_app.py index 0f8dd859c..d240564ab 100644 --- a/module/web/pyload_app.py +++ b/module/web/pyload_app.py @@ -31,7 +31,7 @@ from sys import getfilesystemencoding from hashlib import sha1 from urllib import unquote -from bottle import route, static_file, request, response, redirect, HTTPError +from bottle import route, static_file, request, response, redirect, HTTPError, error from webinterface import PYLOAD, PROJECT_DIR @@ -65,10 +65,16 @@ def base(messages): ## Views +@error(500) +def error500(error): + return base(["An Error occured, please enable debug mode to get more details.", error, + error.traceback.replace("\n", "
")]) + @route('/media/:path#.+#') def server_static(path): - response.headers['Expires'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(time.time() + 60 * 60 * 24 * 7)) + response.headers['Expires'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", + time.gmtime(time.time() + 60 * 60 * 24 * 7)) response.headers['Cache-control'] = "public" return static_file(path, root=join(PROJECT_DIR, "media")) @@ -258,7 +264,8 @@ def config(): newpw = request.POST.get("Accounts|%s|password;%s" % (pluginname, data["login"]), "").strip() new_time = request.POST.get("Accounts|%s|time;%s" % (pluginname, data["login"]), "").strip() - if newpw or (new_time and (not data["options"].has_key("time") or [new_time] != data["options"]["time"])): + if newpw or ( + new_time and (not data["options"].has_key("time") or [new_time] != data["options"]["time"])): PYLOAD.update_account(pluginname, data["login"], newpw, {"time": [new_time]}) if errors: @@ -292,7 +299,6 @@ def config(): except: data["time"] = "invalid" - return render_to_response('settings.html', {'conf': {'Plugin': plugin, 'General': conf, 'Accounts': accs}, 'errors': messages}, [pre_processor]) @@ -300,7 +306,8 @@ def config(): @route("/package_ui.js") @login_required('can_see_dl') def package_ui(): - response.headers['Expires'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(time.time() + 60 * 60 * 24 * 7)) + response.headers['Expires'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", + time.gmtime(time.time() + 60 * 60 * 24 * 7)) response.headers['Cache-control'] = "public" return render_to_response('package_ui.js') @@ -380,11 +387,11 @@ def path(file="", path=""): data['size'] = os.path.getsize(join(cwd, f)) power = 0 - while (data['size']/1024) > 0.3: + while (data['size'] / 1024) > 0.3: power += 1 data['size'] /= 1024. - units = ('', 'K','M','G','T') - data['unit'] = units[power]+'Byte' + units = ('', 'K', 'M', 'G', 'T') + data['unit'] = units[power] + 'Byte' else: data['size'] = '' @@ -392,7 +399,9 @@ def path(file="", path=""): files = sorted(files, key=itemgetter('type', 'sort')) - return render_to_response('pathchooser.html', {'cwd': cwd, 'files': files, 'parentdir': parentdir, 'type': type, 'oldfile': oldfile, 'absolute': abs}, []) + return render_to_response('pathchooser.html', + {'cwd': cwd, 'files': files, 'parentdir': parentdir, 'type': type, 'oldfile': oldfile, + 'absolute': abs}, []) @route("/logs") @route("/logs", method="POST") @@ -410,7 +419,7 @@ def logs(item=-1): if not conf['log']['file_log']['value']: warning = "Warning: File log is disabled, see settings page." - perpage_p = ((20,20), (34, 34), (40, 40), (100, 100), (0,'all')) + perpage_p = ((20, 20), (34, 34), (40, 40), (100, 100), (0, 'all')) fro = None if request.environ.get('REQUEST_METHOD', "GET") == "POST": @@ -439,7 +448,7 @@ def logs(item=-1): item = 0 if item < 1 or type(item) is not int: - item = 1 if len(log) - perpage + 1 < 1 else len(log) - perpage + 1 + item = 1 if len(log) - perpage + 1 < 1 else len(log) - perpage + 1 if type(fro) is datetime: # we will search for datetime item = -1 @@ -452,8 +461,8 @@ def logs(item=-1): if counter >= item: try: - date,time,level,message = l.split(" ", 3) - dtime = datetime.strptime(date+' '+time, '%d.%m.%Y %H:%M:%S') + date, time, level, message = l.split(" ", 3) + dtime = datetime.strptime(date + ' ' + time, '%d.%m.%Y %H:%M:%S') except: dtime = None date = '?' 
@@ -463,7 +472,7 @@ def logs(item=-1): if item == -1 and dtime is not None and fro <= dtime: item = counter #found our datetime if item >= 0: - data.append({'line': counter, 'date': date+" "+time, 'level':level, 'message': message}) + data.append({'line': counter, 'date': date + " " + time, 'level': level, 'message': message}) perpagecheck += 1 if fro is None and dtime is not None: #if fro not set set it to first showed line fro = dtime @@ -474,4 +483,12 @@ def logs(item=-1): fro = datetime.now() if reversed: data.reverse() - return render_to_response('logs.html', {'warning': warning, 'log': data, 'from': fro.strftime('%d.%m.%Y %H:%M:%S'), 'reversed': reversed, 'perpage':perpage, 'perpage_p':sorted(perpage_p), 'iprev': 1 if item - perpage < 1 else item - perpage, 'inext': (item + perpage) if item+perpage < len(log) else item}, [pre_processor]) + return render_to_response('logs.html', {'warning': warning, 'log': data, 'from': fro.strftime('%d.%m.%Y %H:%M:%S'), + 'reversed': reversed, 'perpage': perpage, 'perpage_p': sorted(perpage_p), + 'iprev': 1 if item - perpage < 1 else item - perpage, + 'inext': (item + perpage) if item + perpage < len(log) else item}, + [pre_processor]) + +@route("/admin") +def admin(): + return base([]) \ No newline at end of file -- cgit v1.2.3
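
For reference only, a minimal self-contained sketch of the two bottle patterns the pyload_app.py hunks above rely on: an @error(500) handler that surfaces the traceback, and a static-file route that sets week-long client caching headers. MEDIA_ROOT, the HTML line-break separator, and the run() port are illustrative assumptions, not taken from the patch; the real module builds its response through webinterface's base() template helper instead of returning a plain string.

    # sketch.py -- illustrative only; assumes bottle's old ":name#regex#" route syntax
    import time
    from os.path import dirname, join

    from bottle import error, route, response, static_file, run

    MEDIA_ROOT = join(dirname(__file__), "media")  # assumed project layout

    @error(500)
    def error500(err):
        # bottle hands the handler an HTTPError; err.traceback holds the formatted
        # traceback when the server catches an unhandled exception
        tb = (err.traceback or "").replace("\n", "<br>")
        return "An error occurred, enable debug mode for details.<br>%s" % tb

    @route('/media/:path#.+#')
    def server_static(path):
        # let clients cache static assets for one week
        response.headers['Expires'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                                     time.gmtime(time.time() + 60 * 60 * 24 * 7))
        response.headers['Cache-control'] = "public"
        return static_file(path, root=MEDIA_ROOT)

    if __name__ == "__main__":
        run(host="localhost", port=8001)  # port is an arbitrary choice for the sketch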