author    Walter Purcaro <vuolter@gmail.com> 2014-09-08 00:29:57 +0200
committer Walter Purcaro <vuolter@gmail.com> 2014-09-14 11:02:23 +0200
commit    68d662e689cd42687341c550fb6ebb74e6968d21 (patch)
tree      486cef41bd928b8db704894233b2cef94a6e346f /pyload/common
parent    save_join -> safe_join & save_path -> safe_filename (diff)
download  pyload-68d662e689cd42687341c550fb6ebb74e6968d21.tar.xz
module -> pyload
Diffstat (limited to 'pyload/common')
-rw-r--r--  pyload/common/APIExerciser.py    157
-rw-r--r--  pyload/common/ImportDebugger.py   19
-rw-r--r--  pyload/common/JsEngine.py        155
-rw-r--r--  pyload/common/__init__.py          0
-rw-r--r--  pyload/common/json_layer.py       12
-rw-r--r--  pyload/common/packagetools.py    136
-rw-r--r--  pyload/common/pavement.py        412
-rw-r--r--  pyload/common/pylgettext.py       60
-rw-r--r--  pyload/common/test_api.py         20
-rw-r--r--  pyload/common/test_json.py        48
10 files changed, 1019 insertions, 0 deletions
diff --git a/pyload/common/APIExerciser.py b/pyload/common/APIExerciser.py
new file mode 100644
index 000000000..886c72a4a
--- /dev/null
+++ b/pyload/common/APIExerciser.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+
+import string
+from threading import Thread
+from random import choice, random, sample, randint
+from time import time, sleep
+from math import floor
+import gc
+
+from traceback import print_exc, format_exc
+
+from pyload.remote.thriftbackend.ThriftClient import ThriftClient, Destination
+
+def createURLs():
+ """ create some urls, some may fail """
+ urls = []
+ for x in range(0, randint(20, 100)):
+ name = "DEBUG_API"
+ if randint(0, 5) == 5:
+ name = "" #this link will fail
+
+ urls.append(name + "".join(sample(string.ascii_letters, randint(10, 20))))
+
+ return urls
+
+AVOID = (0, 3, 8)
+
+idPool = 0
+sumCalled = 0
+
+
+def startApiExerciser(core, n):
+ for i in range(n):
+ APIExerciser(core).start()
+
+class APIExerciser(Thread):
+
+
+ def __init__(self, core, thrift=False, user=None, pw=None):
+ global idPool
+
+ Thread.__init__(self)
+ self.setDaemon(True)
+ self.core = core
+        self.count = 0 #number of api calls made
+ self.time = time()
+
+ if thrift:
+ self.api = ThriftClient(user=user, password=pw)
+ else:
+ self.api = core.api
+
+
+ self.id = idPool
+
+ idPool += 1
+
+ #self.start()
+
+ def run(self):
+
+ self.core.log.info("API Excerciser started %d" % self.id)
+
+ out = open("error.log", "ab")
+ #core errors are not logged of course
+ out.write("\n" + "Starting\n")
+ out.flush()
+
+ while True:
+ try:
+ self.testAPI()
+ except Exception:
+                self.core.log.error("Exerciser %d threw an exception" % self.id)
+ print_exc()
+ out.write(format_exc() + 2 * "\n")
+ out.flush()
+
+ if not self.count % 100:
+ self.core.log.info("Exerciser %d tested %d api calls" % (self.id, self.count))
+ if not self.count % 1000:
+ out.flush()
+
+ if not sumCalled % 1000: #not thread safe
+ self.core.log.info("Exercisers tested %d api calls" % sumCalled)
+ persec = sumCalled / (time() - self.time)
+ self.core.log.info("Approx. %.2f calls per second." % persec)
+ self.core.log.info("Approx. %.2f ms per call." % (1000 / persec))
+ self.core.log.info("Collected garbage: %d" % gc.collect())
+
+
+ #sleep(random() / 500)
+
+ def testAPI(self):
+ global sumCalled
+
+ m = ["statusDownloads", "statusServer", "addPackage", "getPackageData", "getFileData", "deleteFiles",
+ "deletePackages", "getQueue", "getCollector", "getQueueData", "getCollectorData", "isCaptchaWaiting",
+ "getCaptchaTask", "stopAllDownloads", "getAllInfo", "getServices" , "getAccounts", "getAllUserData"]
+
+ method = choice(m)
+ #print "Testing:", method
+
+ if hasattr(self, method):
+ res = getattr(self, method)()
+ else:
+ res = getattr(self.api, method)()
+
+ self.count += 1
+ sumCalled += 1
+
+ #print res
+
+ def addPackage(self):
+ name = "".join(sample(string.ascii_letters, 10))
+ urls = createURLs()
+
+ self.api.addPackage(name, urls, choice([Destination.Queue, Destination.Collector]))
+
+
+ def deleteFiles(self):
+ info = self.api.getQueueData()
+ if not info: return
+
+ pack = choice(info)
+ fids = pack.links
+
+ if len(fids):
+ fids = [f.fid for f in sample(fids, randint(1, max(len(fids) / 2, 1)))]
+ self.api.deleteFiles(fids)
+
+
+ def deletePackages(self):
+ info = choice([self.api.getQueue(), self.api.getCollector()])
+ if not info: return
+
+ pids = [p.pid for p in info]
+ if len(pids):
+ pids = sample(pids, randint(1, max(floor(len(pids) / 2.5), 1)))
+ self.api.deletePackages(pids)
+
+ def getFileData(self):
+ info = self.api.getQueueData()
+ if info:
+ p = choice(info)
+ if p.links:
+ self.api.getFileData(choice(p.links).fid)
+
+ def getPackageData(self):
+ info = self.api.getQueue()
+ if info:
+ self.api.getPackageData(choice(info).pid)
+
+ def getAccounts(self):
+ self.api.getAccounts(False)
+
+ def getCaptchaTask(self):
+ self.api.getCaptchaTask(False)
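
A minimal usage sketch for the exerciser above: the in-process call assumes a running pyLoad core object is available as `core`; the thrift credentials are the same placeholders that pyload/common/test_api.py uses.

    # in-process: spawn four exerciser threads against core.api (assumes `core` exists)
    from pyload.common.APIExerciser import startApiExerciser, APIExerciser
    startApiExerciser(core, 4)

    # remote: exercise a core over the thrift backend, one random call at a time
    remote = APIExerciser(None, thrift=True, user="TestUser", pw="pwhere")
    remote.testAPI()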
diff --git a/pyload/common/ImportDebugger.py b/pyload/common/ImportDebugger.py
new file mode 100644
index 000000000..ae3aef629
--- /dev/null
+++ b/pyload/common/ImportDebugger.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+import sys
+
+class ImportDebugger(object):
+
+ def __init__(self):
+ self.imported = {}
+
+ def find_module(self, name, path=None):
+
+ if name not in self.imported:
+ self.imported[name] = 0
+
+ self.imported[name] += 1
+
+ print name, path
+
+sys.meta_path.append(ImportDebugger())
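
The hook registers itself on sys.meta_path when the module is first imported, so a plain side-effect import is enough to start tracing; a minimal sketch:

    import pyload.common.ImportDebugger  # side-effect import installs the meta_path hook

    import smtplib  # first-time imports from here on are printed as "<name> <path>"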
diff --git a/pyload/common/JsEngine.py b/pyload/common/JsEngine.py
new file mode 100644
index 000000000..46789f64d
--- /dev/null
+++ b/pyload/common/JsEngine.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+"""
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License,
+ or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+ @author: RaNaN
+"""
+
+from imp import find_module
+from os.path import join, exists
+from urllib import quote
+
+
+ENGINE = ""
+
+DEBUG = False
+JS = False
+PYV8 = False
+RHINO = False
+
+
+if not ENGINE:
+ try:
+ import subprocess
+
+ subprocess.Popen(["js", "-v"], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+ p = subprocess.Popen(["js", "-e", "print(23+19)"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ #integrity check
+ if out.strip() == "42":
+ ENGINE = "js"
+ JS = True
+ except:
+ pass
+
+if not ENGINE or DEBUG:
+ try:
+ find_module("PyV8")
+ ENGINE = "pyv8"
+ PYV8 = True
+ except:
+ pass
+
+if not ENGINE or DEBUG:
+ try:
+ path = "" #path where to find rhino
+
+ if exists("/usr/share/java/js.jar"):
+ path = "/usr/share/java/js.jar"
+ elif exists("js.jar"):
+ path = "js.jar"
+        elif exists(join(pypath, "js.jar")): #may raise an exception, but js.jar wasn't found anyway
+ path = join(pypath, "js.jar")
+
+ if not path:
+ raise Exception
+
+ import subprocess
+
+ p = subprocess.Popen(["java", "-cp", path, "org.mozilla.javascript.tools.shell.Main", "-e", "print(23+19)"],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ #integrity check
+ if out.strip() == "42":
+ ENGINE = "rhino"
+ RHINO = True
+ except:
+ pass
+
+class JsEngine:
+ def __init__(self):
+ self.engine = ENGINE
+ self.init = False
+
+ def __nonzero__(self):
+ return False if not ENGINE else True
+
+ def eval(self, script):
+ if not self.init:
+ if ENGINE == "pyv8" or (DEBUG and PYV8):
+                global PyV8
+                import PyV8
+
+ self.init = True
+
+ if type(script) == unicode:
+ script = script.encode("utf8")
+
+ if not ENGINE:
+ raise Exception("No JS Engine")
+
+ if not DEBUG:
+ if ENGINE == "pyv8":
+ return self.eval_pyv8(script)
+ elif ENGINE == "js":
+ return self.eval_js(script)
+ elif ENGINE == "rhino":
+ return self.eval_rhino(script)
+ else:
+ results = []
+ if PYV8:
+ res = self.eval_pyv8(script)
+ print "PyV8:", res
+ results.append(res)
+ if JS:
+ res = self.eval_js(script)
+ print "JS:", res
+ results.append(res)
+ if RHINO:
+ res = self.eval_rhino(script)
+ print "Rhino:", res
+ results.append(res)
+
+ warning = False
+ for x in results:
+ for y in results:
+ if x != y:
+ warning = True
+
+ if warning: print "### WARNING ###: Different results"
+
+ return results[0]
+
+ def eval_pyv8(self, script):
+ rt = PyV8.JSContext()
+ rt.enter()
+ return rt.eval(script)
+
+ def eval_js(self, script):
+ script = "print(eval(unescape('%s')))" % quote(script)
+ p = subprocess.Popen(["js", "-e", script], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=-1)
+ out, err = p.communicate()
+ res = out.strip()
+ return res
+
+ def eval_rhino(self, script):
+ script = "print(eval(unescape('%s')))" % quote(script)
+ p = subprocess.Popen(["java", "-cp", path, "org.mozilla.javascript.tools.shell.Main", "-e", script],
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=-1)
+ out, err = p.communicate()
+ res = out.strip()
+ return res.decode("utf8").encode("ISO-8859-1")
+
+ def error(self):
+ return _("No js engine detected, please install either Spidermonkey, ossp-js, pyv8 or rhino")
diff --git a/pyload/common/__init__.py b/pyload/common/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/pyload/common/__init__.py
diff --git a/pyload/common/json_layer.py b/pyload/common/json_layer.py
new file mode 100644
index 000000000..bb3937cdc
--- /dev/null
+++ b/pyload/common/json_layer.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+
+# abstraction layer for json operations
+
+try: # since python 2.6
+ import json
+ from json import loads as json_loads
+ from json import dumps as json_dumps
+except ImportError: #use system simplejson if available
+ import simplejson as json
+ from simplejson import loads as json_loads
+ from simplejson import dumps as json_dumps
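
Callers import the loads/dumps aliases from this layer instead of choosing a backend themselves; a minimal sketch:

    from pyload.common.json_layer import json_loads, json_dumps

    data = json_loads('{"queue": true, "speed": 0}')  # same call whether json or simplejson backs it
    print json_dumps(data)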
diff --git a/pyload/common/packagetools.py b/pyload/common/packagetools.py
new file mode 100644
index 000000000..d5ab4d182
--- /dev/null
+++ b/pyload/common/packagetools.py
@@ -0,0 +1,136 @@
+# JDownloader/src/jd/controlling/LinkGrabberPackager.java
+
+import re
+from urlparse import urlparse
+
+def matchFirst(string, *args):
+ """ matches against list of regexp and returns first match"""
+ for patternlist in args:
+ for pattern in patternlist:
+ r = pattern.search(string)
+ if r is not None:
+ name = r.group(1)
+ return name
+
+ return string
+
+
+def parseNames(files):
+ """ Generates packages names from name, data lists
+
+ :param files: list of (name, data)
+    :return: package names mapped to data lists (e.g. urls)
+ """
+ packs = {}
+
+ endings = "\\.(3gp|7zip|7z|abr|ac3|aiff|aifc|aif|ai|au|avi|bin|bz2|cbr|cbz|ccf|cue|cvd|chm|dta|deb|divx|djvu|dlc|dmg|doc|docx|dot|eps|exe|ff|flv|f4v|gsd|gif|gz|iwd|iso|ipsw|java|jar|jpg|jpeg|jdeatme|load|mws|mw|m4v|m4a|mkv|mp2|mp3|mp4|mov|movie|mpeg|mpe|mpg|msi|msu|msp|nfo|npk|oga|ogg|ogv|otrkey|pkg|png|pdf|pptx|ppt|pps|ppz|pot|psd|qt|rmvb|rm|rar|ram|ra|rev|rnd|r\\d+|rpm|run|rsdf|rtf|sh(!?tml)|srt|snd|sfv|swf|tar|tif|tiff|ts|txt|viv|vivo|vob|wav|wmv|xla|xls|xpi|zeno|zip|z\\d+|_[_a-z]{2}|\\d+$)"
+
+ rarPats = [re.compile("(.*)(\\.|_|-)pa?r?t?\\.?[0-9]+.(rar|exe)$", re.I),
+ re.compile("(.*)(\\.|_|-)part\\.?[0]*[1].(rar|exe)$", re.I),
+ re.compile("(.*)\\.rar$", re.I),
+ re.compile("(.*)\\.r\\d+$", re.I),
+ re.compile("(.*)(\\.|_|-)\\d+$", re.I)]
+
+ zipPats = [re.compile("(.*)\\.zip$", re.I),
+ re.compile("(.*)\\.z\\d+$", re.I),
+ re.compile("(?is).*\\.7z\\.[\\d]+$", re.I),
+ re.compile("(.*)\\.a.$", re.I)]
+
+ ffsjPats = [re.compile("(.*)\\._((_[a-z])|([a-z]{2}))(\\.|$)"),
+ re.compile("(.*)(\\.|_|-)[\\d]+(" + endings + "$)", re.I)]
+
+ iszPats = [re.compile("(.*)\\.isz$", re.I),
+ re.compile("(.*)\\.i\\d{2}$", re.I)]
+
+ pat1 = re.compile("(\\.?CD\\d+)", re.I)
+ pat2 = re.compile("(\\.?part\\d+)", re.I)
+
+ pat3 = re.compile("(.+)[\\.\\-_]+$")
+ pat4 = re.compile("(.+)\\.\\d+\\.xtm$")
+
+ for file, url in files:
+ patternMatch = False
+
+ if file is None:
+ continue
+
+ # remove trailing /
+ name = file.rstrip('/')
+
+ # extract last path part .. if there is a path
+ split = name.rsplit("/", 1)
+ if len(split) > 1:
+ name = split.pop(1)
+
+        #check if an already existing package may be ok for this file
+ # found = False
+ # for pack in packs:
+ # if pack in file:
+ # packs[pack].append(url)
+ # found = True
+ # break
+ #
+ # if found: continue
+
+ # unrar pattern, 7zip/zip and hjmerge pattern, isz pattern, FFSJ pattern
+ before = name
+ name = matchFirst(name, rarPats, zipPats, iszPats, ffsjPats)
+ if before != name:
+ patternMatch = True
+
+ # xtremsplit pattern
+ r = pat4.search(name)
+ if r is not None:
+ name = r.group(1)
+
+ # remove part and cd pattern
+ r = pat1.search(name)
+ if r is not None:
+ name = name.replace(r.group(0), "")
+ patternMatch = True
+
+ r = pat2.search(name)
+ if r is not None:
+ name = name.replace(r.group(0), "")
+ patternMatch = True
+
+ # additional checks if extension pattern matched
+ if patternMatch:
+ # remove extension
+ index = name.rfind(".")
+ if index <= 0:
+ index = name.rfind("_")
+ if index > 0:
+ length = len(name) - index
+ if length <= 4:
+ name = name[:-length]
+
+ # remove endings like . _ -
+ r = pat3.search(name)
+ if r is not None:
+ name = r.group(1)
+
+ # replace . and _ with space
+ name = name.replace(".", " ")
+ name = name.replace("_", " ")
+
+ name = name.strip()
+ else:
+ name = ""
+
+ # fallback: package by hoster
+ if not name:
+ name = urlparse(file).hostname
+ if name: name = name.replace("www.", "")
+
+ # fallback : default name
+ if not name:
+ name = "unknown"
+
+ # build mapping
+ if name in packs:
+ packs[name].append(url)
+ else:
+ packs[name] = [url]
+
+ return packs
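
A minimal sketch of parseNames() with made-up links: multi-part archives collapse into one package name, and links that match no pattern fall back to their hoster:

    from pyload.common.packagetools import parseNames

    files = [("Some.Movie.part1.rar", "http://host.example/abc"),
             ("Some.Movie.part2.rar", "http://host.example/def"),
             ("http://other.example/xyz", "http://other.example/xyz")]

    print parseNames(files)
    # e.g. {'Some Movie': ['http://host.example/abc', 'http://host.example/def'],
    #       'other.example': ['http://other.example/xyz']}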
diff --git a/pyload/common/pavement.py b/pyload/common/pavement.py
new file mode 100644
index 000000000..9b2dc98b3
--- /dev/null
+++ b/pyload/common/pavement.py
@@ -0,0 +1,412 @@
+# -*- coding: utf-8 -*-
+
+from paver.easy import *
+from paver.setuputils import setup
+from paver.doctools import cog
+
+import os
+import sys
+import shutil
+import re
+from glob import glob
+from tempfile import mkdtemp
+from urllib import urlretrieve
+from subprocess import call, Popen, PIPE
+from zipfile import ZipFile
+
+PROJECT_DIR = path(__file__).dirname()
+sys.path.append(PROJECT_DIR)
+
+options = environment.options
+path("pyload").mkdir()
+
+extradeps = []
+if sys.version_info <= (2, 5):
+    extradeps.append('simplejson')
+
+setup(
+ name="pyload",
+ version="0.4.10",
+ description='Fast, lightweight and full featured download manager.',
+ long_description=open(PROJECT_DIR / "README.md").read(),
+ keywords = ("pyload", "download-manager", "one-click-hoster", "download"),
+ url="http://pyload.org",
+ download_url='http://pyload.org/download',
+ license='GPL v3',
+ author="pyLoad Team",
+ author_email="support@pyload.org",
+ platforms = ('Any',),
+ #package_dir={'pyload': "src"},
+ packages=["pyload"],
+ #package_data=find_package_data(),
+ #data_files=[],
+ include_package_data=True,
+    exclude_package_data={'pyload': ["docs*", "scripts*", "tests*"]}, #excluded from build but not from sdist
+    # 'bottle >= 0.10.0' is not in the list, because it is small and contains a few modifications
+ install_requires=['thrift >= 0.8.0', 'jinja2', 'pycurl', 'Beaker', 'BeautifulSoup >= 3.2, < 3.3'] + extradeps,
+ extras_require={
+ 'SSL': ["pyOpenSSL"],
+ 'DLC': ['pycrypto'],
+ 'lightweight webserver': ['bjoern'],
+ 'RSS plugins': ['feedparser'],
+ },
+ #setup_requires=["setuptools_hg"],
+ entry_points={
+ 'console_scripts': [
+ 'pyLoadCore = pyLoadCore:main',
+ 'pyLoadCli = pyLoadCli:main'
+ ]},
+ zip_safe=False,
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Topic :: Internet :: WWW/HTTP",
+ "Environment :: Console",
+ "Environment :: Web Environment",
+ "Intended Audience :: End Users/Desktop",
+ "License :: OSI Approved :: GNU General Public License (GPL)",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 2"
+ ]
+)
+
+options(
+ sphinx=Bunch(
+ builddir="_build",
+ sourcedir=""
+ ),
+ get_source=Bunch(
+ src="https://bitbucket.org/spoob/pyload/get/tip.zip",
+ rev=None,
+ clean=False
+ ),
+ thrift=Bunch(
+ path="../thrift/trunk/compiler/cpp/thrift",
+ gen=""
+ ),
+ virtualenv=Bunch(
+ dir="env",
+ python="python2",
+ virtual="virtualenv2",
+ ),
+ cog=Bunch(
+ pattern="*.py",
+ )
+)
+
+# xgettext args
+xargs = ["--language=Python", "--add-comments=L10N",
+ "--from-code=utf-8", "--copyright-holder=pyLoad Team", "--package-name=pyLoad",
+ "--package-version=%s" % options.version, "--msgid-bugs-address='bugs@pyload.org'"]
+
+@task
+@needs('cog')
+def html():
+ """Build html documentation"""
+ module = path("docs") / "pyload"
+    module.rmtree()
+ call_task('paver.doctools.html')
+
+
+@task
+@cmdopts([
+ ('src=', 's', 'Url to source'),
+ ('rev=', 'r', "HG revision"),
+ ("clean", 'c', 'Delete old source folder')
+])
+def get_source(options):
+ """ Downloads pyload source from bitbucket tip or given rev"""
+ if options.rev: options.url = "https://bitbucket.org/spoob/pyload/get/%s.zip" % options.rev
+
+ pyload = path("pyload")
+
+ if len(pyload.listdir()) and not options.clean:
+ return
+ elif pyload.exists():
+ pyload.rmtree()
+
+ urlretrieve(options.src, "pyload_src.zip")
+ zip = ZipFile("pyload_src.zip")
+ zip.extractall()
+ path("pyload_src.zip").remove()
+
+ folder = [x for x in path(".").dirs() if x.name.startswith("spoob-pyload-")][0]
+ folder.move(pyload)
+
+ change_mode(pyload, 0644)
+ change_mode(pyload, 0755, folder=True)
+
+ for file in pyload.files():
+ if file.name.endswith(".py"):
+ file.chmod(0755)
+
+ (pyload / ".hgtags").remove()
+ (pyload / ".gitignore").remove()
+ #(pyload / "docs").rmtree()
+
+ f = open(pyload / "__init__.py", "wb")
+ f.close()
+
+ #options.setup.packages = find_packages()
+ #options.setup.package_data = find_package_data()
+
+
+@task
+@needs('clean', 'generate_setup', 'minilib', 'get_source', 'setuptools.command.sdist')
+def sdist():
+ """ Build source code package with distutils """
+
+
+@task
+@cmdopts([
+ ('path=', 'p', 'Thrift path'),
+ ('gen=', 'g', "Extra --gen option")
+])
+def thrift(options):
+ """ Generate Thrift stubs """
+
+ print "add import for TApplicationException manually as long it is not fixed"
+
+ outdir = path("pyload") / "remote" / "thriftbackend"
+ (outdir / "gen-py").rmtree()
+
+ cmd = [options.thrift.path, "-strict", "-o", outdir, "--gen", "py:slots, dynamic", outdir / "pyload.thrift"]
+
+ if options.gen:
+ cmd.insert(len(cmd) - 1, "--gen")
+ cmd.insert(len(cmd) - 1, options.gen)
+
+ print "running", cmd
+
+ p = Popen(cmd)
+ p.communicate()
+
+ (outdir / "thriftgen").rmtree()
+ (outdir / "gen-py").move(outdir / "thriftgen")
+
+ #create light ttypes
+ from pyload.remote.socketbackend.create_ttypes import main
+ main()
+
+@task
+def compile_js():
+ """ Compile .coffee files to javascript"""
+
+ root = path("pyload") / "web" / "media" / "js"
+ for f in root.glob("*.coffee"):
+ print "generate", f
+ coffee = Popen(["coffee", "-cbs"], stdin=open(f, "rb"), stdout=PIPE)
+ yui = Popen(["yuicompressor", "--type", "js"], stdin=coffee.stdout, stdout=PIPE)
+ coffee.stdout.close()
+ content = yui.communicate()[0]
+ with open(root / f.name.replace(".coffee", ".js"), "wb") as js:
+ js.write("{% autoescape true %}\n")
+ js.write(content)
+ js.write("\n{% endautoescape %}")
+
+
+@task
+def generate_locale():
+ """ Generates localization files """
+
+ EXCLUDE = ["BeautifulSoup.py", "pyload/cli", "web/locale", "web/ajax", "web/cnl", "web/pyload",
+ "setup.py"]
+ makepot("core", path("pyload"), EXCLUDE, "./pyload.py\n")
+
+ makepot("cli", path("pyload") / "cli", [], includes="./pyload-cli.py\n")
+ makepot("setup", "", [], includes="./pyload/setup.py\n")
+
+ EXCLUDE = ["ServerThread.py", "web/media/default"]
+
+ # strings from js files
+ strings = set()
+
+ for fi in path("pyload/web").walkfiles():
+ if not fi.name.endswith(".js") and not fi.endswith(".coffee"): continue
+ with open(fi, "rb") as c:
+ content = c.read()
+
+ strings.update(re.findall(r"_\s*\(\s*\"([^\"]+)", content))
+ strings.update(re.findall(r"_\s*\(\s*\'([^\']+)", content))
+
+ trans = path("pyload") / "web" / "translations.js"
+
+ with open(trans, "wb") as js:
+ for s in strings:
+ js.write('_("%s")\n' % s)
+
+ makepot("django", path("pyload/web"), EXCLUDE, "./%s\n" % trans.relpath(), [".py", ".html"], ["--language=Python"])
+
+ trans.remove()
+
+ path("includes.txt").remove()
+
+ print "Locale generated"
+
+
+@task
+@cmdopts([
+ ('key=', 'k', 'api key')
+])
+def upload_translations(options):
+ """ Uploads the locale files to translation server """
+ tmp = path(mkdtemp())
+
+ shutil.copy('locale/crowdin.yaml', tmp)
+ os.mkdir(tmp / 'pyLoad')
+ for f in glob('locale/*.pot'):
+ if os.path.isfile(f):
+ shutil.copy(f, tmp / 'pyLoad')
+
+ config = tmp / 'crowdin.yaml'
+ content = open(config, 'rb').read()
+ content = content.format(key=options.key, tmp=tmp)
+ f = open(config, 'wb')
+ f.write(content)
+ f.close()
+
+ call(['crowdin-cli', '-c', config, 'upload', 'source'])
+
+ shutil.rmtree(tmp)
+
+ print "Translations uploaded"
+
+
+@task
+@cmdopts([
+ ('key=', 'k', 'api key')
+])
+def download_translations(options):
+ """ Downloads the translated files from translation server """
+ tmp = path(mkdtemp())
+
+ shutil.copy('locale/crowdin.yaml', tmp)
+ os.mkdir(tmp / 'pyLoad')
+ for f in glob('locale/*.pot'):
+ if os.path.isfile(f):
+ shutil.copy(f, tmp / 'pyLoad')
+
+ config = tmp / 'crowdin.yaml'
+ content = open(config, 'rb').read()
+ content = content.format(key=options.key, tmp=tmp)
+ f = open(config, 'wb')
+ f.write(content)
+ f.close()
+
+ call(['crowdin-cli', '-c', config, 'download'])
+
+ for language in (tmp / 'pyLoad').listdir():
+ if not language.isdir():
+ continue
+
+ target = path('locale') / language.basename()
+ print "Copy language %s" % target
+ if target.exists():
+ shutil.rmtree(target)
+
+ shutil.copytree(language, target)
+
+ shutil.rmtree(tmp)
+
+
+@task
+def compile_translations():
+ """ Compile PO files to MO """
+ for language in path('locale').listdir():
+ if not language.isdir():
+ continue
+
+ for f in glob(language / 'LC_MESSAGES' / '*.po'):
+ print "Compiling %s" % f
+ call(['msgfmt', '-o', f.replace('.po', '.mo'), f])
+
+
+@task
+def tests():
+ call(["nosetests2"])
+
+@task
+def virtualenv(options):
+ """Setup virtual environment"""
+ if path(options.dir).exists():
+ return
+
+ call([options.virtual, "--no-site-packages", "--python", options.python, options.dir])
+ print "$ source %s/bin/activate" % options.dir
+
+
+@task
+def clean_env():
+ """Deletes the virtual environment"""
+ env = path(options.virtualenv.dir)
+ if env.exists():
+ env.rmtree()
+
+
+@task
+@needs('generate_setup', 'minilib', 'get_source', 'virtualenv')
+def env_install():
+ """Install pyLoad into the virtualenv"""
+ venv = options.virtualenv
+ call([path(venv.dir) / "bin" / "easy_install", "."])
+
+
+@task
+def clean():
+ """Cleans build directories"""
+ path("build").rmtree()
+ path("dist").rmtree()
+
+
+#helper functions
+
+def walk_trans(path, EXCLUDE, endings=[".py"]):
+ result = ""
+
+ for f in path.walkfiles():
+ if [True for x in EXCLUDE if x in f.dirname().relpath()]: continue
+ if f.name in EXCLUDE: continue
+
+ for e in endings:
+ if f.name.endswith(e):
+ result += "./%s\n" % f.relpath()
+ break
+
+ return result
+
+
+def makepot(domain, p, excludes=[], includes="", endings=[".py"], xxargs=[]):
+ print "Generate %s.pot" % domain
+
+ f = open("includes.txt", "wb")
+ if includes:
+ f.write(includes)
+
+ if p:
+ f.write(walk_trans(path(p), excludes, endings))
+
+ f.close()
+
+ call(["xgettext", "--files-from=includes.txt", "--default-domain=%s" % domain] + xargs + xxargs)
+
+    # replace charset and move the file
+ with open("%s.po" % domain, "rb") as f:
+ content = f.read()
+
+ path("%s.po" % domain).remove()
+ content = content.replace("charset=CHARSET", "charset=UTF-8")
+
+ with open("locale/%s.pot" % domain, "wb") as f:
+ f.write(content)
+
+
+def change_owner(dir, uid, gid):
+ for p in dir.walk():
+ p.chown(uid, gid)
+
+
+def change_mode(dir, mode, folder=False):
+ for p in dir.walk():
+ if folder and p.isdir():
+ p.chmod(mode)
+ elif p.isfile() and not folder:
+ p.chmod(mode)
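
The tasks above are driven by the paver command line (task name plus the options declared in @cmdopts); a minimal sketch, with the revision value as a placeholder:

    # $ paver get_source --rev=tip    # fetch the pyload source into ./pyload
    # $ paver sdist                   # clean, regenerate setup.py/minilib, fetch source, build the package
    # $ paver generate_locale         # extract translatable strings into locale/*.pot

    # inside a pavement, tasks can also be chained programmatically, as html() does:
    from paver.easy import call_task
    call_task("compile_translations")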
diff --git a/pyload/common/pylgettext.py b/pyload/common/pylgettext.py
new file mode 100644
index 000000000..cab631cf4
--- /dev/null
+++ b/pyload/common/pylgettext.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+
+from gettext import *
+
+_searchdirs = None
+
+origfind = find
+
+def setpaths(pathlist):
+ global _searchdirs
+ if isinstance(pathlist, list):
+ _searchdirs = pathlist
+ else:
+ _searchdirs = list(pathlist)
+
+
+def addpath(path):
+ global _searchdirs
+ if _searchdirs is None:
+        _searchdirs = [path]
+ else:
+ if path not in _searchdirs:
+ _searchdirs.append(path)
+
+
+def delpath(path):
+ global _searchdirs
+ if _searchdirs is not None:
+ if path in _searchdirs:
+ _searchdirs.remove(path)
+
+
+def clearpath():
+ global _searchdirs
+ if _searchdirs is not None:
+ _searchdirs = None
+
+
+def find(domain, localedir=None, languages=None, all=False):
+ if _searchdirs is None:
+ return origfind(domain, localedir, languages, all)
+ searches = [localedir] + _searchdirs
+ results = list()
+ for dir in searches:
+ res = origfind(domain, dir, languages, all)
+ if all is False:
+ results.append(res)
+ else:
+ results.extend(res)
+ if all is False:
+ results = filter(lambda x: x is not None, results)
+ if len(results) == 0:
+ return None
+ else:
+ return results[0]
+ else:
+ return results
+
+#Is there a smarter/cleaner pythonic way for this?
+translation.func_globals['find'] = find
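
pylgettext re-exports the stdlib gettext module but lets several locale directories be registered for the patched find(); a minimal sketch with placeholder search paths (the "core" domain and locale/ directory come from generate_locale above):

    import pyload.common.pylgettext as gettext

    gettext.setpaths(["locale", "/usr/share/locale"])   # placeholder search paths
    trans = gettext.translation("core", "locale", languages=["de", "en"], fallback=True)
    trans.install()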
diff --git a/pyload/common/test_api.py b/pyload/common/test_api.py
new file mode 100644
index 000000000..4efaa35d6
--- /dev/null
+++ b/pyload/common/test_api.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+
+from pyload.common import APIExerciser
+from nose.tools import nottest
+
+
+class TestApi:
+
+ def __init__(self):
+ self.api = APIExerciser.APIExerciser(None, True, "TestUser", "pwhere")
+
+ def test_login(self):
+ assert self.api.api.login("crapp", "wrong pw") is False
+
+ #takes really long, only test when needed
+ @nottest
+ def test_random(self):
+
+ for i in range(0, 100):
+ self.api.testAPI()
diff --git a/pyload/common/test_json.py b/pyload/common/test_json.py
new file mode 100644
index 000000000..320a42d4f
--- /dev/null
+++ b/pyload/common/test_json.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+
+from urllib import urlencode
+from urllib2 import urlopen, HTTPError
+from json import loads
+
+from logging import log
+
+url = "http://localhost:8001/api/%s"
+
+class TestJson:
+
+ def call(self, name, post=None):
+ if not post: post = {}
+ post["session"] = self.key
+ u = urlopen(url % name, data=urlencode(post))
+ return loads(u.read())
+
+ def setUp(self):
+ u = urlopen(url % "login", data=urlencode({"username": "TestUser", "password": "pwhere"}))
+ self.key = loads(u.read())
+ assert self.key is not False
+
+ def test_wronglogin(self):
+ u = urlopen(url % "login", data=urlencode({"username": "crap", "password": "wrongpw"}))
+ assert loads(u.read()) is False
+
+ def test_access(self):
+ try:
+ urlopen(url % "getServerVersion")
+ except HTTPError, e:
+ assert e.code == 403
+ else:
+ assert False
+
+ def test_status(self):
+ ret = self.call("statusServer")
+ log(1, str(ret))
+ assert "pause" in ret
+ assert "queue" in ret
+
+ def test_unknown_method(self):
+ try:
+ self.call("notExisting")
+ except HTTPError, e:
+ assert e.code == 404
+ else:
+ assert False
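
The same call pattern works interactively against the JSON API that test_json exercises; a minimal sketch, assuming a core is listening on localhost:8001 with the TestUser account configured:

    from urllib import urlencode
    from urllib2 import urlopen
    from json import loads

    # log in and keep the session key, as setUp() does
    u = urlopen("http://localhost:8001/api/login",
                data=urlencode({"username": "TestUser", "password": "pwhere"}))
    session = loads(u.read())

    # call any API method with the session attached
    u = urlopen("http://localhost:8001/api/statusServer",
                data=urlencode({"session": session}))
    print loads(u.read())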