author    | ardi69 <armin@diedering.de> | 2015-04-21 06:51:24 +0200
committer | ardi69 <armin@diedering.de> | 2015-04-21 06:51:24 +0200
commit    | 2f8433b6a10505d29a1b63ea8bbd9b0bf3f7d9f6 (patch)
tree      | b82a8b5fc0a309f69733b0a004284f4ef45833d8
parent    | added check of classname == filename (diff)
parent    | Merge branch 'pr/n10_ardi69' into 0.4.10 (diff)
download  | pyload-2f8433b6a10505d29a1b63ea8bbd9b0bf3f7d9f6.tar.xz
Merge pull request #4 from vuolter/0.4.10
138 files changed, 643 insertions, 652 deletions
diff --git a/docs/build_docs.py b/docs/build_docs.py index 8a7ab7a18..05f34be44 100644 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -25,12 +25,12 @@ sys.path.append(dir_name) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. @@ -50,7 +50,7 @@ templates_path = ['_templates'] source_suffix = '.rst' # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' @@ -79,37 +79,37 @@ release = '0.4.10' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- @@ -121,17 +121,17 @@ html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. @@ -149,44 +149,44 @@ html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
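Most of the build_docs.py hunks only insert a space after the comment hash. That matches the PEP 8 rule (pycodestyle check E265) that a block comment starts with "# ", commented-out defaults included. A two-line illustration against an assumed Sphinx setting:

    #html_theme_options = {}    # old style: flagged as E265 by pycodestyle
    # html_theme_options = {}   # new style: hash, one space, then the disabled default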
-#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'pyLoaddoc' @@ -195,10 +195,10 @@ htmlhelp_basename = 'pyLoaddoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' +# latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +# latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). @@ -209,26 +209,26 @@ latex_documents = [ # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Additional stuff for the LaTeX preamble. -#latex_preamble = '' +# latex_preamble = '' # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. 
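Later hunks in this merge make the complementary change for trailing remarks, turning "# comment" into "#: comment"; the "#:" form is what epydoc and Sphinx autodoc treat as a doc-comment for the attribute it follows. A minimal sketch with illustrative attribute names:

    class ServerStatus(object):
        speed = 0      #: bytes/s; the "#:" marker is picked up by epydoc/Sphinx autodoc
        pause = False  # a plain "#" comment is ignored by the doc tools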
-#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output -------------------------------------------- diff --git a/docs/docs.conf b/docs/docs.conf index 61a2c45ee..c5e619fd6 100644 --- a/docs/docs.conf +++ b/docs/docs.conf @@ -1,5 +1,6 @@ -#@NOTE: usage: epydoc --conf docs.conf , results goes to ~/.pyload/docs +#@NOTE: usage: epydoc --conf docs.conf +# results goes to ~/.pyload/docs [epydoc] diff --git a/docs/write_addons.rst b/docs/write_addons.rst index b7f6dfdb8..58e73e675 100644 --- a/docs/write_addons.rst +++ b/docs/write_addons.rst @@ -20,12 +20,15 @@ All addons should start with something like this: :: class YourAddon(Addon): - __name = "YourAddon" + __name = "YourAddon" + __tupe = "addon" __version = "0.1" + + __config = [("activated", "bool", "Activated", "True")] + __description = "Does really cool stuff" - __config = [ ("activated" , "bool" , "Activated" , "True" ) ] - __author_name__ = ("Me") - __author_mail__ = ("me@has-no-mail.com") + __license = "Your license short name" + __authors = [("Me", "me@has-no-mail.com")] All meta-data is defined in the header, you need at least one option at ``__config`` so the user can toggle your addon on and off. Dont't overwrite the ``init`` method if not neccesary, use ``setup`` instead. @@ -130,7 +133,7 @@ Sounds complicated but is very easy to do. Just use the ``Expose`` decorator: :: Thats all, it's available via the :class:`Api <pyload.api.Api>` now. If you want to use it read :ref:`access_api`. Here is a basic example: :: - #Assuming client is a ThriftClient or Api object + # Assuming client is a ThriftClient or Api object print client.getServices() print client.call(ServiceCall("YourAddon", "invoke", "an argument")) @@ -158,7 +161,7 @@ Just store everything in ``self.info``. 
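The write_addons hunk above replaces the old __author_name__/__author_mail__ pair with the newer header fields. A fuller sketch of such a header follows, writing __type where the hunk shows __tupe; the import path and the license string are assumptions, not taken from the hunk:

    from pyload.plugin.Addon import Addon  # assumed module path


    class YourAddon(Addon):
        __name    = "YourAddon"
        __type    = "addon"
        __version = "0.1"

        __config = [("activated", "bool", "Activated", "True")]

        __description = "Does really cool stuff"
        __license     = "GPLv3"                        # placeholder short license name
        __authors     = [("Me", "me@has-no-mail.com")]

        def setup(self):
            """Use setup() instead of overriding __init__."""
            self.info = {}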
:: Usable with: :: - #Assuming client is a ThriftClient or Api object + # Assuming client is a ThriftClient or Api object print client.getAllInfo() diff --git a/docs/write_plugins.rst b/docs/write_plugins.rst index af35a8d55..8d42aea7f 100644 --- a/docs/write_plugins.rst +++ b/docs/write_plugins.rst @@ -51,7 +51,7 @@ An example ``process`` function could look like this :: """ def process(self, pyfile): - html = self.load(pyfile.url) # load the content of the orginal pyfile.url to html + html = self.load(pyfile.url) #: load the content of the orginal pyfile.url to html # parse the name from the site and set attribute in pyfile pyfile.name = self.myFunctionToParseTheName(html) diff --git a/locale/pavement.py b/locale/pavement.py index 06a4f9775..03acf3dcc 100644 --- a/locale/pavement.py +++ b/locale/pavement.py @@ -39,10 +39,10 @@ setup( author="pyLoad Team", author_email="support@pyload.org", platforms = ('Any',), - #package_dir={'pyload': "src"}, + # package_dir={'pyload': "src"}, packages=["pyload"], - #package_data=find_package_data(), - #data_files=[], + # package_data=find_package_data(), + # data_files=[], include_package_data=True, exclude_package_data={'pyload': ["docs*", "scripts*", "tests*"]}, #: exluced from build but not from sdist # 'bottle >= 0.10.0' not in list, because its small and contain little modifications @@ -53,7 +53,7 @@ setup( 'lightweight webserver': ['bjoern'], 'RSS plugins': ['feedparser'], }, - #setup_requires=["setuptools_hg"], + # setup_requires=["setuptools_hg"], entry_points={ 'console_scripts': [ 'pyLoadCore = pyLoadCore:main', @@ -152,8 +152,8 @@ def get_source(options): f = open(pyload / "__init__.py", "wb") f.close() - #options.setup.packages = find_packages() - #options.setup.package_data = find_package_data() + # options.setup.packages = find_packages() + # options.setup.package_data = find_package_data() @task @@ -191,7 +191,7 @@ def thrift(options): (outdir / "thriftgen").rmtree() (outdir / "gen-py").move(outdir / "thriftgen") - #create light ttypes + # create light ttypes from pyload.remote.socketbackend.create_ttypes import main main() @@ -270,11 +270,11 @@ def upload_translations(options): shutil.copy(f, tmp / 'pyLoad') config = tmp / 'crowdin.yaml' - content = open(config, 'rb').read() + with open(config, 'rb') as f: + content = f.read() content = content.format(key=options.key, tmp=tmp) - f = open(config, 'wb') - f.write(content) - f.close() + with open(config, 'wb') as f: + f.write(content) call(['crowdin-cli', '-c', config, 'upload', 'source']) @@ -300,11 +300,11 @@ def download_translations(options): shutil.copy(f, tmp / 'pyLoad') config = tmp / 'crowdin.yaml' - content = open(config, 'rb').read() + with open(config, 'rb') as f: + content = f.read() content = content.format(key=options.key, tmp=tmp) - f = open(config, 'wb') - f.write(content) - f.close() + with open(config, 'wb') as f: + f.write(content) call(['crowdin-cli', '-c', config, 'download']) @@ -395,14 +395,12 @@ def walk_trans(path, EXCLUDE, endings=[".py"]): def makepot(domain, p, excludes=[], includes="", endings=[".py"], xxargs=[]): print "Generate %s.pot" % domain - f = open("includes.txt", "wb") - if includes: - f.write(includes) + with open("includes.txt", "wb") as f: + if includes: + f.write(includes) - if p: - f.write(walk_trans(path(p), excludes, endings)) - - f.close() + if p: + f.write(walk_trans(path(p), excludes, endings)) call(["xgettext", "--files-from=includes.txt", "--default-domain=%s" % domain] + xargs + xxargs) diff --git a/pyload/Core.py b/pyload/Core.py index 
f723d8366..e833314f0 100755 --- a/pyload/Core.py +++ b/pyload/Core.py @@ -1,9 +1,10 @@ # -*- coding: utf-8 -*- -# @author: RaNaN, mkaay, sebnapi, spoob +# @author: RaNaN, mkaay, sebnapi, spoob, vuolter # @version: v0.4.10 -CURRENT_VERSION = '0.4.10' +from __future__ import with_statement +import pyload import __builtin__ from getopt import getopt, GetoptError @@ -41,11 +42,10 @@ from codecs import getwriter enc = get_console_encoding(sys.stdout.encoding) sys.stdout = getwriter(enc)(sys.stdout, errors="replace") + # TODO List # - configurable auth system ldap/mysql # - cron job like sheduler - - class Core(object): """pyLoad Core, one tool to rule them all... (the filehosters) :D""" @@ -68,7 +68,7 @@ class Core(object): for option, argument in options: if option in ("-v", "--version"): - print "pyLoad", CURRENT_VERSION + print "pyLoad", pyload.__version__ exit() elif option in ("-p", "--pidfile"): self.pidfile = argument @@ -127,7 +127,7 @@ class Core(object): def print_help(self): print - print "pyLoad v%s 2008-2015 the pyLoad Team" % CURRENT_VERSION + print "pyLoad v%s 2008-2015 the pyLoad Team" % pyload.__version__ print if sys.argv[0].endswith(".py"): print "Usage: python pyload.py [options]" @@ -137,7 +137,7 @@ class Core(object): print "<Options>" print " -v, --version", " " * 10, "Print version to terminal" print " -c, --clear", " " * 12, "Delete all saved packages/links" - #print " -a, --add=<link/list>", " " * 2, "Add the specified links" + # print " -a, --add=<link/list>", " " * 2, "Add the specified links" print " -u, --user", " " * 13, "Manages users" print " -d, --debug", " " * 12, "Enable debug mode" print " -s, --setup", " " * 12, "Run Setup Assistant" @@ -171,9 +171,8 @@ class Core(object): def writePidFile(self): self.deletePidFile() pid = os.getpid() - f = open(self.pidfile, "wb") - f.write(str(pid)) - f.close() + with open(self.pidfile, "wb") as f: + f.write(str(pid)) def deletePidFile(self): @@ -185,9 +184,8 @@ class Core(object): def checkPidFile(self): """ return pid as int or 0""" if os.path.isfile(self.pidfile): - f = open(self.pidfile, "rb") - pid = f.read().strip() - f.close() + with open(self.pidfile, "rb") as f: + pid = f.read().strip() if pid: pid = int(pid) return pid @@ -200,7 +198,7 @@ class Core(object): if not pid or os.name == "nt": return False try: - os.kill(pid, 0) # 0 - default signal (does nothing) + os.kill(pid, 0) #: 0 - default signal (does nothing) except Exception: return 0 @@ -253,7 +251,7 @@ class Core(object): def start(self, rpc=True, web=True): """ starts the fun :D """ - self.version = CURRENT_VERSION + self.version = pyload.__version__ if not exists("pyload.conf"): from pyload.config.Setup import SetupAssistant as Setup @@ -330,7 +328,7 @@ class Core(object): self.do_restart = False self.shuttedDown = False - self.log.info(_("Starting") + " pyLoad %s" % CURRENT_VERSION) + self.log.info(_("Starting") + " pyLoad %s" % pyload.__version__) self.log.info(_("Using home directory: %s") % getcwd()) self.writePidFile() @@ -341,10 +339,10 @@ class Core(object): self.log.debug("Remote activated: %s" % self.remote) self.check_install("Crypto", _("pycrypto to decode container files")) - #img = self.check_install("Image", _("Python Image Library (PIL) for captcha reading")) - #self.check_install("pycurl", _("pycurl to download any files"), True, True) + # img = self.check_install("Image", _("Python Image Library (PIL) for captcha reading")) + # self.check_install("pycurl", _("pycurl to download any files"), True, True) self.check_file("tmp", _("folder 
for temporary files"), True) - #tesser = self.check_install("tesseract", _("tesseract for captcha reading"), False) if os.name != "nt" else True + # tesser = self.check_install("tesseract", _("tesseract for captcha reading"), False) if os.name != "nt" else True self.captcha = True #: checks seems to fail, although tesseract is available @@ -381,7 +379,7 @@ class Core(object): self.scheduler = Scheduler(self) - #hell yeah, so many important managers :D + # hell yeah, so many important managers :D self.pluginManager = PluginManager(self) self.pullManager = PullManager(self) self.accountManager = AccountManager(self) @@ -409,19 +407,17 @@ class Core(object): link_file = join(pypath, "links.txt") if exists(link_file): - f = open(link_file, "rb") - if f.read().strip(): - self.api.addPackage("links.txt", [link_file], 1) - f.close() + with open(link_file, "rb") as f: + if f.read().strip(): + self.api.addPackage("links.txt", [link_file], 1) link_file = "links.txt" if exists(link_file): - f = open(link_file, "rb") - if f.read().strip(): - self.api.addPackage("links.txt", [link_file], 1) - f.close() + with open(link_file, "rb") as f: + if f.read().strip(): + self.api.addPackage("links.txt", [link_file], 1) - #self.scheduler.addJob(0, self.accountManager.getAccountInfos) + # self.scheduler.addJob(0, self.accountManager.getAccountInfos) self.log.info(_("Activating Accounts...")) self.accountManager.getAccountInfos() @@ -596,7 +592,7 @@ class Core(object): if not file_exists and not quiet: if file_created: - #self.log.info( _("%s created") % description ) + # self.log.info( _("%s created") % description ) pass else: if not empty: @@ -616,7 +612,7 @@ class Core(object): self.shutdown() chdir(owd) # close some open fds - for i in range(3, 50): + for i in xrange(3, 50): try: close(i) except Exception: @@ -682,7 +678,7 @@ def deamon(): sys.exit(1) # Iterate through and close some file descriptors. - for fd in range(0, 3): + for fd in xrange(0, 3): try: os.close(fd) except OSError: # ERROR, fd wasn't open to begin with (ignored) diff --git a/pyload/api/__init__.py b/pyload/api/__init__.py index b5c1dfbf4..442e9ef95 100644 --- a/pyload/api/__init__.py +++ b/pyload/api/__init__.py @@ -1,6 +1,8 @@ # -*- coding: utf-8 -*- # @author: RaNaN +from __future__ import with_statement + from base64 import standard_b64encode from os.path import join from time import time @@ -49,20 +51,20 @@ urlmatcher = re.compile(r"((https?|ftps?|xdcc|sftp):((//)|(\\\\))+[\w\d:#@%/;$() class PERMS(object): - ALL = 0 # requires no permission, but login - ADD = 1 # can add packages - DELETE = 2 # can delete packages - STATUS = 4 # see and change server status - LIST = 16 # see queue and collector - MODIFY = 32 # moddify some attribute of downloads - DOWNLOAD = 64 # can download from webinterface - SETTINGS = 128 # can access settings - ACCOUNTS = 256 # can access accounts - LOGS = 512 # can see server logs + ALL = 0 #: requires no permission, but login + ADD = 1 #: can add packages + DELETE = 2 #: can delete packages + STATUS = 4 #: see and change server status + LIST = 16 #: see queue and collector + MODIFY = 32 #: moddify some attribute of downloads + DOWNLOAD = 64 #: can download from webinterface + SETTINGS = 128 #: can access settings + ACCOUNTS = 256 #: can access accounts + LOGS = 512 #: can see server logs class ROLE(object): - ADMIN = 0 # admin has all permissions implicit + ADMIN = 0 #: admin has all permissions implicit USER = 1 @@ -84,7 +86,7 @@ class Api(Iface): These can be configured via webinterface. 
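The PERMS values above are power-of-two bit flags, so one integer per user encodes the whole permission set and a check is a bitwise AND (ALL is the zero flag that only requires login). A small illustration; has_permission is a made-up helper, not part of the Api class:

    class PERMS(object):
        ALL      = 0      # login only
        ADD      = 1
        DELETE   = 2
        STATUS   = 4
        LIST     = 16
        MODIFY   = 32
        DOWNLOAD = 64
        SETTINGS = 128
        ACCOUNTS = 256
        LOGS     = 512

    def has_permission(granted, needed):
        # the zero flag (ALL) is satisfied by any logged-in user
        return needed == PERMS.ALL or bool(granted & needed)

    granted = PERMS.ADD | PERMS.LIST | PERMS.DOWNLOAD   # value 81
    assert has_permission(granted, PERMS.LIST)
    assert not has_permission(granted, PERMS.ACCOUNTS)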
Admin user have all permissions, and are the only ones who can access the methods with no specific permission. """ - EXTERNAL = Iface # let the json api know which methods are external + EXTERNAL = Iface #: let the json api know which methods are external def __init__(self, core): @@ -147,7 +149,7 @@ class Api(Iface): self.core.addonManager.dispatchEvent("config-changed", category, option, value, section) if section == "core": self.core.config[category][option] = value - if option in ("limit_speed", "max_speed"): # not so nice to update the limit + if option in ("limit_speed", "max_speed"): #: not so nice to update the limit self.core.requestFactory.updateBucket() elif section == "plugin": self.core.config.setPlugin(category, option, value) @@ -230,7 +232,7 @@ class Api(Iface): not self.core.threadManager.pause and self.isTimeDownload(), self.core.config.get("reconnect", "activated") and self.isTimeReconnect()) for pyfile in [x.active for x in self.core.threadManager.threads if x.active and isinstance(x.active, PyFile)]: - serverStatus.speed += pyfile.getSpeed() # bytes/s + serverStatus.speed += pyfile.getSpeed() #: bytes/s return serverStatus @@ -265,9 +267,8 @@ class Api(Iface): """ filename = join(self.core.config.get("log", "log_folder"), 'log.txt') try: - fh = open(filename, "r") - lines = fh.readlines() - fh.close() + with open(filename, "r") as fh: + lines = fh.readlines() if offset >= len(lines): return [] return lines[offset:] @@ -409,9 +410,8 @@ class Api(Iface): :param data: file content :return: online check """ - th = open(join(self.core.config.get("general", "download_folder"), "tmp_" + container), "wb") - th.write(str(data)) - th.close() + with open(join(self.core.config.get("general", "download_folder"), "tmp_" + container), "wb") as th: + th.write(str(data)) return self.checkOnlineStatus(urls + [th.name]) @@ -707,9 +707,8 @@ class Api(Iface): :param filename: filename, extension is important so it can correctly decrypted :param data: file content """ - th = open(join(self.core.config.get("general", "download_folder"), "tmp_" + filename), "wb") - th.write(str(data)) - th.close() + with open(join(self.core.config.get("general", "download_folder"), "tmp_" + filename), "wb") as th: + th.write(str(data)) self.addPackage(th.name, [th.name], Destination.Queue) @@ -777,7 +776,7 @@ class Api(Iface): order = {} for pid in packs: pack = self.core.files.getPackageData(int(pid)) - while pack['order'] in order.keys(): # just in case + while pack['order'] in order.keys(): #: just in case pack['order'] += 1 order[pack['order']] = pack['id'] return order @@ -793,7 +792,7 @@ class Api(Iface): rawdata = self.core.files.getPackageData(int(pid)) order = {} for id, pyfile in rawdata['links'].iteritems(): - while pyfile['order'] in order.keys(): # just in case + while pyfile['order'] in order.keys(): #: just in case pyfile['order'] += 1 order[pyfile['order']] = pyfile['id'] return order @@ -897,7 +896,7 @@ class Api(Iface): accounts = [AccountInfo(acc['validuntil'], acc['login'], acc['options'], acc['valid'], acc['trafficleft'], acc['maxtraffic'], acc['premium'], acc['type']) for acc in group] - return accounts or [] + return accounts or list() @permission(PERMS.ALL) @@ -910,9 +909,9 @@ class Api(Iface): @permission(PERMS.ACCOUNTS) - def updateAccount(self, plugin, account, password=None, options=None): + def updateAccount(self, plugin, account, password=None, options={}): """Changes pw/options for specific account.""" - self.core.accountManager.updateAccount(plugin, account, password, options 
or {}) + self.core.accountManager.updateAccount(plugin, account, password, options) @permission(PERMS.ACCOUNTS) diff --git a/pyload/api/types.py b/pyload/api/types.py index 2fd089333..9381df3c7 100644 --- a/pyload/api/types.py +++ b/pyload/api/types.py @@ -2,7 +2,6 @@ # Autogenerated by pyload # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING - class BaseObject(object): __slots__ = [] diff --git a/pyload/cli/Cli.py b/pyload/cli/Cli.py index 84725b625..fc5236ff9 100644 --- a/pyload/cli/Cli.py +++ b/pyload/cli/Cli.py @@ -55,8 +55,8 @@ class Cli(object): self.lock = Lock() # processor funcions, these will be changed dynamically depending on control flow - self.headerHandler = self # the download status - self.bodyHandler = self # the menu section + self.headerHandler = self #: the download status + self.bodyHandler = self #: the menu section self.inputHandler = self os.system("clear") @@ -158,7 +158,7 @@ class Cli(object): line += 1 for download in data: - if download.status == 12: # downloading + if download.status == 12: #: downloading percent = download.percent z = percent / 4 speed += download.speed @@ -214,7 +214,7 @@ class Cli(object): # clear old output if line < self.lastLowestLine: - for i in range(line + 1, self.lastLowestLine + 1): + for i in xrange(line + 1, self.lastLowestLine + 1): println(i, "") self.lastLowestLine = line @@ -265,7 +265,7 @@ class Cli(object): print "No downloads running." for download in files: - if download.status == 12: # downloading + if download.status == 12: #: downloading print print_status(download) print "\tDownloading: %s @ %s/s\t %s (%s%%)" % ( download.format_eta, formatSize(download.speed), formatSize(download.size - download.bleft), @@ -321,9 +321,8 @@ class Cli(object): print _("File does not exists.") return - f = open(join(owd, path), "rb") - content = f.read() - f.close() + with open(join(owd, path), "rb") as f: + content = f.read() rid = self.client.checkOnlineStatusContainer([], basename(f.name), content).rid self.printOnlineCheck(self.client, rid) diff --git a/pyload/cli/Handler.py b/pyload/cli/Handler.py index 444d7f6d0..33e5dd8e6 100644 --- a/pyload/cli/Handler.py +++ b/pyload/cli/Handler.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- # @author: RaNaN - class Handler(object): def __init__(self, cli): diff --git a/pyload/cli/ManageFiles.py b/pyload/cli/ManageFiles.py index c010895c5..3833b2c48 100644 --- a/pyload/cli/ManageFiles.py +++ b/pyload/cli/ManageFiles.py @@ -15,9 +15,9 @@ class ManageFiles(Handler): def init(self): self.target = Destination.Queue - self.pos = 0 # position in queue - self.package = -1 # choosen package - self.mode = "" # move/delete/restart + self.pos = 0 #: position in queue + self.package = -1 #: choosen package + self.mode = "" #: move/delete/restart self.cache = None self.links = None @@ -113,7 +113,7 @@ class ManageFiles(Handler): i += 1 except Exception: pass - for _i in range(5 - i): + for _i in xrange(5 - i): println(line, "") line += 1 else: @@ -128,7 +128,7 @@ class ManageFiles(Handler): i += 1 except Exception, e: pass - for _i in range(5 - i): + for _i in xrange(5 - i): println(line, "") line += 1 @@ -168,7 +168,7 @@ class ManageFiles(Handler): inp = inp.strip() if "-" in inp: l, _, h = inp.partition("-") - r = range(int(l), int(h) + 1) + r = xrange(int(l), int(h) + 1) if package: return [p.pid for p in self.cache if p.pid in r] diff --git a/pyload/config/Parser.py b/pyload/config/Parser.py index 1d76c0164..45fb1c8d0 100644 --- a/pyload/config/Parser.py +++ b/pyload/config/Parser.py @@ 
-52,29 +52,26 @@ class ConfigParser(object): copy(join(pypath, "pyload", "config", "default.conf"), "pyload.conf") if not exists("plugin.conf"): - f = open("plugin.conf", "wb") - f.write("version: " + str(CONF_VERSION)) - f.close() + with open("plugin.conf", "wb") as f: + f.write("version: " + str(CONF_VERSION)) - f = open("pyload.conf", "rb") - v = f.readline() - f.close() + with open("pyload.conf", "rb") as f: + v = f.readline() v = v[v.find(":") + 1:].strip() if not v or int(v) < CONF_VERSION: copy(join(pypath, "pyload", "config", "default.conf"), "pyload.conf") print "Old version of config was replaced" - f = open("plugin.conf", "rb") - v = f.readline() - f.close() + with open("plugin.conf", "rb") as f: + v = f.readline() v = v[v.find(":") + 1:].strip() if not v or int(v) < CONF_VERSION: - f = open("plugin.conf", "wb") - f.write("version: " + str(CONF_VERSION)) - f.close() + with open("plugin.conf", "wb") as f: + f.write("version: " + str(CONF_VERSION)) print "Old version of plugin-config replaced" + except Exception: if n >= 3: raise @@ -104,9 +101,8 @@ class ConfigParser(object): def parseConfig(self, config): """parses a given configfile""" - f = open(config) - - config = f.read() + with open(config) as f: + config = f.read() config = config.splitlines()[1:] @@ -183,7 +179,6 @@ class ConfigParser(object): print "Config Warning" print_exc() - f.close() return conf diff --git a/pyload/config/Setup.py b/pyload/config/Setup.py index ce5a5fb88..bae75fea4 100644 --- a/pyload/config/Setup.py +++ b/pyload/config/Setup.py @@ -32,9 +32,9 @@ class SetupAssistant(object): load_translation("setup", self.lang) - #Input shorthand for yes + # Input shorthand for yes self.yes = _("y") - #Input shorthand for no + # Input shorthand for no self.no = _("n") # print @@ -261,7 +261,7 @@ class SetupAssistant(object): web = sqlite and beaker - js = True if JsEngine.find() else False + js = bool(JsEngine.find()) self.print_dep(_("JS engine"), js) if not python: diff --git a/pyload/config/default.conf b/pyload/config/default.conf index e07b92f68..b36ed6c9c 100644 --- a/pyload/config/default.conf +++ b/pyload/config/default.conf @@ -1,4 +1,4 @@ -version: 2 +version: 1 remote - "Remote": bool activated : "Activated" = True diff --git a/pyload/database/Backend.py b/pyload/database/Backend.py index b0e94711e..b6540b2be 100644 --- a/pyload/database/Backend.py +++ b/pyload/database/Backend.py @@ -1,11 +1,12 @@ # -*- coding: utf-8 -*- # @author: RaNaN, mkaay +from __future__ import with_statement + from threading import Event, Thread from os import remove from os.path import exists from shutil import move - from Queue import Queue from traceback import print_exc @@ -36,6 +37,7 @@ class style(object): def x(*args, **kwargs): if cls.db: return f(cls.db, *args, **kwargs) + return x @@ -47,6 +49,7 @@ class style(object): def x(*args, **kwargs): if cls.db: return cls.db.queue(f, *args, **kwargs) + return x @@ -73,15 +76,16 @@ class DatabaseJob(object): self.result = None self.exception = False -# import inspect -# self.frame = inspect.currentframe() + # import inspect + # self.frame = inspect.currentframe() def __repr__(self): from os.path import basename + frame = self.frame.f_back output = "" - for _i in range(5): + for _i in xrange(5): output += "\t%s:%s, %s\n" % (basename(frame.f_code.co_filename), frame.f_lineno, frame.f_code.co_name) frame = frame.f_back del frame @@ -167,26 +171,22 @@ class DatabaseBackend(Thread): def _checkVersion(self): """ check db version and delete it if needed""" if not 
exists("files.version"): - f = open("files.version", "wb") - f.write(str(DB_VERSION)) - f.close() + with open("files.version", "wb") as f: + f.write(str(DB_VERSION)) return - f = open("files.version", "rb") - v = int(f.read().strip()) - f.close() - if v < DB_VERSION: - if v < 2: - try: - self.manager.core.log.warning(_("Filedatabase was deleted due to incompatible version.")) - except Exception: - print "Filedatabase was deleted due to incompatible version." - remove("files.version") - move("files.db", "files.backup.db") - f = open("files.version", "wb") - f.write(str(DB_VERSION)) - f.close() - return v + with open("files.version", "wb+") as f: + v = int(f.read().strip()) + if v < DB_VERSION: + if v < 2: + try: + self.manager.core.log.warning(_("Filedatabase was deleted due to incompatible version.")) + except Exception: + print "Filedatabase was deleted due to incompatible version." + remove("files.version") + move("files.db", "files.backup.db") + f.write(str(DB_VERSION)) + return v def _convertDB(self, v): @@ -198,11 +198,12 @@ class DatabaseBackend(Thread): except Exception: print "Filedatabase could NOT be converted." - #convert scripts start----------------------------------------------------- + # convert scripts start --------------------------------------------------- def _convertV2(self): - self.c.execute('CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")') + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")') try: self.manager.core.log.info(_("Database was converted from v2 to v3.")) except Exception: @@ -211,47 +212,45 @@ class DatabaseBackend(Thread): def _convertV3(self): - self.c.execute('CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)') + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)') try: self.manager.core.log.info(_("Database was converted from v3 to v4.")) except Exception: print "Database was converted from v3 to v4." 
- #convert scripts end------------------------------------------------------- + # convert scripts end ----------------------------------------------------- def _createTables(self): """create tables for database""" - self.c.execute('CREATE TABLE IF NOT EXISTS "packages" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "folder" TEXT, "password" TEXT DEFAULT "", "site" TEXT DEFAULT "", "queue" INTEGER DEFAULT 0 NOT NULL, "packageorder" INTEGER DEFAULT 0 NOT NULL)') - self.c.execute('CREATE TABLE IF NOT EXISTS "links" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "url" TEXT NOT NULL, "name" TEXT, "size" INTEGER DEFAULT 0 NOT NULL, "status" INTEGER DEFAULT 3 NOT NULL, "plugin" TEXT DEFAULT "BasePlugin" NOT NULL, "error" TEXT DEFAULT "", "linkorder" INTEGER DEFAULT 0 NOT NULL, "package" INTEGER DEFAULT 0 NOT NULL, FOREIGN KEY(package) REFERENCES packages(id))') + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "packages" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "folder" TEXT, "password" TEXT DEFAULT "", "site" TEXT DEFAULT "", "queue" INTEGER DEFAULT 0 NOT NULL, "packageorder" INTEGER DEFAULT 0 NOT NULL)') + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "links" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "url" TEXT NOT NULL, "name" TEXT, "size" INTEGER DEFAULT 0 NOT NULL, "status" INTEGER DEFAULT 3 NOT NULL, "plugin" TEXT DEFAULT "BasePlugin" NOT NULL, "error" TEXT DEFAULT "", "linkorder" INTEGER DEFAULT 0 NOT NULL, "package" INTEGER DEFAULT 0 NOT NULL, FOREIGN KEY(package) REFERENCES packages(id))') self.c.execute('CREATE INDEX IF NOT EXISTS "pIdIndex" ON links(package)') - self.c.execute('CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")') - self.c.execute('CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)') + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")') + self.c.execute( + 'CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)') self.c.execute('CREATE VIEW IF NOT EXISTS "pstats" AS \ SELECT p.id AS id, SUM(l.size) AS sizetotal, COUNT(l.id) AS linkstotal, linksdone, sizedone\ FROM packages p JOIN links l ON p.id = l.package LEFT OUTER JOIN\ (SELECT p.id AS id, COUNT(*) AS linksdone, SUM(l.size) AS sizedone \ - FROM packages p JOIN links l ON p.id = l.package AND l.status in (0, 4, 13) GROUP BY p.id) s ON s.id = p.id \ + FROM packages p JOIN links l ON p.id = l.package AND l.status IN (0, 4, 13) GROUP BY p.id) s ON s.id = p.id \ GROUP BY p.id') - #try to lower ids + # try to lower ids self.c.execute('SELECT max(id) FROM LINKS') fid = self.c.fetchone()[0] - if fid: - fid = int(fid) - else: - fid = 0 + fid = int(fid) if fid else 0 self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? 
WHERE name=?', (fid, "links")) - self.c.execute('SELECT max(id) FROM packages') pid = self.c.fetchone()[0] - if pid: - pid = int(pid) - else: - pid = 0 + pid = int(pid) if pid else 0 self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?', (pid, "packages")) self.c.execute('VACUUM') @@ -265,7 +264,7 @@ class DatabaseBackend(Thread): print "Converting old Django DB" conn = sqlite3.connect('pyload.db') c = conn.cursor() - c.execute("SELECT username, password, email from auth_user WHERE is_superuser") + c.execute("SELECT username, password, email FROM auth_user WHERE is_superuser") users = [] for r in c: pw = r[1].split("$") diff --git a/pyload/database/File.py b/pyload/database/File.py index 7cbe1890a..3e930ebcc 100644 --- a/pyload/database/File.py +++ b/pyload/database/File.py @@ -2,7 +2,6 @@ # @author: RaNaN, mkaay from threading import RLock -from time import time from pyload.utils import formatSize, lock from pyload.manager.Event import InsertEvent, ReloadAllEvent, RemoveEvent, UpdateEvent @@ -25,20 +24,22 @@ class FileHandler(object): self.core = core # translations - self.statusMsg = [_("finished"), _("offline"), _("online"), _("queued"), _("skipped"), _("waiting"), _("temp. offline"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), _("downloading"), _("processing"), _("unknown")] + self.statusMsg = [_("finished"), _("offline"), _("online"), _("queued"), _("skipped"), _("waiting"), + _("temp. offline"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), + _("downloading"), _("processing"), _("unknown")] - self.cache = {} # holds instances for files - self.packageCache = {} # same for packages + self.cache = {} #: holds instances for files + self.packageCache = {} #: same for packages #@TODO: purge the cache self.jobCache = {} self.lock = RLock() #@TODO: should be a Lock w/o R - #self.lock._Verbose__verbose = True + # self.lock._Verbose__verbose = True - self.filecount = -1 # if an invalid value is set get current value from db - self.queuecount = -1 # number of package to be loaded - self.unchanged = False # determines if any changes was made since last call + self.filecount = -1 #: if an invalid value is set get current value from db + self.queuecount = -1 #: number of package to be loaded + self.unchanged = False #: determines if any changes was made since last call self.db = self.core.db @@ -52,6 +53,7 @@ class FileHandler(object): args[0].queuecount = -1 args[0].jobCache = {} return func(*args) + return new @@ -149,7 +151,8 @@ class FileHandler(object): p = self.getPackage(id) if not p: - if id in self.packageCache: del self.packageCache[id] + if id in self.packageCache: + del self.packageCache[id] return oldorder = p.order @@ -336,7 +339,7 @@ class FileHandler(object): pyfile = self.getFile(self.jobCache[occ].pop()) else: - self.jobCache = {} # better not caching to much + self.jobCache = {} #: better not caching to much jobs = self.db.getJob(occ) jobs.reverse() self.jobCache[occ] = jobs @@ -349,7 +352,7 @@ class FileHandler(object): #@TODO: maybe the new job has to be approved... 
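The "# try to lower ids" block above resets SQLite's AUTOINCREMENT counters to the current maximum ids after the tables have been cleaned up. A sketch of that step with the stdlib sqlite3 module; the helper name is illustrative:

    import sqlite3

    def reset_sequence(cursor, table):
        cursor.execute('SELECT max(id) FROM %s' % table)   # table name comes from code, not user input
        maxid = cursor.fetchone()[0]
        maxid = int(maxid) if maxid else 0                  # an empty table yields None
        cursor.execute('UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?', (maxid, table))

    # conn = sqlite3.connect("files.db")
    # c = conn.cursor()
    # reset_sequence(c, "links"); reset_sequence(c, "packages")
    # c.execute("VACUUM")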
- #pyfile = self.getFile(self.jobCache[occ].pop()) + # pyfile = self.getFile(self.jobCache[occ].pop()) return pyfile @@ -498,11 +501,11 @@ class FileHandler(object): if pack.queue != p.queue or pack.order < 0 or pack == p: continue if p.order > position: - if pack.order >= position and pack.order < p.order: + if position <= pack.order < p.order: pack.order += 1 pack.notifyChange() elif p.order < position: - if pack.order <= position and pack.order > p.order: + if position >= pack.order > p.order: pack.order -= 1 pack.notifyChange() @@ -529,11 +532,11 @@ class FileHandler(object): if pyfile.packageid != f['package'] or pyfile.order < 0: continue if f['order'] > position: - if pyfile.order >= position and pyfile.order < f['order']: + if position <= pyfile.order < f['order']: pyfile.order += 1 pyfile.notifyChange() elif f['order'] < position: - if pyfile.order <= position and pyfile.order > f['order']: + if position >= pyfile.order > f['order']: pyfile.order -= 1 pyfile.notifyChange() @@ -592,11 +595,9 @@ class FileHandler(object): new_packs.update(self.db.getAllPackages(1)) # get new packages only from db - deleted = [] - for id in old_packs.iterkeys(): - if id not in new_packs: - deleted.append(id) - self.deletePackage(int(id)) + deleted = [id for id in old_packs.iterkeys() if id not in new_packs] + for id_deleted in deleted: + self.deletePackage(int(id_deleted)) return deleted @@ -614,21 +615,26 @@ class FileMethods(object): @style.queue def filecount(self, queue): """returns number of files in queue""" - self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=?", (queue,)) + self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=?", + (queue,)) return self.c.fetchone()[0] @style.queue def queuecount(self, queue): """ number of files in queue not finished yet""" - self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? AND l.status NOT IN (0, 4)", (queue,)) + self.c.execute( + "SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? AND l.status NOT IN (0, 4)", + (queue,)) return self.c.fetchone()[0] @style.queue def processcount(self, queue, fid): """ number of files which have to be proccessed """ - self.c.execute("SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? AND l.status IN (2, 3, 5, 7, 12) AND l.id != ?", (queue, str(fid))) + self.c.execute( + "SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? 
AND l.status IN (2, 3, 5, 7, 12) AND l.id != ?", + (queue, str(fid))) return self.c.fetchone()[0] @@ -655,7 +661,8 @@ class FileMethods(object): @style.queue def addLink(self, url, name, plugin, package): order = self._nextFileOrder(package) - self.c.execute('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', (url, name, ".".join(plugintype, pluginname), package, order)) + self.c.execute('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', + (url, name, ".".join(plugintype, pluginname), package, order)) return self.c.lastrowid @@ -663,7 +670,7 @@ class FileMethods(object): def addLinks(self, links, package): """ links is a list of tupels (url, plugin)""" order = self._nextFileOrder(package) - orders = [order + x for x in range(len(links))] + orders = [order + x for x in xrange(len(links))] links = [(x[0], x[0], ".".join((x[1], x[2])), package, o) for x, o in zip(links, orders)] self.c.executemany('INSERT INTO links(url, name, plugin, package, linkorder) VALUES(?,?,?,?,?)', links) @@ -671,7 +678,8 @@ class FileMethods(object): @style.queue def addPackage(self, name, folder, queue): order = self._nextPackageOrder(queue) - self.c.execute('INSERT INTO packages(name, folder, queue, packageorder) VALUES(?,?,?,?)', (name, folder, queue, order)) + self.c.execute('INSERT INTO packages(name, folder, queue, packageorder) VALUES(?,?,?,?)', + (name, folder, queue, order)) return self.c.lastrowid @@ -679,13 +687,15 @@ class FileMethods(object): def deletePackage(self, p): self.c.execute('DELETE FROM links WHERE package=?', (str(p.id),)) self.c.execute('DELETE FROM packages WHERE id=?', (str(p.id),)) - self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=?', (p.order, p.queue)) + self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=?', + (p.order, p.queue)) @style.queue def deleteLink(self, f): self.c.execute('DELETE FROM links WHERE id=?', (str(f.id),)) - self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder > ? AND package=?', (f.order, str(f.packageid))) + self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder > ? AND package=?', + (f.order, str(f.packageid))) @style.queue @@ -702,7 +712,9 @@ class FileMethods(object): } """ - self.c.execute('SELECT l.id, l.url, l.name, l.size, l.status, l.error, l.plugin, l.package, l.linkorder FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? ORDER BY l.linkorder', (q,)) + self.c.execute( + 'SELECT l.id, l.url, l.name, l.size, l.status, l.error, l.plugin, l.package, l.linkorder FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? 
ORDER BY l.linkorder', + (q,)) data = {} for r in self.c: data[r[0]] = { @@ -751,7 +763,7 @@ class FileMethods(object): 'queue': r[5], 'order': r[6], 'sizetotal': int(r[7]), - 'sizedone': r[8] if r[8] else 0, # these can be None + 'sizedone': r[8] if r[8] else 0, #: these can be None 'linksdone': r[9] if r[9] else 0, 'linkstotal': r[10], 'links': {} @@ -763,7 +775,8 @@ class FileMethods(object): @style.queue def getLinkData(self, id): """get link information as dict""" - self.c.execute('SELECT id, url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?', (str(id),)) + self.c.execute('SELECT id, url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?', + (str(id),)) data = {} r = self.c.fetchone() if not r: @@ -788,7 +801,9 @@ class FileMethods(object): @style.queue def getPackageData(self, id): """get data about links for a package""" - self.c.execute('SELECT id, url, name, size, status, error, plugin, package, linkorder FROM links WHERE package=? ORDER BY linkorder', (str(id),)) + self.c.execute( + 'SELECT id, url, name, size, status, error, plugin, package, linkorder FROM links WHERE package=? ORDER BY linkorder', + (str(id),)) data = {} for r in self.c: @@ -811,13 +826,14 @@ class FileMethods(object): @style.async def updateLink(self, f): - self.c.execute('UPDATE links SET url=?, name=?, size=?, status=?, error=?, package=? WHERE id=?', (f.url, f.name, f.size, f.status, str(f.error), str(f.packageid), str(f.id))) + self.c.execute('UPDATE links SET url=?, name=?, size=?, status=?, error=?, package=? WHERE id=?', + (f.url, f.name, f.size, f.status, str(f.error), str(f.packageid), str(f.id))) @style.queue def updatePackage(self, p): - self.c.execute('UPDATE packages SET name=?, folder=?, site=?, password=?, queue=? WHERE id=?', (p.name, p.folder, p.site, p.password, p.queue, str(p.id))) - + self.c.execute('UPDATE packages SET name=?, folder=?, site=?, password=?, queue=? WHERE id=?', + (p.name, p.folder, p.site, p.password, p.queue, str(p.id))) @style.queue def updateLinkInfo(self, data): @@ -836,9 +852,13 @@ class FileMethods(object): position = self._nextPackageOrder(p.queue) if not noMove: if p.order > position: - self.c.execute('UPDATE packages SET packageorder=packageorder+1 WHERE packageorder >= ? AND packageorder < ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue)) + self.c.execute( + 'UPDATE packages SET packageorder=packageorder+1 WHERE packageorder >= ? AND packageorder < ? AND queue=? AND packageorder >= 0', + (position, p.order, p.queue)) elif p.order < position: - self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder <= ? AND packageorder > ? AND queue=? AND packageorder >= 0', (position, p.order, p.queue)) + self.c.execute( + 'UPDATE packages SET packageorder=packageorder-1 WHERE packageorder <= ? AND packageorder > ? AND queue=? AND packageorder >= 0', + (position, p.order, p.queue)) self.c.execute('UPDATE packages SET packageorder=? WHERE id=?', (position, str(p.id))) @@ -847,9 +867,11 @@ class FileMethods(object): def reorderLink(self, f, position): """ reorder link with f as dict for pyfile """ if f['order'] > position: - self.c.execute('UPDATE links SET linkorder=linkorder+1 WHERE linkorder >= ? AND linkorder < ? AND package=?', (position, f['order'], f['package'])) + self.c.execute('UPDATE links SET linkorder=linkorder+1 WHERE linkorder >= ? AND linkorder < ? 
AND package=?', + (position, f['order'], f['package'])) elif f['order'] < position: - self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder <= ? AND linkorder > ? AND package=?', (position, f['order'], f['package'])) + self.c.execute('UPDATE links SET linkorder=linkorder-1 WHERE linkorder <= ? AND linkorder > ? AND package=?', + (position, f['order'], f['package'])) self.c.execute('UPDATE links SET linkorder=? WHERE id=?', (position, f['id'])) @@ -857,7 +879,8 @@ class FileMethods(object): @style.queue def clearPackageOrder(self, p): self.c.execute('UPDATE packages SET packageorder=? WHERE id=?', (-1, str(p.id))) - self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=? AND id != ?', (p.order, p.queue, str(p.id))) + self.c.execute('UPDATE packages SET packageorder=packageorder-1 WHERE packageorder > ? AND queue=? AND id != ?', + (p.order, p.queue, str(p.id))) @style.async @@ -877,7 +900,7 @@ class FileMethods(object): r = self.c.fetchone() if not r: return None - return PyPackage(self.manager, id, * r) + return PyPackage(self.manager, id, *r) #-------------------------------------------------------------------------- @@ -885,13 +908,14 @@ class FileMethods(object): @style.queue def getFile(self, id): """return link instance from id""" - self.c.execute("SELECT url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?", (str(id),)) + self.c.execute("SELECT url, name, size, status, error, plugin, package, linkorder FROM links WHERE id=?", + (str(id),)) r = self.c.fetchone() if not r: return None r = list(r) r[5] = tuple(r[5].split('.')) - return PyFile(self.manager, id, * r) + return PyFile(self.manager, id, *r) @style.queue @@ -899,7 +923,7 @@ class FileMethods(object): """return pyfile ids, which are suitable for download and dont use a occupied plugin""" #@TODO: improve this hardcoded method - pre = "('CCF', 'DLC', 'LinkList', 'RSDF', 'TXT')" # plugins which are processed in collector + pre = "('CCF', 'DLC', 'LinkList', 'RSDF', 'TXT')" #: plugins which are processed in collector cmd = "(" for i, item in enumerate(occ): @@ -910,7 +934,7 @@ class FileMethods(object): cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE ((p.queue=1 AND l.plugin NOT IN %s) OR l.plugin IN %s) AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % (cmd, pre) - self.c.execute(cmd) # very bad! + self.c.execute(cmd) #: very bad! return [x[0] for x in self.c] @@ -920,7 +944,7 @@ class FileMethods(object): """returns pyfile ids with suited plugins""" cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE l.plugin IN %s AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % plugins - self.c.execute(cmd) # very bad! + self.c.execute(cmd) #: very bad! return [x[0] for x in self.c] @@ -947,7 +971,9 @@ class FileMethods(object): @style.queue def findDuplicates(self, id, folder, filename): """ checks if filename exists with different id and same package """ - self.c.execute("SELECT l.plugin FROM links as l INNER JOIN packages as p ON l.package=p.id AND p.folder=? WHERE l.id!=? AND l.status=0 AND l.name=?", (folder, id, filename)) + self.c.execute( + "SELECT l.plugin FROM links as l INNER JOIN packages as p ON l.package=p.id AND p.folder=? WHERE l.id!=? 
AND l.status=0 AND l.name=?", + (folder, id, filename)) return self.c.fetchone() @@ -956,4 +982,5 @@ class FileMethods(object): self.c.execute("DELETE FROM links;") self.c.execute("DELETE FROM packages;") + DatabaseBackend.registerSub(FileMethods) diff --git a/pyload/database/Storage.py b/pyload/database/Storage.py index 45ad18b2d..70932b55c 100644 --- a/pyload/database/Storage.py +++ b/pyload/database/Storage.py @@ -26,14 +26,12 @@ class StorageMethods(object): return row[0] else: db.c.execute("SELECT key, value FROM storage WHERE identifier=?", (identifier,)) - d = {} - for row in db.c: - d[row[0]] = row[1] - return d + return {row[0]: row[1] for row in db.c} @style.queue def delStorage(db, identifier, key): db.c.execute("DELETE FROM storage WHERE identifier=? AND key=?", (identifier, key)) + DatabaseBackend.registerSub(StorageMethods) diff --git a/pyload/database/User.py b/pyload/database/User.py index e11961e32..2aedc3bba 100644 --- a/pyload/database/User.py +++ b/pyload/database/User.py @@ -30,7 +30,7 @@ class UserMethods(object): @style.queue def addUser(db, user, password): - salt = reduce(lambda x, y: x + y, [str(random.randint(0, 9)) for _i in range(0, 5)]) + salt = reduce(lambda x, y: x + y, [str(random.randint(0, 9)) for _i in xrange(0, 5)]) h = sha1(salt + password) password = salt + h.hexdigest() @@ -53,7 +53,7 @@ class UserMethods(object): pw = r[2][5:] h = sha1(salt + oldpw) if h.hexdigest() == pw: - salt = reduce(lambda x, y: x + y, [str(random.randint(0, 9)) for _i in range(0, 5)]) + salt = reduce(lambda x, y: x + y, [str(random.randint(0, 9)) for _i in xrange(0, 5)]) h = sha1(salt + newpw) password = salt + h.hexdigest() @@ -76,24 +76,18 @@ class UserMethods(object): @style.queue def listUsers(db): db.c.execute('SELECT name FROM users') - users = [] - for row in db.c: - users.append(row[0]) - return users + return [row[0] for row in db.c] @style.queue def getAllUserData(db): db.c.execute("SELECT name, permission, role, template, email FROM users") - user = {} - for r in db.c: - user[r[0]] = {"permission": r[1], "role": r[2], "template": r[3], "email": r[4]} - - return user + return {{"permission": r[1], "role": r[2], "template": r[3], "email": r[4]} for r in db.c} @style.queue def removeUser(db, user): db.c.execute('DELETE FROM users WHERE name=?', (user,)) + DatabaseBackend.registerSub(UserMethods) diff --git a/pyload/datatype/File.py b/pyload/datatype/File.py index 93aa636d7..05d515fd0 100644 --- a/pyload/datatype/File.py +++ b/pyload/datatype/File.py @@ -50,7 +50,7 @@ class PyFile(object): self.size = size self.status = status self.plugintype, self.pluginname = plugin - self.packageid = package # should not be used, use package() instead + self.packageid = package #: should not be used, use package() instead self.error = error self.order = order # database information ends here @@ -58,12 +58,12 @@ class PyFile(object): self.lock = RLock() self.plugin = None - #self.download = None + # self.download = None - self.waitUntil = 0 # time() + time to wait + self.waitUntil = 0 #: time() + time to wait # status attributes - self.active = False # obsolete? + self.active = False #: obsolete? 
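getAllUserData above collapses the row loop into a comprehension; since callers look the result up by user name, the direct translation of the removed loop is a dict comprehension keyed on name (note that a set comprehension cannot hold dicts, which are unhashable). A sketch against the users schema from the hunks, with an illustrative function name:

    def get_all_user_data(cursor):
        cursor.execute("SELECT name, permission, role, template, email FROM users")
        return {name: {"permission": perm, "role": role, "template": tmpl, "email": email}
                for name, perm, role, tmpl, email in cursor}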
self.abort = False self.reconnected = False diff --git a/pyload/manager/Account.py b/pyload/manager/Account.py index 44a5e5c65..ac9944134 100644 --- a/pyload/manager/Account.py +++ b/pyload/manager/Account.py @@ -126,8 +126,8 @@ class AccountManager(object): f.write("\n") f.write(plugin + ":\n") - for name,data in accounts.iteritems(): - f.write("\n\t%s:%s\n" % (name,data['password']) ) + for name, data in accounts.iteritems(): + f.write("\n\t%s:%s\n" % (name, data['password']) ) if data['options']: for option, values in data['options'].iteritems(): f.write("\t@%s %s\n" % (option, " ".join(values))) @@ -147,12 +147,12 @@ class AccountManager(object): @lock - def updateAccount(self, plugin , user, password=None, options={}): + def updateAccount(self, plugin, user, password=None, options={}): """add or update account""" if plugin in self.accounts: p = self.getAccountPlugin(plugin) updated = p.updateAccounts(user, password, options) - #since accounts is a ref in plugin self.accounts doesnt need to be updated here + # since accounts is a ref in plugin self.accounts doesnt need to be updated here self.saveAccounts() if updated: p.scheduleRefresh(user, force=False) diff --git a/pyload/manager/Addon.py b/pyload/manager/Addon.py index 5ac56a349..2a3bc4318 100644 --- a/pyload/manager/Addon.py +++ b/pyload/manager/Addon.py @@ -89,7 +89,7 @@ class AddonManager(object): def callRPC(self, plugin, func, args, parse): if not args: - args = tuple() + args = () if parse: args = tuple([literal_eval(x) for x in args]) plugin = self.pluginMap[plugin] @@ -169,7 +169,7 @@ class AddonManager(object): addon.deactivate() - #remove periodic call + # remove periodic call self.core.log.debug("Removed callback: %s" % self.core.scheduler.removeJob(addon.cb)) self.plugins.remove(addon) diff --git a/pyload/manager/Captcha.py b/pyload/manager/Captcha.py index 4a7582d65..ab9f79b37 100644 --- a/pyload/manager/Captcha.py +++ b/pyload/manager/Captcha.py @@ -13,8 +13,8 @@ class CaptchaManager(object): def __init__(self, core): self.lock = Lock() self.core = core - self.tasks = [] # task store, for outgoing tasks only - self.ids = 0 # only for internal purpose + self.tasks = [] #: task store, for outgoing tasks only + self.ids = 0 #: only for internal purpose def newTask(self, img, format, file, result_type): @@ -43,7 +43,7 @@ class CaptchaManager(object): def getTaskByID(self, tid): self.lock.acquire() for task in self.tasks: - if task.id == str(tid): # task ids are strings + if task.id == str(tid): #: task ids are strings self.lock.release() return task self.lock.release() @@ -81,9 +81,9 @@ class CaptchaTask(object): self.handler = [] #: the hook plugins that will take care of the solution self.result = None self.waitUntil = None - self.error = None # error message + self.error = None #: error message self.status = "init" - self.data = {} # handler can store data here + self.data = {} #: handler can store data here def getCaptcha(self): diff --git a/pyload/manager/Plugin.py b/pyload/manager/Plugin.py index bcaf06bde..c6ba5e81b 100644 --- a/pyload/manager/Plugin.py +++ b/pyload/manager/Plugin.py @@ -129,7 +129,7 @@ class PluginManager(object): module = f.replace(".pyc", "").replace(".py", "") # the plugin is loaded from user directory - plugins[name]['user'] = True if rootplugins else False + plugins[name]['user'] = bool(rootplugins) plugins[name]['name'] = module pattern = self.PATTERN.findall(content) @@ -165,13 +165,13 @@ class PluginManager(object): config = [list(config)] if folder not in ("account", "internal") and not 
[True for item in config if item[0] == "activated"]: - config.insert(0, ["activated", "bool", "Activated", False if folder in ("addon", "hook") else True]) + config.insert(0, ["activated", "bool", "Activated", not folder in ("addon", "hook")]) self.core.config.addPluginConfig("%s_%s" % (name, folder), config, desc) except Exception: self.core.log.error("Invalid config in %s: %s" % (name, config)) - elif folder in ("addon", "hook"): # force config creation + elif folder in ("addon", "hook"): #: force config creation desc = self.DESC.findall(content) desc = desc[0][1] if desc else "" config = (["activated", "bool", "Activated", False],) @@ -313,9 +313,9 @@ class PluginManager(object): def find_module(self, fullname, path=None): # redirecting imports if necesarry - if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): # seperate pyload plugins + if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #: seperate pyload plugins if fullname.startswith(self.USERROOT): user = 1 - else: user = 0 # used as bool and int + else: user = 0 #: used as bool and int split = fullname.split(".") if len(split) != 4 - user: @@ -332,7 +332,7 @@ class PluginManager(object): def load_module(self, name, replace=True): - if name not in sys.modules: # could be already in modules + if name not in sys.modules: #: could be already in modules if replace: if self.ROOT in name: newname = name.replace(self.ROOT, self.USERROOT) @@ -401,4 +401,4 @@ class PluginManager(object): def reloadPlugin(self, type_plugin): """ reload and reindex ONE plugin """ - return True if self.reloadPlugins(type_plugin) else False + return bool(self.reloadPlugins(type_plugin)) diff --git a/pyload/manager/Thread.py b/pyload/manager/Thread.py index a8550e504..a2a64c38d 100644 --- a/pyload/manager/Thread.py +++ b/pyload/manager/Thread.py @@ -33,7 +33,7 @@ class ThreadManager(object): self.reconnecting = Event() self.reconnecting.clear() - self.downloaded = 0 # number of files downloaded since last cleanup + self.downloaded = 0 #: number of files downloaded since last cleanup self.lock = Lock() @@ -51,7 +51,7 @@ class ThreadManager(object): pycurl.global_init(pycurl.GLOBAL_DEFAULT) - for _i in range(0, self.core.config.get("download", "max_downloads")): + for _i in xrange(0, self.core.config.get("download", "max_downloads")): self.createThread() @@ -206,7 +206,7 @@ class ThreadManager(object): ("http://checkip.dyndns.org/", ".*Current IP Address: (\S+)</body>.*")] ip = "" - for _i in range(10): + for _i in xrange(10): try: sv = choice(services) ip = getURL(sv[0]) diff --git a/pyload/manager/thread/Addon.py b/pyload/manager/thread/Addon.py index 1da164543..b176e4e0c 100644 --- a/pyload/manager/thread/Addon.py +++ b/pyload/manager/thread/Addon.py @@ -58,7 +58,7 @@ class AddonThread(PluginThread): self.kwargs['thread'] = self self.f(*self.args, **self.kwargs) except TypeError, e: - #dirty method to filter out exceptions + # dirty method to filter out exceptions if "unexpected keyword argument 'thread'" not in e.args[0]: raise diff --git a/pyload/manager/thread/Download.py b/pyload/manager/thread/Download.py index 21db61ca4..293014a2e 100644 --- a/pyload/manager/thread/Download.py +++ b/pyload/manager/thread/Download.py @@ -39,7 +39,7 @@ class DownloadThread(PluginThread): while True: del pyfile - self.active = False # sets the thread inactive when it is ready to get next job + self.active = False #: sets the thread inactive when it is ready to get next job self.active = self.queue.get() pyfile = self.active diff 
--git a/pyload/manager/thread/Info.py b/pyload/manager/thread/Info.py index 28a2e8e91..9d8a3ef5b 100644 --- a/pyload/manager/thread/Info.py +++ b/pyload/manager/thread/Info.py @@ -26,13 +26,13 @@ class InfoThread(PluginThread): PluginThread.__init__(self, manager) self.data = data - self.pid = pid # package id + self.pid = pid #: package id # [ .. (name, plugin) .. ] - self.rid = rid # result id - self.add = add # add packages instead of return result + self.rid = rid #: result id + self.add = add #: add packages instead of return result - self.cache = [] # accumulated data + self.cache = [] #: accumulated data self.start() @@ -83,7 +83,7 @@ class InfoThread(PluginThread): # empty cache del self.cache[:] - else: # post the results + else: #: post the results for name, url in container: # attach container content @@ -154,8 +154,8 @@ class InfoThread(PluginThread): def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None): try: - result = [] # result loaded from cache - process = [] # urls to process + result = [] #: result loaded from cache + process = [] #: urls to process for url in urls: if url in self.m.infoCache: result.append(self.m.infoCache[url]) diff --git a/pyload/manager/thread/Plugin.py b/pyload/manager/thread/Plugin.py index 08a2664da..d8319a2ce 100644 --- a/pyload/manager/thread/Plugin.py +++ b/pyload/manager/thread/Plugin.py @@ -1,6 +1,8 @@ # -*- coding: utf-8 -*- # @author: RaNaN +from __future__ import with_statement + from Queue import Queue from threading import Thread from os import listdir, stat @@ -64,9 +66,8 @@ class PluginThread(Thread): self.m.log.debug("Error creating zip file: %s" % e) dump_name = dump_name.replace(".zip", ".txt") - f = open(dump_name, "wb") - f.write(dump) - f.close() + with open(dump_name, "wb") as f: + f.write(dump) self.m.core.log.info("Debug Report written to %s" % dump_name) @@ -128,5 +129,5 @@ class PluginThread(Thread): def clean(self, pyfile): """ set thread unactive and release pyfile """ - self.active = True #release pyfile but lets the thread active + self.active = True #: release pyfile but lets the thread active pyfile.release() diff --git a/pyload/manager/thread/Server.py b/pyload/manager/thread/Server.py index 83e886253..97590013e 100644 --- a/pyload/manager/thread/Server.py +++ b/pyload/manager/thread/Server.py @@ -89,11 +89,13 @@ class WebServer(threading.Thread): def start_threaded(self): if self.https: - self.core.log.info(_("Starting threaded SSL webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port}) + self.core.log.info( + _("Starting threaded SSL webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port}) else: self.cert = "" self.key = "" - self.core.log.info(_("Starting threaded webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port}) + self.core.log.info( + _("Starting threaded webserver: %(host)s:%(port)d") % {"host": self.host, "port": self.port}) webinterface.run_threaded(host=self.host, port=self.port, cert=self.cert, key=self.key) @@ -111,7 +113,8 @@ class WebServer(threading.Thread): if self.https: log.warning(_("This server offers no SSL, please consider using threaded instead")) - self.core.log.info(_("Starting lightweight webserver (bjoern): %(host)s:%(port)d") % {"host": self.host, "port": self.port}) + self.core.log.info( + _("Starting lightweight webserver (bjoern): %(host)s:%(port)d") % {"host": self.host, "port": self.port}) webinterface.run_lightweight(host=self.host, port=self.port) diff --git a/pyload/network/Browser.py b/pyload/network/Browser.py 
index d8617fabc..482c2320a 100644 --- a/pyload/network/Browser.py +++ b/pyload/network/Browser.py @@ -24,7 +24,8 @@ class Browser(object): def renewHTTPRequest(self): - if hasattr(self, "http"): self.http.close() + if hasattr(self, "http"): + self.http.close() self.http = HTTPRequest(self.cj, self.options) @@ -61,16 +62,12 @@ class Browser(object): @property def arrived(self): - if self.dl: - return self.dl.arrived - return 0 + return self.dl.arrived if self.dl else 0 @property def percent(self): - if not self.size: - return 0 - return (self.arrived * 100) / self.size + return (self.arrived * 100) / self.size if self.size else 0 def clearCookies(self): @@ -95,7 +92,7 @@ class Browser(object): """ this can also download ftp """ self._size = 0 self.dl = HTTPDownload(url, filename, get, post, self.lastEffectiveURL if ref else None, - self.cj if cookies else None, self.bucket, self.options, progressNotify, disposition) + self.cj if cookies else None, self.bucket, self.options, progressNotify, disposition) name = self.dl.download(chunks, resume) self._size = self.dl.size @@ -124,7 +121,8 @@ class Browser(object): def removeAuth(self): - if "auth" in self.options: del self.options['auth'] + if "auth" in self.options: + del self.options['auth'] self.renewHTTPRequest() @@ -134,7 +132,8 @@ class Browser(object): def deleteOption(self, name): - if name in self.options: del self.options[name] + if name in self.options: + del self.options[name] def clearHeaders(self): diff --git a/pyload/network/Bucket.py b/pyload/network/Bucket.py index 5f8260384..2f957fcad 100644 --- a/pyload/network/Bucket.py +++ b/pyload/network/Bucket.py @@ -17,7 +17,7 @@ class Bucket(object): def __nonzero__(self): - return False if self.rate < MIN_RATE else True + return self.rate >= MIN_RATE def setRate(self, rate): @@ -35,10 +35,7 @@ class Bucket(object): self.calc_tokens() self.tokens -= amount - if self.tokens < 0: - time = -self.tokens/float(self.rate) - else: - time = 0 + time = -self.tokens / float(self.rate) if self.tokens < 0 else 0 self.lock.release() return time diff --git a/pyload/network/HTTPChunk.py b/pyload/network/HTTPChunk.py index 784b64349..85c20d519 100644 --- a/pyload/network/HTTPChunk.py +++ b/pyload/network/HTTPChunk.py @@ -30,7 +30,6 @@ class ChunkInfo(object): ret = "ChunkInfo: %s, %s\n" % (self.name, self.size) for i, c in enumerate(self.chunks): ret += "%s# %s\n" % (i, c[1]) - return ret @@ -51,7 +50,7 @@ class ChunkInfo(object): chunk_size = self.size / chunks current = 0 - for i in range(chunks): + for i in xrange(chunks): end = self.size - 1 if (i == chunks - 1) else current + chunk_size self.addChunk("%s.chunk%s" % (self.name, i), (current, end)) current += chunk_size + 1 @@ -222,7 +221,7 @@ class HTTPChunk(HTTPRequest): def writeBody(self, buf): - #ignore BOM, it confuses unrar + # ignore BOM, it confuses unrar if not self.BOMChecked: if [ord(b) for b in buf[:3]] == [239, 187, 191]: buf = buf[3:] @@ -310,7 +309,8 @@ class HTTPChunk(HTTPRequest): """ closes everything, unusable after this """ if self.fp: self.fp.close() self.c.close() - if hasattr(self, "p"): del self.p + if hasattr(self, "p"): + del self.p def charEnc(enc): diff --git a/pyload/network/HTTPDownload.py b/pyload/network/HTTPDownload.py index 13666195a..1e74d4476 100644 --- a/pyload/network/HTTPDownload.py +++ b/pyload/network/HTTPDownload.py @@ -1,14 +1,16 @@ # -*- coding: utf-8 -*- # @author: RaNaN +from __future__ import with_statement + +import pycurl + from os import remove, fsync from os.path import dirname from time import 
sleep, time from shutil import move from logging import getLogger -import pycurl - from pyload.network.HTTPChunk import ChunkInfo, HTTPChunk from pyload.network.HTTPRequest import BadHeader @@ -22,11 +24,11 @@ class HTTPDownload(object): def __init__(self, url, filename, get={}, post={}, referer=None, cj=None, bucket=None, options={}, progress=None, disposition=False): self.url = url - self.filename = filename #complete file destination, not only name + self.filename = filename #: complete file destination, not only name self.get = get self.post = post self.referer = referer - self.cj = cj #cookiejar if cookies are needed + self.cj = cj #: cookiejar if cookies are needed self.bucket = bucket self.options = options self.disposition = disposition @@ -51,7 +53,7 @@ class HTTPDownload(object): self.chunkSupport = True self.m = pycurl.CurlMulti() - #needed for speed calculation + # needed for speed calculation self.lastArrived = [] self.speeds = [] self.lastSpeeds = [0, 0] @@ -72,36 +74,30 @@ class HTTPDownload(object): @property def percent(self): - if not self.size: - return 0 - return (self.arrived * 100) / self.size - + return (self.arrived * 100) / self.size if self.size else 0 def _copyChunks(self): init = fs_encode(self.info.getChunkName(0)) #: initial chunk name if self.info.getCount() > 1: - fo = open(init, "rb+") #: first chunkfile - for i in range(1, self.info.getCount()): - #input file - fo.seek( - self.info.getChunkRange(i - 1)[1] + 1) #: seek to beginning of chunk, to get rid of overlapping chunks - fname = fs_encode("%s.chunk%d" % (self.filename, i)) - fi = open(fname, "rb") - buf = 32 * 1024 - while True: #: copy in chunks, consumes less memory - data = fi.read(buf) - if not data: - break - fo.write(data) - fi.close() - if fo.tell() < self.info.getChunkRange(i)[1]: - fo.close() - remove(init) - self.info.remove() #: there are probably invalid chunks - raise Exception("Downloaded content was smaller than expected. Try to reduce download connections.") - remove(fname) #: remove chunk - fo.close() + with open(init, "rb+") as fo: #: first chunkfile + for i in xrange(1, self.info.getCount()): + # input file + fo.seek( + self.info.getChunkRange(i - 1)[1] + 1) #: seek to beginning of chunk, to get rid of overlapping chunks + fname = fs_encode("%s.chunk%d" % (self.filename, i)) + with open(fname, "rb") as fi: + buf = 32 * 1024 + while True: #: copy in chunks, consumes less memory + data = fi.read(buf) + if not data: + break + fo.write(data) + if fo.tell() < self.info.getChunkRange(i)[1]: + remove(init) + self.info.remove() #: there are probably invalid chunks + raise Exception("Downloaded content was smaller than expected. 
Try to reduce download connections.") + remove(fname) #: remove chunk if self.nameDisposition and self.disposition: self.filename = fs_join(dirname(self.filename), self.nameDisposition) @@ -119,13 +115,13 @@ class HTTPDownload(object): try: self._download(chunks, resume) except pycurl.error, e: - #code 33 - no resume + # code 33 - no resume code = e.args[0] if resume is True and code == 33: # try again without resume self.log.debug("Errno 33 -> Restart without resume") - #remove old handles + # remove old handles for chunk in self.chunks: self.closeChunk(chunk) @@ -155,14 +151,14 @@ class HTTPDownload(object): lastFinishCheck = 0 lastTimeCheck = 0 - chunksDone = set() # list of curl handles that are finished + chunksDone = set() #: list of curl handles that are finished chunksCreated = False done = False if self.info.getCount() is 0: #: This is a resume, if we were chunked originally assume still can self.chunkSupport = False while 1: - #need to create chunks + # need to create chunks if not chunksCreated and self.chunkSupport and self.size: #: will be setted later by first chunk if not resume: @@ -174,7 +170,7 @@ class HTTPDownload(object): init.setRange(self.info.getChunkRange(0)) - for i in range(1, chunks): + for i in xrange(1, chunks): c = HTTPChunk(i, self, self.info.getChunkRange(i), resume) handle = c.getHandle() @@ -182,7 +178,7 @@ class HTTPDownload(object): self.chunks.append(c) self.m.add_handle(handle) else: - #close immediatly + # close immediatly self.log.debug("Invalid curl handle -> closed") c.close() @@ -216,7 +212,7 @@ class HTTPDownload(object): for c in err_list: curl, errno, msg = c chunk = self.findChunk(curl) - #test if chunk was finished + # test if chunk was finished if errno != 23 or "0 !=" not in msg: failed.append(chunk) ex = pycurl.error(errno, msg) @@ -238,14 +234,14 @@ class HTTPDownload(object): if failed and init not in failed and init.c not in chunksDone: self.log.error(_("Download chunks failed, fallback to single connection | %s" % (str(ex)))) - #list of chunks to clean and remove + # list of chunks to clean and remove to_clean = filter(lambda x: x is not init, self.chunks) for chunk in to_clean: self.closeChunk(chunk) self.chunks.remove(chunk) remove(fs_encode(self.info.getChunkName(chunk.id))) - #let first chunk load the rest and update the info file + # let first chunk load the rest and update the info file init.resetRange() self.info.clear() self.info.addChunk("%s.chunk0" % self.filename, (0, self.size)) @@ -258,7 +254,7 @@ class HTTPDownload(object): if len(chunksDone) >= len(self.chunks): if len(chunksDone) > len(self.chunks): self.log.warning("Finished download chunks size incorrect, please report bug.") - done = True #all chunks loaded + done = True #: all chunks loaded break diff --git a/pyload/network/HTTPRequest.py b/pyload/network/HTTPRequest.py index 62c0ef72b..92ce6ec4b 100644 --- a/pyload/network/HTTPRequest.py +++ b/pyload/network/HTTPRequest.py @@ -24,7 +24,7 @@ def myurlencode(data): data = dict(data) return urlencode(dict((encode(x), encode(y)) for x, y in data.iteritems())) -bad_headers = range(400, 404) + range(405, 418) + range(500, 506) +bad_headers = xrange(400, 404) + xrange(405, 418) + xrange(500, 506) class BadHeader(Exception): @@ -41,16 +41,16 @@ class HTTPRequest(object): self.c = pycurl.Curl() self.rep = StringIO() - self.cj = cookies # cookiejar + self.cj = cookies #: cookiejar self.lastURL = None self.lastEffectiveURL = None self.abort = False - self.code = 0 # last http code + self.code = 0 #: last http code self.header 
= "" - self.headers = [] # temporary request header + self.headers = [] #: temporary request header self.initHandle() self.setInterface(options) @@ -76,7 +76,7 @@ class HTTPRequest(object): if hasattr(pycurl, "USE_SSL"): self.c.setopt(pycurl.USE_SSL, pycurl.CURLUSESSL_TRY) - #self.c.setopt(pycurl.VERBOSE, 1) + # self.c.setopt(pycurl.VERBOSE, 1) self.c.setopt(pycurl.USERAGENT, "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0") @@ -158,7 +158,7 @@ class HTTPRequest(object): self.c.setopt(pycurl.POST, 1) if not multipart: if type(post) == unicode: - post = str(post) # unicode not allowed + post = str(post) #: unicode not allowed elif type(post) == str: pass else: @@ -250,12 +250,12 @@ class HTTPRequest(object): def decodeResponse(self, rep): """ decode with correct encoding, relies on header """ header = self.header.splitlines() - encoding = "utf8" # default encoding + encoding = "utf8" #: default encoding for line in header: line = line.lower().replace(" ", "") - if not line.startswith("content-type:") or\ - ("text" not in line and "application" not in line): + if not line.startswith("content-type:") or \ + ("text" not in line and "application" not in line): continue none, delemiter, charset = line.rpartition("charset=") @@ -265,7 +265,7 @@ class HTTPRequest(object): encoding = charset[0] try: - #self.log.debug("Decoded %s" % encoding ) + # self.log.debug("Decoded %s" % encoding ) if lookup(encoding).name == 'utf-8' and rep.startswith(BOM_UTF8): encoding = 'utf-8-sig' diff --git a/pyload/network/JsEngine.py b/pyload/network/JsEngine.py index c64e8c490..b59d07dc4 100644 --- a/pyload/network/JsEngine.py +++ b/pyload/network/JsEngine.py @@ -112,7 +112,7 @@ class AbstractEngine(object): def __init__(self, force=False): self.setup() - self.available = True if force else self.find() + self.available = force or self.find() def setup(self): diff --git a/pyload/network/RequestFactory.py b/pyload/network/RequestFactory.py index 0591c5162..5e2c15f4b 100644 --- a/pyload/network/RequestFactory.py +++ b/pyload/network/RequestFactory.py @@ -7,7 +7,6 @@ from pyload.network.Browser import Browser from pyload.network.Bucket import Bucket from pyload.network.HTTPRequest import HTTPRequest from pyload.network.CookieJar import CookieJar - from pyload.network.XDCCRequest import XDCCRequest @@ -88,8 +87,10 @@ class RequestFactory(object): else: type = "http" setting = self.core.config.get("proxy", "type").lower() - if setting == "socks4": type = "socks4" - elif setting == "socks5": type = "socks5" + if setting == "socks4": + type = "socks4" + elif setting == "socks5": + type = "socks5" username = None if self.core.config.get("proxy", "username") and self.core.config.get("proxy", "username").lower() != "none": @@ -105,7 +106,7 @@ class RequestFactory(object): "port": self.core.config.get("proxy", "port"), "username": username, "password": pw, - } + } def getOptions(self): diff --git a/pyload/network/XDCCRequest.py b/pyload/network/XDCCRequest.py index dff500749..24146ccaa 100644 --- a/pyload/network/XDCCRequest.py +++ b/pyload/network/XDCCRequest.py @@ -2,15 +2,12 @@ # @author: jeix import socket -import re +import struct from os import remove from os.path import exists - -from time import time - -import struct from select import select +from time import time from pyload.plugin.Plugin import Abort @@ -145,9 +142,7 @@ class XDCCRequest(object): @property def percent(self): - if not self.filesize: - return 0 - return (self.recv * 100) / self.filesize + return (self.recv * 100) / self.filesize 
if elf.filesize else 0 def close(self): diff --git a/pyload/plugin/Account.py b/pyload/plugin/Account.py index 73554cc39..bb8f7d59a 100644 --- a/pyload/plugin/Account.py +++ b/pyload/plugin/Account.py @@ -96,7 +96,7 @@ class Account(Base): req.cj.clear() req.close() if user in self.infos: - del self.infos[user] # delete old information + del self.infos[user] #: delete old information return self._login(user, self.accounts[user]) @@ -112,7 +112,7 @@ class Account(Base): """ updates account and return true if anything changed """ if user in self.accounts: - self.accounts[user]['valid'] = True # do not remove or accounts will not login + self.accounts[user]['valid'] = True #: do not remove or accounts will not login if password: self.accounts[user]['password'] = password self.relogin(user) @@ -260,7 +260,7 @@ class Account(Base): def canUse(self): - return False if self.selectAccount() == (None, None) else True + return self.selectAccount() != (None, None) def parseTraffic(self, value, unit=None): #: return bytes diff --git a/pyload/plugin/Addon.py b/pyload/plugin/Addon.py index a43f2fd70..21a86ab05 100644 --- a/pyload/plugin/Addon.py +++ b/pyload/plugin/Addon.py @@ -56,7 +56,7 @@ class Addon(Base): #: `AddonManager` self.manager = manager - #register events + # register events if self.event_map: for event, funcs in self.event_map.iteritems(): if type(funcs) in (list, tuple): @@ -65,7 +65,7 @@ class Addon(Base): else: self.manager.addEvent(event, getattr(self, funcs)) - #delete for various reasons + # delete for various reasons self.event_map = None if self.event_list: @@ -115,7 +115,7 @@ class Addon(Base): self.unload() - def unload(self): # Deprecated, use method deactivate() instead + def unload(self): #: Deprecated, use method deactivate() instead pass @@ -134,7 +134,7 @@ class Addon(Base): self.coreReady() - def coreReady(self): # Deprecated, use method activate() instead + def coreReady(self): #: Deprecated, use method activate() instead pass @@ -144,7 +144,7 @@ class Addon(Base): self.coreExiting() - def coreExiting(self): # Deprecated, use method exit() instead + def coreExiting(self): #: Deprecated, use method exit() instead pass diff --git a/pyload/plugin/Hoster.py b/pyload/plugin/Hoster.py index df778c72f..64c635c45 100644 --- a/pyload/plugin/Hoster.py +++ b/pyload/plugin/Hoster.py @@ -4,7 +4,7 @@ from pyload.plugin.Plugin import Plugin def getInfo(self): - #result = [ .. (name, size, status, url) .. ] + # result = [ .. (name, size, status, url) .. 
] return diff --git a/pyload/plugin/Plugin.py b/pyload/plugin/Plugin.py index 369d040fb..a14bb1e9c 100644 --- a/pyload/plugin/Plugin.py +++ b/pyload/plugin/Plugin.py @@ -220,10 +220,10 @@ class Plugin(Base): self.user, data = self.account.selectAccount() #: Browser instance, see `network.Browser` self.req = self.account.getAccountRequest(self.user) - self.chunkLimit = -1 # chunk limit, -1 for unlimited + self.chunkLimit = -1 #: chunk limit, -1 for unlimited #: enables resume (will be ignored if server dont accept chunks) self.resumeDownload = True - self.multiDL = True # every hoster with account should provide multiple downloads + self.multiDL = True #: every hoster with account should provide multiple downloads #: premium status self.premium = self.account.isPremium(self.user) else: @@ -232,7 +232,7 @@ class Plugin(Base): #: associated pyfile instance, see `PyFile` self.pyfile = pyfile - self.thread = None # holds thread in future + self.thread = None #: holds thread in future #: location where the last call to download was saved self.lastDownload = "" @@ -500,7 +500,7 @@ class Plugin(Base): captchaManager.removeTask(task) - if task.error and has_plugin: # ignore default error message since the user could use OCR + if task.error and has_plugin: #: ignore default error message since the user could use OCR self.fail(_("Pil and tesseract not installed and no Client connected for captcha decrypting")) elif task.error: self.fail(task.error) @@ -705,7 +705,7 @@ class Plugin(Base): content = f.read(read_size if read_size else -1) # produces encoding errors, better log to other file in the future? - #self.logDebug("Content: %s" % content) + # self.logDebug("Content: %s" % content) for name, rule in rules.iteritems(): if isinstance(rule, basestring): if rule in content: @@ -740,9 +740,9 @@ class Plugin(Base): for pyfile in self.core.files.cache.values(): if pyfile != self.pyfile and pyfile.name == self.pyfile.name and pyfile.package().folder == pack.folder: - if pyfile.status in (0, 12): # finished or downloading + if pyfile.status in (0, 12): #: finished or downloading raise SkipDownload(pyfile.pluginname) - elif pyfile.status in (5, 7) and starting: # a download is waiting/starting and was appenrently started before + elif pyfile.status in (5, 7) and starting: #: a download is waiting/starting and was appenrently started before raise SkipDownload(pyfile.pluginname) download_folder = self.core.config.get("general", "download_folder") diff --git a/pyload/plugin/account/AlldebridCom.py b/pyload/plugin/account/AlldebridCom.py index e3eb01ac2..efc5753f8 100644 --- a/pyload/plugin/account/AlldebridCom.py +++ b/pyload/plugin/account/AlldebridCom.py @@ -24,7 +24,7 @@ class AlldebridCom(Account): html = req.load("http://www.alldebrid.com/account/") soup = BeautifulSoup(html) - #Try to parse expiration date directly from the control panel page (better accuracy) + # Try to parse expiration date directly from the control panel page (better accuracy) try: time_text = soup.find('div', attrs={'class': 'remaining_time_text'}).strong.string @@ -35,7 +35,7 @@ class AlldebridCom(Account): exp_time = time.time() + int(exp_data[0]) * 24 * 60 * 60 + int( exp_data[1]) * 60 * 60 + (int(exp_data[2]) - 1) * 60 - #Get expiration date from API + # Get expiration date from API except Exception: data = self.getAccountData(user) html = req.load("http://www.alldebrid.com/api.php", diff --git a/pyload/plugin/account/CloudzillaTo.py b/pyload/plugin/account/CloudzillaTo.py index af3544da9..bee7c5a17 100644 --- 
a/pyload/plugin/account/CloudzillaTo.py +++ b/pyload/plugin/account/CloudzillaTo.py @@ -21,7 +21,7 @@ class CloudzillaTo(Account): def loadAccountInfo(self, user, req): html = req.load("http://www.cloudzilla.to/") - premium = True if re.search(self.PREMIUM_PATTERN, html) else False + premium = re.search(self.PREMIUM_PATTERN, html) is not None return {'validuntil': -1, 'trafficleft': -1, 'premium': premium} diff --git a/pyload/plugin/account/FastshareCz.py b/pyload/plugin/account/FastshareCz.py index 518746710..f1ed9d634 100644 --- a/pyload/plugin/account/FastshareCz.py +++ b/pyload/plugin/account/FastshareCz.py @@ -41,7 +41,7 @@ class FastshareCz(Account): def login(self, user, data, req): req.cj.setCookie("fastshare.cz", "lang", "en") - req.load('http://www.fastshare.cz/login') # Do not remove or it will not login + req.load('http://www.fastshare.cz/login') #: Do not remove or it will not login html = req.load("http://www.fastshare.cz/sql.php", post={'login': user, 'heslo': data['password']}, diff --git a/pyload/plugin/account/FilecloudIo.py b/pyload/plugin/account/FilecloudIo.py index 12e2d44f5..b07fe981a 100644 --- a/pyload/plugin/account/FilecloudIo.py +++ b/pyload/plugin/account/FilecloudIo.py @@ -30,7 +30,7 @@ class FilecloudIo(Account): return {"premium": False} akey = rep['akey'] - self.accounts[user]['akey'] = akey # Saved for hoster plugin + self.accounts[user]['akey'] = akey #: Saved for hoster plugin rep = req.load("http://api.filecloud.io/api-fetch_account_details.api", post={"akey": akey}) rep = json_loads(rep) diff --git a/pyload/plugin/account/FileserveCom.py b/pyload/plugin/account/FileserveCom.py index 46cb3cbad..d68285a33 100644 --- a/pyload/plugin/account/FileserveCom.py +++ b/pyload/plugin/account/FileserveCom.py @@ -38,7 +38,7 @@ class FileserveCom(Account): if not res['type']: self.wrongPassword() - #login at fileserv html + # login at fileserv html req.load("http://www.fileserve.com/login.php", post={"loginUserName": user, "loginUserPassword": data['password'], "autoLogin": "checked", "loginFormSubmit": "Login"}) diff --git a/pyload/plugin/account/HellshareCz.py b/pyload/plugin/account/HellshareCz.py index 444677c88..68843ee80 100644 --- a/pyload/plugin/account/HellshareCz.py +++ b/pyload/plugin/account/HellshareCz.py @@ -33,14 +33,14 @@ class HellshareCz(Account): premium = True try: if "." 
in credit: - #Time-based account + # Time-based account vt = [int(x) for x in credit.split('.')[:2]] lt = time.localtime() year = lt.tm_year + int(vt[1] < lt.tm_mon or (vt[1] == lt.tm_mon and vt[0] < lt.tm_mday)) validuntil = time.mktime(time.strptime("%s%d 23:59:59" % (credit, year), "%d.%m.%Y %H:%M:%S")) trafficleft = -1 else: - #Traffic-based account + # Traffic-based account trafficleft = self.parseTraffic(credit + "MB") validuntil = -1 except Exception, e: @@ -54,7 +54,7 @@ class HellshareCz(Account): def login(self, user, data, req): html = req.load('http://www.hellshare.com/', decode=True) if req.lastEffectiveURL != 'http://www.hellshare.com/': - #Switch to English + # Switch to English self.logDebug("Switch lang - URL: %s" % req.lastEffectiveURL) json = req.load("%s?do=locRouter-show" % req.lastEffectiveURL) diff --git a/pyload/plugin/account/Keep2ShareCc.py b/pyload/plugin/account/Keep2ShareCc.py index 6ee45c256..56ac5e9ab 100644 --- a/pyload/plugin/account/Keep2ShareCc.py +++ b/pyload/plugin/account/Keep2ShareCc.py @@ -46,7 +46,7 @@ class Keep2ShareCc(Account): self.logError(e) else: - premium = True if validuntil > time.mktime(time.gmtime()) else False + premium = validuntil > time.mktime(time.gmtime()) m = re.search(self.TRAFFIC_LEFT_PATTERN, html) if m: diff --git a/pyload/plugin/account/MegaRapidoNet.py b/pyload/plugin/account/MegaRapidoNet.py index 2bffff288..c4ee559da 100644 --- a/pyload/plugin/account/MegaRapidoNet.py +++ b/pyload/plugin/account/MegaRapidoNet.py @@ -29,7 +29,7 @@ class MegaRapidoNet(Account): validuntil = re.search(self.VALID_UNTIL_PATTERN, html) if validuntil: - #hier weitermachen!!! (müssen umbedingt die zeit richtig machen damit! (sollte aber möglich)) + # hier weitermachen!!! (müssen umbedingt die zeit richtig machen damit! 
(sollte aber möglich)) validuntil = time.time() + int(validuntil.group(1)) * 24 * 3600 + int(validuntil.group(2)) * 3600 + int(validuntil.group(3)) * 60 + int(validuntil.group(4)) trafficleft = -1 premium = True diff --git a/pyload/plugin/account/MegasharesCom.py b/pyload/plugin/account/MegasharesCom.py index 42ecfc17d..8920bb2db 100644 --- a/pyload/plugin/account/MegasharesCom.py +++ b/pyload/plugin/account/MegasharesCom.py @@ -20,10 +20,10 @@ class MegasharesCom(Account): def loadAccountInfo(self, user, req): - #self.relogin(user) + # self.relogin(user) html = req.load("http://d01.megashares.com/myms.php", decode=True) - premium = False if '>Premium Upgrade<' in html else True + premium = '>Premium Upgrade<' not in html validuntil = trafficleft = -1 try: diff --git a/pyload/plugin/account/MultishareCz.py b/pyload/plugin/account/MultishareCz.py index b743fd28a..66ab3dd47 100644 --- a/pyload/plugin/account/MultishareCz.py +++ b/pyload/plugin/account/MultishareCz.py @@ -20,12 +20,12 @@ class MultishareCz(Account): def loadAccountInfo(self, user, req): - #self.relogin(user) + # self.relogin(user) html = req.load("http://www.multishare.cz/profil/", decode=True) m = re.search(self.TRAFFIC_LEFT_PATTERN, html) trafficleft = self.parseTraffic(m.group('S') + m.group('U')) if m else 0 - self.premium = True if trafficleft else False + self.premium = bool(trafficleft) html = req.load("http://www.multishare.cz/", decode=True) mms_info = dict(re.findall(self.ACCOUNT_INFO_PATTERN, html)) diff --git a/pyload/plugin/account/QuickshareCz.py b/pyload/plugin/account/QuickshareCz.py index 67dd11ac3..2f71d9ae8 100644 --- a/pyload/plugin/account/QuickshareCz.py +++ b/pyload/plugin/account/QuickshareCz.py @@ -24,7 +24,7 @@ class QuickshareCz(Account): m = re.search(self.TRAFFIC_LEFT_PATTERN, html) if m: trafficleft = self.parseTraffic(m.group(1)) - premium = True if trafficleft else False + premium = bool(trafficleft) else: trafficleft = None premium = False diff --git a/pyload/plugin/account/RPNetBiz.py b/pyload/plugin/account/RPNetBiz.py index 2dfce536b..562436e85 100644 --- a/pyload/plugin/account/RPNetBiz.py +++ b/pyload/plugin/account/RPNetBiz.py @@ -26,7 +26,7 @@ class RPNetBiz(Account): account_info = {"validuntil": None, "trafficleft": None, "premium": False} except KeyError: - #handle wrong password exception + # handle wrong password exception account_info = {"validuntil": None, "trafficleft": None, "premium": False} return account_info diff --git a/pyload/plugin/account/UlozTo.py b/pyload/plugin/account/UlozTo.py index 34e56d325..c43246e7d 100644 --- a/pyload/plugin/account/UlozTo.py +++ b/pyload/plugin/account/UlozTo.py @@ -27,7 +27,7 @@ class UlozTo(Account): m = re.search(self.TRAFFIC_LEFT_PATTERN, html) trafficleft = float(m.group(1).replace(' ', '').replace(',', '.')) * 1000 * 1.048 if m else 0 - premium = True if trafficleft else False + premium = bool(trafficleft) return {'validuntil': -1, 'trafficleft': trafficleft, 'premium': premium} diff --git a/pyload/plugin/account/UploadedTo.py b/pyload/plugin/account/UploadedTo.py index 61765faf8..1b8ae5b27 100644 --- a/pyload/plugin/account/UploadedTo.py +++ b/pyload/plugin/account/UploadedTo.py @@ -28,7 +28,7 @@ class UploadedTo(Account): html = req.load("http://uploaded.net/me") - premium = True if re.search(self.PREMIUM_PATTERN, html) else False + premium = re.search(self.PREMIUM_PATTERN, html) is not None m = re.search(self.VALID_UNTIL_PATTERN, html, re.M) if m: diff --git a/pyload/plugin/account/UploadingCom.py 
b/pyload/plugin/account/UploadingCom.py index f5333dd4c..992e63615 100644 --- a/pyload/plugin/account/UploadingCom.py +++ b/pyload/plugin/account/UploadingCom.py @@ -28,7 +28,7 @@ class UploadingCom(Account): html = req.load("http://uploading.com/") - premium = False if re.search(self.PREMIUM_PATTERN, html) else True + premium = re.search(self.PREMIUM_PATTERN, html) is None m = re.search(self.VALID_UNTIL_PATTERN, html) if m: diff --git a/pyload/plugin/account/YibaishiwuCom.py b/pyload/plugin/account/YibaishiwuCom.py index ad9b089a9..e12e3f3f2 100644 --- a/pyload/plugin/account/YibaishiwuCom.py +++ b/pyload/plugin/account/YibaishiwuCom.py @@ -19,11 +19,11 @@ class YibaishiwuCom(Account): def loadAccountInfo(self, user, req): - #self.relogin(user) + # self.relogin(user) html = req.load("http://115.com/", decode=True) m = re.search(self.ACCOUNT_INFO_PATTERN, html, re.S) - premium = True if m and 'is_vip: 1' in m.group(1) else False + premium = m and 'is_vip: 1' in m.group(1) validuntil = trafficleft = (-1 if m else 0) return dict({"validuntil": validuntil, "trafficleft": trafficleft, "premium": premium}) diff --git a/pyload/plugin/addon/Checksum.py b/pyload/plugin/addon/Checksum.py index ed50b0f5e..750dd665e 100644 --- a/pyload/plugin/addon/Checksum.py +++ b/pyload/plugin/addon/Checksum.py @@ -105,8 +105,8 @@ class Checksum(Addon): self.checkFailed(pyfile, None, "No file downloaded") local_file = fs_encode(pyfile.plugin.lastDownload) - #download_folder = self.config.get("general", "download_folder") - #local_file = fs_encode(fs_join(download_folder, pyfile.package().folder, pyfile.name)) + # download_folder = self.config.get("general", "download_folder") + # local_file = fs_encode(fs_join(download_folder, pyfile.package().folder, pyfile.name)) if not isfile(local_file): self.checkFailed(pyfile, None, "File does not exist") diff --git a/pyload/plugin/addon/ExtractArchive.py b/pyload/plugin/addon/ExtractArchive.py index 7002e94f3..b0a24446a 100644 --- a/pyload/plugin/addon/ExtractArchive.py +++ b/pyload/plugin/addon/ExtractArchive.py @@ -9,7 +9,7 @@ import traceback from copy import copy # monkey patch bug in python 2.6 and lower -# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717 +# http://bugs.python.org/issue6122, http://bugs.python.org/issue1236, http://bugs.python.org/issue1731717 if sys.version_info < (2, 7) and os.name != "nt": import errno import subprocess diff --git a/pyload/plugin/addon/IRCInterface.py b/pyload/plugin/addon/IRCInterface.py index 73998b674..07a07e381 100644 --- a/pyload/plugin/addon/IRCInterface.py +++ b/pyload/plugin/addon/IRCInterface.py @@ -338,7 +338,7 @@ class IRCInterface(Thread, Addon): if not pack: return ["ERROR: Package doesn't exists."] - #TODO add links + # TODO add links return ["INFO: Added %d links to Package %s [#%d]" % (len(links), pack['name'], id)] diff --git a/pyload/plugin/addon/UpdateManager.py b/pyload/plugin/addon/UpdateManager.py index cf8349e79..84d282bde 100644 --- a/pyload/plugin/addon/UpdateManager.py +++ b/pyload/plugin/addon/UpdateManager.py @@ -120,7 +120,7 @@ class UpdateManager(Addon): reloads.append(id) self.mtimes[id] = mtime - return True if self.core.pluginManager.reloadPlugins(reloads) else False + return bool(self.core.pluginManager.reloadPlugins(reloads)) def server_response(self): diff --git a/pyload/plugin/addon/XMPPInterface.py b/pyload/plugin/addon/XMPPInterface.py index c977042e6..5ce5b5e8b 100644 --- a/pyload/plugin/addon/XMPPInterface.py +++ 
b/pyload/plugin/addon/XMPPInterface.py @@ -135,7 +135,7 @@ class XMPPInterface(IRCInterface, JabberClient): to_jid = stanza.get_from() from_jid = stanza.get_to() - #j = JID() + # j = JID() to_name = to_jid.as_utf8() from_name = from_jid.as_utf8() diff --git a/pyload/plugin/crypter/DataHu.py b/pyload/plugin/crypter/DataHu.py index dd817b5ce..ce480dacb 100644 --- a/pyload/plugin/crypter/DataHu.py +++ b/pyload/plugin/crypter/DataHu.py @@ -28,7 +28,7 @@ class DataHu(SimpleCrypter): def prepare(self): super(DataHu, self).prepare() - if u'K\xe9rlek add meg a jelsz\xf3t' in self.html: # Password protected + if u'K\xe9rlek add meg a jelsz\xf3t' in self.html: #: Password protected password = self.getPassword() if not password: self.fail(_("Password required")) @@ -37,5 +37,5 @@ class DataHu(SimpleCrypter): self.html = self.load(self.pyfile.url, post={'mappa_pass': password}, decode=True) - if u'Hib\xe1s jelsz\xf3' in self.html: # Wrong password + if u'Hib\xe1s jelsz\xf3' in self.html: #: Wrong password self.fail(_("Wrong password")) diff --git a/pyload/plugin/crypter/DevhostSt.py b/pyload/plugin/crypter/DevhostSt.py index 4fb82e0ad..46d33885f 100644 --- a/pyload/plugin/crypter/DevhostSt.py +++ b/pyload/plugin/crypter/DevhostSt.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # Test links: -# http://d-h.st/users/shine/?fld_id=37263#files +# http://d-h.st/users/shine/?fld_id=37263#files import re diff --git a/pyload/plugin/crypter/LinkCryptWs.py b/pyload/plugin/crypter/LinkCryptWs.py index 79a59889a..c997cbf9f 100644 --- a/pyload/plugin/crypter/LinkCryptWs.py +++ b/pyload/plugin/crypter/LinkCryptWs.py @@ -74,7 +74,7 @@ class LinkCryptWs(Crypter): # Get package name and folder package_name, folder_name = self.getPackageInfo() - #get the container definitions from script section + # get the container definitions from script section self.get_container_html() # Extract package links @@ -238,7 +238,7 @@ class LinkCryptWs(Crypter): self.logDebug('Search for %s Container links' % type.upper()) - if not type.isalnum(): # check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric) + if not type.isalnum(): #: check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric) self.fail(_("Unknown container type: %s") % type) #@TODO: Replace with self.error in 0.4.10 for line in self.container_html: diff --git a/pyload/plugin/crypter/MediafireCom.py b/pyload/plugin/crypter/MediafireCom.py index 479d1a70a..aae727a90 100644 --- a/pyload/plugin/crypter/MediafireCom.py +++ b/pyload/plugin/crypter/MediafireCom.py @@ -46,7 +46,7 @@ class MediafireCom(Crypter): get={'folder_key' : folder_key, 'response_format': "json", 'version' : 1})) - #self.logInfo(json_resp) + # self.logInfo(json_resp) if json_resp['response']['result'] == "Success": for link in json_resp['response']['folder_info']['files']: self.urls.append("http://www.mediafire.com/file/%s" % link['quickkey']) diff --git a/pyload/plugin/crypter/NCryptIn.py b/pyload/plugin/crypter/NCryptIn.py index d59fbd6a9..bc9702f21 100644 --- a/pyload/plugin/crypter/NCryptIn.py +++ b/pyload/plugin/crypter/NCryptIn.py @@ -69,7 +69,7 @@ class NCryptIn(Crypter): # Extract package links for link_source_type in self.links_source_order: package_links.extend(self.handleLinkSource(link_source_type)) - if package_links: # use only first source which provides links + if package_links: #: use only first source which provides links break package_links = set(package_links) diff --git a/pyload/plugin/crypter/RelinkUs.py b/pyload/plugin/crypter/RelinkUs.py 
index 6296e9f40..2b9a85401 100644 --- a/pyload/plugin/crypter/RelinkUs.py +++ b/pyload/plugin/crypter/RelinkUs.py @@ -89,7 +89,7 @@ class RelinkUs(Crypter): package_links = [] for sources in self.PREFERRED_LINK_SOURCES: package_links.extend(self.handleLinkSource(sources)) - if package_links: # use only first source which provides links + if package_links: #: use only first source which provides links break package_links = set(package_links) diff --git a/pyload/plugin/crypter/YoutubeComFolder.py b/pyload/plugin/crypter/YoutubeComFolder.py index 84277207a..220c1dfbb 100644 --- a/pyload/plugin/crypter/YoutubeComFolder.py +++ b/pyload/plugin/crypter/YoutubeComFolder.py @@ -43,7 +43,7 @@ class YoutubeComFolder(Crypter): return {"id": channel['id'], "title": channel['snippet']['title'], "relatedPlaylists": channel['contentDetails']['relatedPlaylists'], - "user": user} # One lone channel for user? + "user": user} #: One lone channel for user? def getPlaylist(self, p_id): diff --git a/pyload/plugin/extractor/SevenZip.py b/pyload/plugin/extractor/SevenZip.py index 9d01965e0..3c5e86602 100644 --- a/pyload/plugin/extractor/SevenZip.py +++ b/pyload/plugin/extractor/SevenZip.py @@ -134,11 +134,11 @@ class SevenZip(UnRar): def call_cmd(self, command, *xargs, **kwargs): args = [] - #overwrite flag + # overwrite flag if self.overwrite: args.append("-y") - #set a password + # set a password if "password" in kwargs and kwargs['password']: args.append("-p%s" % kwargs['password']) else: diff --git a/pyload/plugin/extractor/UnRar.py b/pyload/plugin/extractor/UnRar.py index cad58ff4f..38ae4e8f2 100644 --- a/pyload/plugin/extractor/UnRar.py +++ b/pyload/plugin/extractor/UnRar.py @@ -81,7 +81,7 @@ class UnRar(Extractor): @classmethod def isMultipart(cls, filename): - return True if cls.re_multipart.search(filename) else False + return cls.re_multipart.search(filename) is not None def verify(self, password): diff --git a/pyload/plugin/hook/BypassCaptcha.py b/pyload/plugin/hook/BypassCaptcha.py index 545ec5e31..95be5f3a6 100644 --- a/pyload/plugin/hook/BypassCaptcha.py +++ b/pyload/plugin/hook/BypassCaptcha.py @@ -57,7 +57,7 @@ class BypassCaptcha(Hook): def submit(self, captcha, captchaType="file", match=None): req = getRequest() - #raise timeout threshold + # raise timeout threshold req.c.setopt(LOW_SPEED_TIME, 80) try: diff --git a/pyload/plugin/hook/ExpertDecoders.py b/pyload/plugin/hook/ExpertDecoders.py index 2e2982d2d..843c786bc 100644 --- a/pyload/plugin/hook/ExpertDecoders.py +++ b/pyload/plugin/hook/ExpertDecoders.py @@ -54,7 +54,7 @@ class ExpertDecoders(Hook): data = f.read() req = getRequest() - #raise timeout threshold + # raise timeout threshold req.c.setopt(LOW_SPEED_TIME, 80) try: diff --git a/pyload/plugin/hook/ImageTyperz.py b/pyload/plugin/hook/ImageTyperz.py index 641016546..c134cdaeb 100644 --- a/pyload/plugin/hook/ImageTyperz.py +++ b/pyload/plugin/hook/ImageTyperz.py @@ -69,7 +69,7 @@ class ImageTyperz(Hook): def submit(self, captcha, captchaType="file", match=None): req = getRequest() - #raise timeout threshold + # raise timeout threshold req.c.setopt(LOW_SPEED_TIME, 80) try: diff --git a/pyload/plugin/hook/MegaDebridEu.py b/pyload/plugin/hook/MegaDebridEu.py index a069cbcdd..41abce37b 100644 --- a/pyload/plugin/hook/MegaDebridEu.py +++ b/pyload/plugin/hook/MegaDebridEu.py @@ -28,6 +28,6 @@ class MegaDebridEu(MultiHook): host_list = [element[0] for element in json_data['hosters']] else: self.logError(_("Unable to retrieve hoster list")) - host_list = list() + host_list = [] return host_list 
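Several hunks in this merge follow the same two refactor patterns: accumulate-in-a-loop code becomes a comprehension (listUsers, getStorage), and "True if x else False" becomes a plain boolean expression. The rewritten getAllUserData body earlier in this diff, however, reads as a set comprehension of dicts rather than the name-keyed dict the old loop built. Below is a minimal standalone sketch of both patterns, assuming plain tuple rows as sqlite3 returns them; the sample data and names are illustrative only, not pyLoad's API.

# Sketch only: illustrative data standing in for database cursor rows.
rows = [("admin", 1, 0, "default", "admin@example.com"),
        ("guest", 0, 1, "default", "guest@example.com")]

# listUsers-style refactor: loop-and-append becomes a list comprehension.
users = [row[0] for row in rows]

# getStorage-style refactor: key/value loop becomes a dict comprehension.
pairs = [("queue", "5"), ("theme", "dark")]
storage = {key: value for key, value in pairs}

# getAllUserData: keeping the row's first column as the key preserves the
# original name-keyed mapping (a set comprehension of dicts would drop the
# username and fail anyway, since dicts are unhashable).
user_data = {r[0]: {"permission": r[1], "role": r[2],
                    "template": r[3], "email": r[4]} for r in rows}

# Boolean simplifications: "True if x else False" collapses to bool(x),
# and "False if a == b else True" to a direct comparison.
premium = bool(storage)
can_use = ("user", "secret") != (None, None)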
diff --git a/pyload/plugin/hook/XFileSharingPro.py b/pyload/plugin/hook/XFileSharingPro.py index 3c16c618a..7fee029da 100644 --- a/pyload/plugin/hook/XFileSharingPro.py +++ b/pyload/plugin/hook/XFileSharingPro.py @@ -32,11 +32,11 @@ class XFileSharingPro(Hook): "backin.net", "eyesfile.ca", "file4safe.com", "fileband.com", "filedwon.com", "fileparadox.in", "filevice.com", "hostingbulk.com", "junkyvideo.com", "linestorage.com", "ravishare.com", "ryushare.com", "salefiles.com", "sendmyway.com", "sharesix.com", "thefile.me", "verzend.be", "xvidstage.com", - #NOT TESTED: + # NOT TESTED: "101shared.com", "4upfiles.com", "filemaze.ws", "filenuke.com", "linkzhost.com", "mightyupload.com", "rockdizfile.com", "sharebeast.com", "sharerepo.com", "shareswift.com", "uploadbaz.com", "uploadc.com", "vidbull.com", "zalaa.com", "zomgupload.com", - #NOT WORKING: + # NOT WORKING: "amonshare.com", "banicrazy.info", "boosterking.com", "host4desi.com", "laoupload.com", "rd-fs.com"] CRYPTER_BUILTIN = ["junocloud.me", "rapidfileshare.net"] diff --git a/pyload/plugin/hoster/BezvadataCz.py b/pyload/plugin/hoster/BezvadataCz.py index 3eab28ae6..fa29c83d5 100644 --- a/pyload/plugin/hoster/BezvadataCz.py +++ b/pyload/plugin/hoster/BezvadataCz.py @@ -29,13 +29,13 @@ class BezvadataCz(SimpleHoster): def handleFree(self, pyfile): - #download button + # download button m = re.search(r'<a class="stahnoutSoubor".*?href="(.*?)"', self.html) if m is None: self.error(_("Page 1 URL not found")) url = "http://bezvadata.cz%s" % m.group(1) - #captcha form + # captcha form self.html = self.load(url) self.checkErrors() for _i in xrange(5): @@ -47,7 +47,7 @@ class BezvadataCz(SimpleHoster): if m is None: self.error(_("Wrong captcha image")) - #captcha image is contained in html page as base64encoded data but decryptCaptcha() expects image url + # captcha image is contained in html page as base64encoded data but decryptCaptcha() expects image url self.load, proper_load = self.loadcaptcha, self.load try: inputs['captcha'] = self.decryptCaptcha(m.group(1), imgtype='png') @@ -62,7 +62,7 @@ class BezvadataCz(SimpleHoster): else: self.fail(_("No valid captcha code entered")) - #download url + # download url self.html = self.load("http://bezvadata.cz%s" % action, post=inputs) self.checkErrors() m = re.search(r'<a class="stahnoutSoubor2" href="(.*?)">', self.html) @@ -71,7 +71,7 @@ class BezvadataCz(SimpleHoster): url = "http://bezvadata.cz%s" % m.group(1) self.logDebug("DL URL %s" % url) - #countdown + # countdown m = re.search(r'id="countdown">(\d\d):(\d\d)<', self.html) wait_time = (int(m.group(1)) * 60 + int(m.group(2))) if m else 120 self.wait(wait_time, False) diff --git a/pyload/plugin/hoster/CzshareCom.py b/pyload/plugin/hoster/CzshareCom.py index da9aa68d5..9926c8d74 100644 --- a/pyload/plugin/hoster/CzshareCom.py +++ b/pyload/plugin/hoster/CzshareCom.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # Test links: -# http://czshare.com/5278880/random.bin +# http://czshare.com/5278880/random.bin import re diff --git a/pyload/plugin/hoster/DataHu.py b/pyload/plugin/hoster/DataHu.py index 1f44e62e5..ba3576d10 100644 --- a/pyload/plugin/hoster/DataHu.py +++ b/pyload/plugin/hoster/DataHu.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # Test links: -# http://data.hu/get/6381232/random.bin +# http://data.hu/get/6381232/random.bin import re diff --git a/pyload/plugin/hoster/DlFreeFr.py b/pyload/plugin/hoster/DlFreeFr.py index 151d0ca9f..684da2b6d 100644 --- a/pyload/plugin/hoster/DlFreeFr.py +++ b/pyload/plugin/hoster/DlFreeFr.py @@ -117,7 +117,7 
@@ class DlFreeFr(SimpleHoster): def getLastHeaders(self): - #parse header + # parse header header = {"code": self.req.code} for line in self.req.http.header.splitlines(): line = line.strip() diff --git a/pyload/plugin/hoster/FileomCom.py b/pyload/plugin/hoster/FileomCom.py index bac912c56..b01b34db0 100644 --- a/pyload/plugin/hoster/FileomCom.py +++ b/pyload/plugin/hoster/FileomCom.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # Test links: -# http://fileom.com/gycaytyzdw3g/random.bin.html +# http://fileom.com/gycaytyzdw3g/random.bin.html from pyload.plugin.internal.XFSHoster import XFSHoster diff --git a/pyload/plugin/hoster/FilepupNet.py b/pyload/plugin/hoster/FilepupNet.py index 72237285c..91d640e00 100644 --- a/pyload/plugin/hoster/FilepupNet.py +++ b/pyload/plugin/hoster/FilepupNet.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- # # Test links: -# http://www.filepup.net/files/k5w4ZVoF1410184283.html -# http://www.filepup.net/files/R4GBq9XH1410186553.html +# http://www.filepup.net/files/k5w4ZVoF1410184283.html +# http://www.filepup.net/files/R4GBq9XH1410186553.html import re diff --git a/pyload/plugin/hoster/FilerNet.py b/pyload/plugin/hoster/FilerNet.py index 86a5809da..fbefba8db 100644 --- a/pyload/plugin/hoster/FilerNet.py +++ b/pyload/plugin/hoster/FilerNet.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- # # Test links: -# http://filer.net/get/ivgf5ztw53et3ogd -# http://filer.net/get/hgo14gzcng3scbvv +# http://filer.net/get/ivgf5ztw53et3ogd +# http://filer.net/get/hgo14gzcng3scbvv import pycurl import re diff --git a/pyload/plugin/hoster/FilesMailRu.py b/pyload/plugin/hoster/FilesMailRu.py index 024da64cd..0d7f47536 100644 --- a/pyload/plugin/hoster/FilesMailRu.py +++ b/pyload/plugin/hoster/FilesMailRu.py @@ -49,17 +49,17 @@ class FilesMailRu(Hoster): self.html = self.load(pyfile.url) self.url_pattern = '<a href="(.+?)" onclick="return Act\(this\, \'dlink\'\, event\)">(.+?)</a>' - #marks the file as "offline" when the pattern was found on the html-page''' + # marks the file as "offline" when the pattern was found on the html-page''' if r'<div class="errorMessage mb10">' in self.html: self.offline() elif r'Page cannot be displayed' in self.html: self.offline() - #the filename that will be showed in the list (e.g. test.part1.rar)''' + # the filename that will be showed in the list (e.g. 
test.part1.rar)''' pyfile.name = self.getFileName() - #prepare and download''' + # prepare and download''' if not self.account: self.prepare() self.download(self.getFileUrl()) diff --git a/pyload/plugin/hoster/FileserveCom.py b/pyload/plugin/hoster/FileserveCom.py index 5f93cbd10..1d4179e5c 100644 --- a/pyload/plugin/hoster/FileserveCom.py +++ b/pyload/plugin/hoster/FileserveCom.py @@ -134,7 +134,7 @@ class FileserveCom(Hoster): self.wait() self.retry() - self.thread.m.reconnecting.wait(3) # Ease issue with later downloads appearing to be in parallel + self.thread.m.reconnecting.wait(3) #: Ease issue with later downloads appearing to be in parallel def doTimmer(self): @@ -185,7 +185,7 @@ class FileserveCom(Hoster): def handlePremium(self, pyfile): premium_url = None if self.getClassName() == "FileserveCom": - #try api download + # try api download res = self.load("http://app.fileserve.com/api/download/premium/", post={"username": self.user, "password": self.account.getAccountData(self.user)['password'], diff --git a/pyload/plugin/hoster/FreakshareCom.py b/pyload/plugin/hoster/FreakshareCom.py index 64d8f8308..6cf447128 100644 --- a/pyload/plugin/hoster/FreakshareCom.py +++ b/pyload/plugin/hoster/FreakshareCom.py @@ -87,7 +87,7 @@ class FreakshareCom(Hoster): def download_html(self): - self.load("http://freakshare.com/index.php", {"language": "EN"}) # Set english language in server session + self.load("http://freakshare.com/index.php", {"language": "EN"}) #: Set english language in server session self.html = self.load(self.pyfile.url) @@ -97,9 +97,9 @@ class FreakshareCom(Hoster): if not self.html: self.download_html() if not self.wantReconnect: - self.req_opts = self.get_download_options() # get the Post options for the Request - #file_url = self.pyfile.url - #return file_url + self.req_opts = self.get_download_options() #: get the Post options for the Request + # file_url = self.pyfile.url + # return file_url else: self.offline() @@ -163,11 +163,11 @@ class FreakshareCom(Hoster): def get_download_options(self): re_envelope = re.search(r".*?value=\"Free\sDownload\".*?\n*?(.*?<.*?>\n*)*?\n*\s*?</form>", - self.html).group(0) # get the whole request + self.html).group(0) #: get the whole request to_sort = re.findall(r"<input\stype=\"hidden\"\svalue=\"(.*?)\"\sname=\"(.*?)\"\s\/>", re_envelope) request_options = dict((n, v) for (v, n) in to_sort) - herewego = self.load(self.pyfile.url, None, request_options) # the actual download-Page + herewego = self.load(self.pyfile.url, None, request_options) #: the actual download-Page to_sort = re.findall(r"<input\stype=\".*?\"\svalue=\"(\S*?)\".*?name=\"(\S*?)\"\s.*?\/>", herewego) request_options = dict((n, v) for (v, n) in to_sort) diff --git a/pyload/plugin/hoster/Ftp.py b/pyload/plugin/hoster/Ftp.py index 42ef3c357..86049df04 100644 --- a/pyload/plugin/hoster/Ftp.py +++ b/pyload/plugin/hoster/Ftp.py @@ -64,12 +64,12 @@ class Ftp(Hoster): pyfile.size = int(m.group(1)) self.download(pyfile.url) else: - #Naive ftp directory listing + # Naive ftp directory listing if re.search(r'^25\d.*?"', self.req.http.header, re.M): pyfile.url = pyfile.url.rstrip('/') pkgname = "/".join(pyfile.package().name, urlparse(pyfile.url).path.rpartition('/')[2]) pyfile.url += '/' - self.req.http.c.setopt(48, 1) # CURLOPT_DIRLISTONLY + self.req.http.c.setopt(48, 1) #: CURLOPT_DIRLISTONLY res = self.load(pyfile.url, decode=False) links = [pyfile.url + quote(x) for x in res.splitlines()] self.logDebug("LINKS", links) diff --git a/pyload/plugin/hoster/GooIm.py 
b/pyload/plugin/hoster/GooIm.py index 8fd958660..322dd6101 100644 --- a/pyload/plugin/hoster/GooIm.py +++ b/pyload/plugin/hoster/GooIm.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # Test links: -# https://goo.im/devs/liquidsmooth/3.x/codina/Nightly/LS-KK-v3.2-2014-08-01-codina.zip +# https://goo.im/devs/liquidsmooth/3.x/codina/Nightly/LS-KK-v3.2-2014-08-01-codina.zip import re diff --git a/pyload/plugin/hoster/LetitbitNet.py b/pyload/plugin/hoster/LetitbitNet.py index 0cfc225e4..35f5f9cf4 100644 --- a/pyload/plugin/hoster/LetitbitNet.py +++ b/pyload/plugin/hoster/LetitbitNet.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- # # API Documentation: -# http://api.letitbit.net/reg/static/api.pdf +# http://api.letitbit.net/reg/static/api.pdf # # Test links: -# http://letitbit.net/download/07874.0b5709a7d3beee2408bb1f2eefce/random.bin.html +# http://letitbit.net/download/07874.0b5709a7d3beee2408bb1f2eefce/random.bin.html import re diff --git a/pyload/plugin/hoster/LoadTo.py b/pyload/plugin/hoster/LoadTo.py index 0b4d40fe9..3a625dbe3 100644 --- a/pyload/plugin/hoster/LoadTo.py +++ b/pyload/plugin/hoster/LoadTo.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- # # Test links: -# http://www.load.to/JWydcofUY6/random.bin -# http://www.load.to/oeSmrfkXE/random100.bin +# http://www.load.to/JWydcofUY6/random.bin +# http://www.load.to/oeSmrfkXE/random100.bin import re diff --git a/pyload/plugin/hoster/MegaCoNz.py b/pyload/plugin/hoster/MegaCoNz.py index 496d4503f..9dea99b23 100644 --- a/pyload/plugin/hoster/MegaCoNz.py +++ b/pyload/plugin/hoster/MegaCoNz.py @@ -126,8 +126,8 @@ class MegaCoNz(Hoster): except IOError, e: self.fail(e) - chunk_size = 2 ** 15 # buffer size, 32k - # file_mac = [0, 0, 0, 0] # calculate CBC-MAC for checksum + chunk_size = 2 ** 15 #: buffer size, 32k + # file_mac = [0, 0, 0, 0] #: calculate CBC-MAC for checksum chunks = os.path.getsize(file_crypted) / chunk_size + 1 for i in xrange(chunks): diff --git a/pyload/plugin/hoster/MegaRapidoNet.py b/pyload/plugin/hoster/MegaRapidoNet.py index 311189d1c..54a167b65 100644 --- a/pyload/plugin/hoster/MegaRapidoNet.py +++ b/pyload/plugin/hoster/MegaRapidoNet.py @@ -8,7 +8,7 @@ from pyload.plugin.internal.MultiHoster import MultiHoster def random_with_N_digits(n): rand = "0." 
not_zero = 0 - for _i in range(1, n + 1): + for _i in xrange(1, n + 1): r = randint(0, 9) if(r > 0): not_zero += 1 diff --git a/pyload/plugin/hoster/NetloadIn.py b/pyload/plugin/hoster/NetloadIn.py index 51bc2a600..f4421615f 100644 --- a/pyload/plugin/hoster/NetloadIn.py +++ b/pyload/plugin/hoster/NetloadIn.py @@ -120,7 +120,7 @@ class NetloadIn(Hoster): match = id_regex.search(url) if match: - #normalize url + # normalize url self.url = 'http://www.netload.in/datei%s.htm' % match.group('ID') self.logDebug("URL: %s" % self.url) else: @@ -152,9 +152,9 @@ class NetloadIn(Hoster): if self.api_data['status'] == "online": self.api_data['checksum'] = lines[4].strip() else: - self.api_data = False # check manually since api data is useless sometimes + self.api_data = False #: check manually since api data is useless sometimes - if lines[0] == lines[1] and lines[2] == "0": # useless api data + if lines[0] == lines[1] and lines[2] == "0": #: useless api data self.api_data = False else: self.api_data = False diff --git a/pyload/plugin/hoster/NovafileCom.py b/pyload/plugin/hoster/NovafileCom.py index 82f92959b..f76d77269 100644 --- a/pyload/plugin/hoster/NovafileCom.py +++ b/pyload/plugin/hoster/NovafileCom.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- # # Test links: -# http://novafile.com/vfun4z6o2cit -# http://novafile.com/s6zrr5wemuz4 +# http://novafile.com/vfun4z6o2cit +# http://novafile.com/s6zrr5wemuz4 from pyload.plugin.internal.XFSHoster import XFSHoster diff --git a/pyload/plugin/hoster/OboomCom.py b/pyload/plugin/hoster/OboomCom.py index 07c40a397..5b9b11485 100644 --- a/pyload/plugin/hoster/OboomCom.py +++ b/pyload/plugin/hoster/OboomCom.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # Test links: -# https://www.oboom.com/B7CYZIEB/10Mio.dat +# https://www.oboom.com/B7CYZIEB/10Mio.dat import re @@ -99,7 +99,7 @@ class OboomCom(Hoster): self.retry(5, 15 * 60, _("Service unavailable")) elif result[0] == 403: - if result[1] == -1: # another download is running + if result[1] == -1: #: another download is running self.setWait(15 * 60) else: self.setWait(result[1], True) diff --git a/pyload/plugin/hoster/PornhostCom.py b/pyload/plugin/hoster/PornhostCom.py index f6a63117c..103882166 100644 --- a/pyload/plugin/hoster/PornhostCom.py +++ b/pyload/plugin/hoster/PornhostCom.py @@ -47,7 +47,7 @@ class PornhostCom(Hoster): url = re.search(r'width: 894px; height: 675px">.*?<img src="(.*?)"', self.html) if url is None: url = re.search(r'"http://file\d+\.pornhost\.com/\d+/.*?"', - self.html) # TODO: fix this one since it doesn't match + self.html) #: TODO: fix this one since it doesn't match return url.group(1).strip() diff --git a/pyload/plugin/hoster/PremiumTo.py b/pyload/plugin/hoster/PremiumTo.py index c7ee09565..750e965d2 100644 --- a/pyload/plugin/hoster/PremiumTo.py +++ b/pyload/plugin/hoster/PremiumTo.py @@ -27,7 +27,7 @@ class PremiumTo(MultiHoster): def handlePremium(self, pyfile): - #raise timeout to 2min + # raise timeout to 2min self.download("http://premium.to/api/getfile.php", get={'username': self.account.username, 'password': self.account.password, diff --git a/pyload/plugin/hoster/PremiumizeMe.py b/pyload/plugin/hoster/PremiumizeMe.py index 809d27624..f577da90e 100644 --- a/pyload/plugin/hoster/PremiumizeMe.py +++ b/pyload/plugin/hoster/PremiumizeMe.py @@ -20,7 +20,7 @@ class PremiumizeMe(MultiHoster): def handlePremium(self, pyfile): # In some cases hostsers do not supply us with a filename at download, so we # are going to set a fall back filename (e.g. 
-        pyfile.name = pyfile.name.split('/').pop()  # Remove everthing before last slash
+        pyfile.name = pyfile.name.split('/').pop()  #: Remove everthing before last slash

         # Correction for automatic assigned filename: Removing html at end if needed
         suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
diff --git a/pyload/plugin/hoster/RPNetBiz.py b/pyload/plugin/hoster/RPNetBiz.py
index 6788eebce..dc11eefb2 100644
--- a/pyload/plugin/hoster/RPNetBiz.py
+++ b/pyload/plugin/hoster/RPNetBiz.py
@@ -34,12 +34,12 @@
                                   "links"   : pyfile.url})
         self.logDebug("JSON data: %s" % res)

-        link_status = json_loads(res)['links'][0]  # get the first link... since we only queried one
+        link_status = json_loads(res)['links'][0]  #: get the first link... since we only queried one

         # Check if we only have an id as a HDD link
         if 'id' in link_status:
             self.logDebug("Need to wait at least 30 seconds before requery")
-            self.setWait(30)  # wait for 30 seconds
+            self.setWait(30)  #: wait for 30 seconds
             self.wait()
             # Lets query the server again asking for the status on the link,
             # we need to keep doing this until we reach 100
@@ -66,7 +66,7 @@ class RPNetBiz(MultiHoster):
                 self.wait()
                 my_try += 1

-            if my_try > max_tries:  # We went over the limit!
+            if my_try > max_tries:  #: We went over the limit!
                 self.fail(_("Waited for about 15 minutes for download to finish but failed"))

         if 'generated' in link_status:
diff --git a/pyload/plugin/hoster/RemixshareCom.py b/pyload/plugin/hoster/RemixshareCom.py
index 6e376fd6d..ba61887cd 100644
--- a/pyload/plugin/hoster/RemixshareCom.py
+++ b/pyload/plugin/hoster/RemixshareCom.py
@@ -1,12 +1,12 @@
 # -*- coding: utf-8 -*-
 #
 # Test links:
-# http://remixshare.com/download/z8uli
+# http://remixshare.com/download/z8uli
 #
 # Note:
-# The remixshare.com website is very very slow, so
-# if your download not starts because of pycurl timeouts:
-# Adjust timeouts in /usr/share/pyload/pyload/network/HTTPRequest.py
+# The remixshare.com website is very very slow, so
+# if your download not starts because of pycurl timeouts:
+# Adjust timeouts in /usr/share/pyload/pyload/network/HTTPRequest.py

 import re
diff --git a/pyload/plugin/hoster/ShareplaceCom.py b/pyload/plugin/hoster/ShareplaceCom.py
index 7d9630d20..08fb966b8 100644
--- a/pyload/plugin/hoster/ShareplaceCom.py
+++ b/pyload/plugin/hoster/ShareplaceCom.py
@@ -40,7 +40,7 @@ class ShareplaceCom(Hoster):
         if not self.html:
             self.download_html()

-        #var zzipitime = 15;
+        # var zzipitime = 15;
         m = re.search(r'var zzipitime = (\d+);', self.html)
         if m:
             sec = int(m.group(1))
diff --git a/pyload/plugin/hoster/SimplydebridCom.py b/pyload/plugin/hoster/SimplydebridCom.py
index 2fa68b508..d703c3e52 100644
--- a/pyload/plugin/hoster/SimplydebridCom.py
+++ b/pyload/plugin/hoster/SimplydebridCom.py
@@ -19,7 +19,7 @@ class SimplydebridCom(MultiHoster):

     def handlePremium(self, pyfile):
-        #fix the links for simply-debrid.com!
+        # fix the links for simply-debrid.com!
         self.link = replace_patterns(pyfile.url, [("clz.to", "cloudzer.net/file")
                                                   ("http://share-online", "http://www.share-online")
                                                   ("ul.to", "uploaded.net/file")
diff --git a/pyload/plugin/hoster/SmoozedCom.py b/pyload/plugin/hoster/SmoozedCom.py
index 1ed3a539d..f216a95bc 100644
--- a/pyload/plugin/hoster/SmoozedCom.py
+++ b/pyload/plugin/hoster/SmoozedCom.py
@@ -20,7 +20,7 @@ class SmoozedCom(MultiHoster):
     def handlePremium(self, pyfile):
         # In some cases hostsers do not supply us with a filename at download, so we
         # are going to set a fall back filename (e.g. for freakshare or xfileshare)
-        pyfile.name = pyfile.name.split('/').pop()  # Remove everthing before last slash
+        pyfile.name = pyfile.name.split('/').pop()  #: Remove everthing before last slash

         # Correction for automatic assigned filename: Removing html at end if needed
         suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
diff --git a/pyload/plugin/hoster/SolidfilesCom.py b/pyload/plugin/hoster/SolidfilesCom.py
index 39e5dd010..9998f26ad 100644
--- a/pyload/plugin/hoster/SolidfilesCom.py
+++ b/pyload/plugin/hoster/SolidfilesCom.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 #
 # Test links:
-# http://www.solidfiles.com/d/609cdb4b1b
+# http://www.solidfiles.com/d/609cdb4b1b

 from pyload.plugin.internal.SimpleHoster import SimpleHoster
diff --git a/pyload/plugin/hoster/SpeedyshareCom.py b/pyload/plugin/hoster/SpeedyshareCom.py
index 99626c765..b6d0a5898 100644
--- a/pyload/plugin/hoster/SpeedyshareCom.py
+++ b/pyload/plugin/hoster/SpeedyshareCom.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 #
 # Test links:
-# http://speedy.sh/ep2qY/Zapp-Brannigan.jpg
+# http://speedy.sh/ep2qY/Zapp-Brannigan.jpg

 import re
diff --git a/pyload/plugin/hoster/UlozTo.py b/pyload/plugin/hoster/UlozTo.py
index b2e31dccf..49d5d2ac1 100644
--- a/pyload/plugin/hoster/UlozTo.py
+++ b/pyload/plugin/hoster/UlozTo.py
@@ -123,7 +123,7 @@
             "wrong_captcha": re.compile(r'<ul class="error">\s*<li>Error rewriting the text.</li>'),
             "offline"      : re.compile(self.OFFLINE_PATTERN),
             "passwd"       : self.PASSWD_PATTERN,
-            "server_error" : 'src="http://img.ulozto.cz/error403/vykricnik.jpg"',  # paralell dl, server overload etc.
+            "server_error" : 'src="http://img.ulozto.cz/error403/vykricnik.jpg"',  #: paralell dl, server overload etc.
             "not_found"    : "<title>Ulož.to</title>"
         })
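
The handlePremium() comments in the PremiumizeMe.py and SmoozedCom.py hunks above explain the fallback-filename logic: when a hoster does not supply a filename, keep only the part after the last slash and strip an automatically assigned html-like suffix. A minimal standalone sketch of that idea, with illustrative names only (this is not the plugin code itself):

    SUFFIX_TO_REMOVE = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]

    def fallback_name(url):
        name = url.split('/').pop()  #: keep only the part after the last slash
        root, _, ext = name.rpartition('.')
        if root and ext.lower() in SUFFIX_TO_REMOVE:
            name = root  #: drop an automatically assigned html-like suffix
        return name

    # e.g. fallback_name("http://example.com/path/random.bin.html") -> "random.bin"
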
"not_found" : "<title>Ulož.to</title>" }) diff --git a/pyload/plugin/hoster/UploadedTo.py b/pyload/plugin/hoster/UploadedTo.py index 487c01576..c8b201ec6 100644 --- a/pyload/plugin/hoster/UploadedTo.py +++ b/pyload/plugin/hoster/UploadedTo.py @@ -57,7 +57,7 @@ class UploadedTo(SimpleHoster): def setup(self): self.multiDL = self.resumeDownload = self.premium - self.chunkLimit = 1 # critical problems with more chunks + self.chunkLimit = 1 #: critical problems with more chunks def checkErrors(self): @@ -68,14 +68,14 @@ class UploadedTo(SimpleHoster): elif "limit-size" in self.html: self.fail(_("File too big for free download")) - elif "limit-slot" in self.html: # Temporary restriction so just wait a bit + elif "limit-slot" in self.html: #: Temporary restriction so just wait a bit self.wait(30 * 60, True) self.retry() elif "limit-parallel" in self.html: self.fail(_("Cannot download in parallel")) - elif "limit-dl" in self.html or self.DL_LIMIT_ERROR in self.html: # limit-dl + elif "limit-dl" in self.html or self.DL_LIMIT_ERROR in self.html: #: limit-dl self.wait(3 * 60 * 60, True) self.retry() diff --git a/pyload/plugin/hoster/UploadheroCom.py b/pyload/plugin/hoster/UploadheroCom.py index 912f4c505..d1c9fd2c7 100644 --- a/pyload/plugin/hoster/UploadheroCom.py +++ b/pyload/plugin/hoster/UploadheroCom.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # Test links: -# http://uploadhero.co/dl/wQBRAVSM +# http://uploadhero.co/dl/wQBRAVSM import re diff --git a/pyload/plugin/hoster/VidPlayNet.py b/pyload/plugin/hoster/VidPlayNet.py index ab571f9ea..5d98d2fb3 100644 --- a/pyload/plugin/hoster/VidPlayNet.py +++ b/pyload/plugin/hoster/VidPlayNet.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # Test links: -# BigBuckBunny_320x180.mp4 - 61.7 Mb - http://vidplay.net/38lkev0h3jv0 +# http://vidplay.net/38lkev0h3jv0 from pyload.plugin.internal.XFSHoster import XFSHoster diff --git a/pyload/plugin/hoster/Xdcc.py b/pyload/plugin/hoster/Xdcc.py index 30c7b7c22..6943f495e 100644 --- a/pyload/plugin/hoster/Xdcc.py +++ b/pyload/plugin/hoster/Xdcc.py @@ -29,7 +29,7 @@ class Xdcc(Hoster): def setup(self): - self.debug = 0 # 0,1,2 + self.debug = 0 #: 0,1,2 self.timeout = 30 self.multiDL = False @@ -62,7 +62,7 @@ class Xdcc(Hoster): def doDownload(self, url): - self.pyfile.setStatus("waiting") # real link + self.pyfile.setStatus("waiting") #: real link m = re.match(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url) server = m.group(1) @@ -89,7 +89,7 @@ class Xdcc(Hoster): sock = socket.socket() sock.connect((host, int(port))) if nick == "pyload": - nick = "pyload-%d" % (time.time() % 1000) # last 3 digits + nick = "pyload-%d" % (time.time() % 1000) #: last 3 digits sock.send("NICK %s\r\n" % nick) sock.send("USER %s %s bla :%s\r\n" % (ident, host, real)) @@ -161,7 +161,7 @@ class Xdcc(Hoster): self.logDebug("Sending CTCP TIME") sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time())) elif msg['text'] == "\x01LAG\x01": - pass # don't know how to answer + pass #: don't know how to answer if not (bot == msg['origin'][0:len(bot)] and nick == msg['target'][0:len(nick)] diff --git a/pyload/plugin/hoster/YourfilesTo.py b/pyload/plugin/hoster/YourfilesTo.py index a3d5e310c..f5e778741 100644 --- a/pyload/plugin/hoster/YourfilesTo.py +++ b/pyload/plugin/hoster/YourfilesTo.py @@ -41,7 +41,7 @@ class YourfilesTo(Hoster): if not self.html: self.download_html() - #var zzipitime = 15; + # var zzipitime = 15; m = re.search(r'var zzipitime = (\d+);', self.html) if m: sec = int(m.group(1)) diff --git a/pyload/plugin/hoster/YoutubeCom.py 
index 14503be03..5db9957f8 100644
--- a/pyload/plugin/hoster/YoutubeCom.py
+++ b/pyload/plugin/hoster/YoutubeCom.py
@@ -94,7 +94,7 @@ class YoutubeCom(Hoster):
         if "We have been receiving a large volume of requests from your network." in html:
             self.tempOffline()

-        #get config
+        # get config
         use3d = self.getConfig('3d')

         if use3d:
@@ -113,7 +113,7 @@ class YoutubeCom(Hoster):
             self.logWarning(_("FMT %d unknown, using default") % desired_fmt)
             desired_fmt = 0

-        #parse available streams
+        # parse available streams
         streams = re.search(r'"url_encoded_fmt_stream_map":"(.+?)",', html).group(1)
         streams = [x.split('\u0026') for x in streams.split(',')]
         streams = [dict((y.split('=', 1)) for y in x) for x in streams]
@@ -123,7 +123,7 @@
         self.logDebug("AVAILABLE STREAMS: %s" % [x[0] for x in streams])

-        #build dictionary of supported itags (3D/2D)
+        # build dictionary of supported itags (3D/2D)
         allowed = lambda x: self.getConfig(self.formats[x][0])
         streams = [x for x in streams if x[0] in self.formats and allowed(x[0])]
@@ -136,11 +136,11 @@
                       (desired_fmt, "%s %dx%d Q:%d 3D:%s" % self.formats[desired_fmt],
                        "" if desired_fmt in fmt_dict else "NOT ", "" if allowed(desired_fmt) else "NOT "))

-        #return fmt nearest to quality index
+        # return fmt nearest to quality index
         if desired_fmt in fmt_dict and allowed(desired_fmt):
             fmt = desired_fmt
         else:
-            sel = lambda x: self.formats[x][3]  # select quality index
+            sel = lambda x: self.formats[x][3]  #: select quality index
             comp = lambda x, y: abs(sel(x) - sel(y))

             self.logDebug("Choosing nearest fmt: %s" % [(x, allowed(x), comp(x, desired_fmt)) for x in fmt_dict.keys()])
@@ -154,7 +154,7 @@ class YoutubeCom(Hoster):
         self.logDebug("URL: %s" % url)

-        #set file name
+        # set file name
         file_suffix = self.formats[fmt][0] if fmt in self.formats else ".flv"
         file_name_pattern = '<meta name="title" content="(.+?)">'
         name = re.search(file_name_pattern, html).group(1).replace("/", "")
diff --git a/pyload/plugin/hoster/ZippyshareCom.py b/pyload/plugin/hoster/ZippyshareCom.py
index dd78071c9..7f91c04e5 100644
--- a/pyload/plugin/hoster/ZippyshareCom.py
+++ b/pyload/plugin/hoster/ZippyshareCom.py
@@ -68,7 +68,7 @@ class ZippyshareCom(SimpleHoster):
         def replElementById(element):
             id = element.group(1)  #: id might be either 'x' (a real id) or x (a variable)
-            attr = element.group(4)  # attr might be None
+            attr = element.group(4)  #: attr might be None

             varName = re.sub(r'-', '', 'GVAR[%s+"_%s"]' %(id, attr))
diff --git a/pyload/plugin/internal/SimpleHoster.py b/pyload/plugin/internal/SimpleHoster.py
index df98c1e67..75970d814 100644
--- a/pyload/plugin/internal/SimpleHoster.py
+++ b/pyload/plugin/internal/SimpleHoster.py
@@ -332,7 +332,7 @@ class SimpleHoster(Hoster):
     @classmethod
     def getInfo(cls, url="", html=""):
         info = cls.apiInfo(url)
-        online = False if info['status'] != 2 else True
+        online = info['status'] == 2

         try:
             info['pattern'] = re.match(cls.__pattern, url).groupdict()  #: pattern groups will be saved here
diff --git a/pyload/plugin/internal/XFSAccount.py b/pyload/plugin/internal/XFSAccount.py
index 13c2351ce..d8c5a91f1 100644
--- a/pyload/plugin/internal/XFSAccount.py
+++ b/pyload/plugin/internal/XFSAccount.py
@@ -67,7 +67,7 @@ class XFSAccount(Account):
         html = req.load(self.HOSTER_URL,
                         get={'op': "my_account"},
                         decode=True)

-        premium = True if re.search(self.PREMIUM_PATTERN, html) else False
+        premium = re.search(self.PREMIUM_PATTERN, html) is not None

         m = re.search(self.VALID_UNTIL_PATTERN, html)
         if m:
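
The SimpleHoster.getInfo() and XFSAccount hunks just above replace expressions of the form "True if x else False" with the boolean expression itself; the same cleanup recurs later in Processor.py, pyloadweb.py and webui/app/utils.py. A minimal sketch of the idiom, with hypothetical names and patterns:

    import re

    PREMIUM_PATTERN = r'Premium account'  #: illustrative pattern, not the real one

    def is_online(info):
        # instead of: online = False if info['status'] != 2 else True
        return info['status'] == 2

    def is_premium(html):
        # instead of: premium = True if re.search(PREMIUM_PATTERN, html) else False
        return re.search(PREMIUM_PATTERN, html) is not None
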
diff --git a/pyload/plugin/ocr/ShareonlineBiz.py b/pyload/plugin/ocr/ShareonlineBiz.py
index b07653279..8efbdee35 100644
--- a/pyload/plugin/ocr/ShareonlineBiz.py
+++ b/pyload/plugin/ocr/ShareonlineBiz.py
@@ -23,8 +23,8 @@ class ShareonlineBiz(OCR):
         self.image = self.image.resize((160, 50))
         self.pixels = self.image.load()
         self.threshold(1.85)
-        #self.eval_black_white(240)
-        #self.derotate_by_average()
+        # self.eval_black_white(240)
+        # self.derotate_by_average()

         letters = self.split_captcha_letters()
@@ -36,4 +36,4 @@
         return final

-    #tesseract at 60%
+    # tesseract at 60%
diff --git a/pyload/remote/ClickNLoadBackend.py b/pyload/remote/ClickNLoadBackend.py
index 5c08986a7..2d3e29dbc 100644
--- a/pyload/remote/ClickNLoadBackend.py
+++ b/pyload/remote/ClickNLoadBackend.py
@@ -62,20 +62,20 @@ class CNLHandler(BaseHTTPRequestHandler):
     def do_GET(self):
         path = self.path.strip("/").lower()
-        #self.wfile.write(path+"\n")
+        # self.wfile.write(path+"\n")

-        self.map = [ (r"add$", self.add),
-                     (r"addcrypted$", self.addcrypted),
-                     (r"addcrypted2$", self.addcrypted2),
-                     (r"flashgot", self.flashgot),
-                     (r"crossdomain\.xml", self.crossdomain),
-                     (r"checkSupportForUrl", self.checksupport),
-                     (r"jdcheck.js", self.jdcheck),
-                     (r"", self.flash) ]
+        self.map = [(r"add$", self.add),
+                    (r"addcrypted$", self.addcrypted),
+                    (r"addcrypted2$", self.addcrypted2),
+                    (r"flashgot", self.flashgot),
+                    (r"crossdomain\.xml", self.crossdomain),
+                    (r"checkSupportForUrl", self.checksupport),
+                    (r"jdcheck.js", self.jdcheck),
+                    (r"", self.flash)]

         func = None
         for r, f in self.map:
-            if re.match(r"(flash(got)?/?)?"+r, path):
+            if re.match(r"(flash(got)?/?)?" + r, path):
                 func = f
                 break
@@ -94,11 +94,11 @@ class CNLHandler(BaseHTTPRequestHandler):
     def do_POST(self):
         form = FieldStorage(
-            fp=self.rfile,
-            headers=self.headers,
-            environ={'REQUEST_METHOD':'POST',
-                     'CONTENT_TYPE':self.headers['Content-Type'],
-                     })
+                fp=self.rfile,
+                headers=self.headers,
+                environ={'REQUEST_METHOD': 'POST',
+                         'CONTENT_TYPE': self.headers['Content-Type'],
+                         })

         self.post = {}
         for name in form.keys():
diff --git a/pyload/remote/ThriftBackend.py b/pyload/remote/ThriftBackend.py
index a6f1841e2..f71e264e2 100644
--- a/pyload/remote/ThriftBackend.py
+++ b/pyload/remote/ThriftBackend.py
@@ -31,14 +31,14 @@ class ThriftBackend(BackendBase):
         transport = ServerSocket(port, host, key, cert)

-#        tfactory = TransportFactoryCompressed()
+        # tfactory = TransportFactoryCompressed()
         tfactory = TransportFactory()
         pfactory = ProtocolFactory()

         self.server = TServer.TThreadedServer(processor, transport, tfactory, pfactory)
-        #self.server = TNonblockingServer.TNonblockingServer(processor, transport, tfactory, pfactory)
+        # self.server = TNonblockingServer.TNonblockingServer(processor, transport, tfactory, pfactory)

-        #server = TServer.TThreadPoolServer(processor, transport, tfactory, pfactory)
+        # server = TServer.TThreadPoolServer(processor, transport, tfactory, pfactory)

     def serve(self):
diff --git a/pyload/remote/socketbackend/create_ttypes.py b/pyload/remote/socketbackend/create_ttypes.py
index 9b001f1bf..00752dc6b 100644
--- a/pyload/remote/socketbackend/create_ttypes.py
+++ b/pyload/remote/socketbackend/create_ttypes.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-

+from __future__ import with_statement
+
 import inspect
 import os
 import platform
@@ -12,7 +14,6 @@ from pyload.remote.thriftbackend.thriftgen.pyload.Pyload import Iface


 def main():
-
     enums = []
     classes = []
@@ -21,19 +22,19 @@
     for name in dir(ttypes):
         klass = getattr(ttypes, name)
-        if name in ("TBase", "TExceptionBase") or name.startswith("_") or not (issubclass(klass, ttypes.TBase) or issubclass(klass, ttypes.TExceptionBase)):
+        if name in ("TBase", "TExceptionBase") or name.startswith("_") or not (
+                issubclass(klass, ttypes.TBase) or issubclass(klass, ttypes.TExceptionBase)):
             continue

         if hasattr(klass, "thrift_spec"):
-            classes.append(klass)
+            classes.append(klass)
         else:
             enums.append(klass)

-    f = open(os.path.join(pypath, "pyload", "api", "types.py"), "wb")
-
-    f.write(
-        """# -*- coding: utf-8 -*-
+    with open(os.path.join(pypath, "pyload", "api", "types.py"), "wb") as f:
+        f.write(
+"""# -*- coding: utf-8 -*-
 # Autogenerated by pyload
 # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
@@ -43,7 +44,7 @@ class BaseObject(object):
 """)

-    ## generate enums
+    # generate enums
     for enum in enums:
         name = enum.__name__
         f.write("class %s:\n" % name)
@@ -59,10 +60,10 @@ class BaseObject(object):
     for klass in classes:
         name = klass.__name__
         base = "Exception" if issubclass(klass, ttypes.TExceptionBase) else "BaseObject"
-        f.write("class %s(%s):\n" % (name, base))
+        f.write("class %s(%s):\n" % (name, base))
         f.write("\t__slots__ = %s\n\n" % klass.__slots__)

-        #create init
+        # create init
         args = ["self"] + ["%s=None" % x for x in klass.__slots__]
         f.write("\tdef __init__(%s):\n" % ", ".join(args))
diff --git a/pyload/remote/thriftbackend/Processor.py b/pyload/remote/thriftbackend/Processor.py
index 7ccc2bee2..204047e2f 100644
--- a/pyload/remote/thriftbackend/Processor.py
+++ b/pyload/remote/thriftbackend/Processor.py
@@ -57,7 +57,7 @@ class Processor(Pyload.Processor):
             # api login
             self.authenticated[trans] = self._handler.checkAuth(args.username, args.password, trans.remoteaddr[0])

-            result.success = True if self.authenticated[trans] else False
+            result.success = bool(self.authenticated[trans])
             oprot.writeMessageBegin("login", Pyload.TMessageType.REPLY, seqid)
             result.write(oprot)
             oprot.writeMessageEnd()
diff --git a/pyload/remote/thriftbackend/Socket.py b/pyload/remote/thriftbackend/Socket.py
index 3d2435a92..0ca9ed178 100644
--- a/pyload/remote/thriftbackend/Socket.py
+++ b/pyload/remote/thriftbackend/Socket.py
@@ -68,7 +68,7 @@ class Socket(TSocket):
         else:
             self.handle = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

-        #errno 104 connection reset
+        # errno 104 connection reset
         self.handle.settimeout(self._timeout)

         self.handle.connect((self.host, self.port))
@@ -94,7 +94,7 @@ class Socket(TSocket):
             if e.args == (-1, 'Unexpected EOF'):
                 buff = ''
             elif e.args == ([('SSL routines', 'SSL23_GET_CLIENT_HELLO', 'unknown protocol')],):
-                #a socket not using ssl tried to connect
+                # a socket not using ssl tried to connect
                 buff = ''
             else:
                 raise
diff --git a/pyload/remote/thriftbackend/ThriftClient.py b/pyload/remote/thriftbackend/ThriftClient.py
index 4f1c8dcc2..b018fbcc4 100644
--- a/pyload/remote/thriftbackend/ThriftClient.py
+++ b/pyload/remote/thriftbackend/ThriftClient.py
@@ -49,10 +49,10 @@ class ThriftClient(object):
             correct = self.client.login(user, password)
         except error, e:
             if e.args and e.args[0] == 104:
-                #connection reset by peer, probably wants ssl
+                # connection reset by peer, probably wants ssl
                 try:
                     self.createConnection(host, port, True)
-                    #set timeout or a ssl socket will block when querying none ssl server
+                    # set timeout or a ssl socket will block when querying none ssl server
                     self.socket.setTimeout(10)

                 except ImportError:
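
The comments touched in Socket.py and ThriftClient.py above describe the reconnect behaviour: a connection reset (errno 104) on the plain socket is taken as a hint that the server expects SSL, and a timeout is set so the handshake cannot block against a non-SSL peer. A rough standalone sketch of that logic using the Python 2 stdlib ssl module (not pyload's actual ThriftClient code):

    import socket
    import ssl

    def connect(host, port, timeout=10):
        try:
            return socket.create_connection((host, port), timeout), False
        except socket.error, e:
            if not e.args or e.args[0] != 104:  #: only retry on "connection reset by peer"
                raise
        # the peer reset the plain connection, so retry wrapped in SSL; the timeout
        # keeps the handshake from blocking forever against a non-SSL server
        plain = socket.create_connection((host, port), timeout)
        return ssl.wrap_socket(plain), True
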
diff --git a/pyload/remote/thriftbackend/ThriftTest.py b/pyload/remote/thriftbackend/ThriftTest.py
index 0c5ea4783..c95e060b8 100644
--- a/pyload/remote/thriftbackend/ThriftTest.py
+++ b/pyload/remote/thriftbackend/ThriftTest.py
@@ -25,7 +25,7 @@ import xmlrpclib

 def bench(f, *args, **kwargs):
     s = time()
-    ret = [f(*args, **kwargs) for _i in range(0, 100)]
+    ret = [f(*args, **kwargs) for _i in xrange(0, 100)]
     e = time()
     try:
         print "%s: %f s" % (f._Method__name, e-s)
@@ -74,8 +74,8 @@ try:
     bench(client.getServerVersion)
     bench(client.statusServer)
     bench(client.statusDownloads)
-    #bench(client.getQueue)
-    #bench(client.getCollector)
+    # bench(client.getQueue)
+    # bench(client.getCollector)

     print
     print client.getServerVersion()
diff --git a/pyload/utils/__init__.py b/pyload/utils/__init__.py
index 2e23bf99b..3d26983b5 100644
--- a/pyload/utils/__init__.py
+++ b/pyload/utils/__init__.py
@@ -63,7 +63,7 @@ def remove_chars(string, repl):

 def safe_filename(name):
     """ remove bad chars """
-    name = unquote(name).encode('ascii', 'replace')  # Non-ASCII chars usually breaks file saving. Replacing.
+    name = unquote(name).encode('ascii', 'replace')  #: Non-ASCII chars usually breaks file saving. Replacing.
     if os.name == 'nt':
         return remove_chars(name, u'\00\01\02\03\04\05\06\07\10\11\12\13\14\15\16\17\20\21\22\23\24\25\26\27\30\31\32'
                                   u'\33\34\35\36\37/?%*|"<>')
@@ -95,15 +95,15 @@ if sys.getfilesystemencoding().startswith('ANSI'):
     def fs_encode(string):
         return safe_filename(encode(string))

-    fs_decode = decode  # decode utf8
+    fs_decode = decode  #: decode utf8

 else:
-    fs_encode = fs_decode = lambda x: x  # do nothing
+    fs_encode = fs_decode = lambda x: x  #: do nothing


 def get_console_encoding(enc):
     if os.name == "nt":
-        if enc == "cp65001":  # aka UTF-8
+        if enc == "cp65001":  #: aka UTF-8
             print "WARNING: Windows codepage 65001 is not supported."
enc = "cp850" else: @@ -178,7 +178,7 @@ def uniqify(seq): #: Originally by Dave Kirby return [x for x in seq if x not in seen and not seen_add(x)] -def parseFileSize(string, unit=None): # returns bytes +def parseFileSize(string, unit=None): #: returns bytes if not unit: m = re.match(r"([\d.,]+) *([a-zA-Z]*)", string.strip().lower()) if m: @@ -244,7 +244,7 @@ def fixup(m): except KeyError: pass - return text # leave as is + return text #: leave as is def has_method(obj, name): diff --git a/pyload/utils/packagetools.py b/pyload/utils/packagetools.py index 0ab68a869..8ed55d0f7 100644 --- a/pyload/utils/packagetools.py +++ b/pyload/utils/packagetools.py @@ -79,7 +79,7 @@ def parseNames(files): if len(split) > 1: name = split.pop(1) - #check if an already existing package may be ok for this file + # check if an already existing package may be ok for this file # found = False # for pack in packs: # if pack in file: diff --git a/pyload/utils/pylgettext.py b/pyload/utils/pylgettext.py index 86cfc586a..76bb268ec 100644 --- a/pyload/utils/pylgettext.py +++ b/pyload/utils/pylgettext.py @@ -37,7 +37,7 @@ def find(domain, localedir=None, languages=None, all=False): if _searchdirs is None: return origfind(domain, localedir, languages, all) searches = [localedir] + _searchdirs - results = list() + results = [] for dir in searches: res = origfind(domain, dir, languages, all) if all is False: diff --git a/pyload/webui/app/api.py b/pyload/webui/app/api.py index 0e36b7c1f..70d86c112 100644 --- a/pyload/webui/app/api.py +++ b/pyload/webui/app/api.py @@ -8,7 +8,6 @@ from bottle import route, request, response, HTTPError from pyload.webui.app.utils import toDict, set_session from pyload.webui import PYLOAD - from pyload.utils import json from SafeEval import const_eval as literal_eval from pyload.api import BaseObject diff --git a/pyload/webui/app/cnl.py b/pyload/webui/app/cnl.py index 73087ad2d..635d4030c 100644 --- a/pyload/webui/app/cnl.py +++ b/pyload/webui/app/cnl.py @@ -1,4 +1,7 @@ # -*- coding: utf-8 -*- + +from __future__ import with_statement + from os.path import join import re from urllib import unquote @@ -6,6 +9,7 @@ from base64 import standard_b64decode from binascii import unhexlify from bottle import route, request, HTTPError + from pyload.webui import PYLOAD, DL_ROOT, JS @@ -57,9 +61,8 @@ def addcrypted(): dlc = request.forms['crypted'].replace(" ", "+") dlc_path = join(DL_ROOT, package.replace("/", "").replace("\\", "").replace(":", "") + ".dlc") - dlc_file = open(dlc_path, "wb") - dlc_file.write(dlc) - dlc_file.close() + with open(dlc_path, "wb") as dlc_file: + dlc_file.write(dlc) try: PYLOAD.addPackage(package, [dlc_path], 0) @@ -85,7 +88,7 @@ def addcrypted2(): try: jk = re.findall(r"return ('|\")(.+)('|\")", jk)[0][1] except Exception: - ## Test for some known js functions to decode + # Test for some known js functions to decode if jk.find("dec") > -1 and jk.find("org") > -1: org = re.findall(r"var org = ('|\")([^\"']+)", jk)[0][1] jk = list(org) diff --git a/pyload/webui/app/json.py b/pyload/webui/app/json.py index 0805e9f5b..3b72cb7ce 100644 --- a/pyload/webui/app/json.py +++ b/pyload/webui/app/json.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- +from __future__ import with_statement + from os.path import join from traceback import print_exc from shutil import copyfileobj @@ -7,9 +9,7 @@ from shutil import copyfileobj from bottle import route, request, HTTPError from pyload.webui import PYLOAD - from pyload.webui.app.utils import login_required, render_to_response, toDict - from pyload.utils 
@@ -166,9 +166,8 @@ def add_package():
                 name = f.name
                 fpath = join(PYLOAD.getConfigValue("general", "download_folder"), "tmp_" + f.filename)
-                destination = open(fpath, 'wb')
-                copyfileobj(f.file, destination)
-                destination.close()
+                with open(fpath, 'wb') as destination:
+                    copyfileobj(f.file, destination)
                 links.insert(0, fpath)
             except Exception:
                 pass
@@ -226,7 +225,7 @@ def set_captcha():
     if task.tid >= 0:
         src = "data:image/%s;base64,%s" % (task.type, task.data)

-        return {'captcha': True, 'id': task.tid, 'src': src, 'result_type' : task.resultType}
+        return {'captcha': True, 'id': task.tid, 'src': src, 'result_type': task.resultType}
     else:
         return {'captcha': False}
@@ -306,7 +305,6 @@ def update_accounts():

 @route('/json/change_password', method='POST')
 def change_password():
-
     user = request.POST['user_login']
     oldpw = request.POST['login_current_password']
     newpw = request.POST['login_new_password']
diff --git a/pyload/webui/app/pyloadweb.py b/pyload/webui/app/pyloadweb.py
index 154409655..7f2317bd1 100644
--- a/pyload/webui/app/pyloadweb.py
+++ b/pyload/webui/app/pyloadweb.py
@@ -274,12 +274,12 @@ def config():
             data.trafficleft = formatSize(data.trafficleft)

             if data.validuntil == -1:
-                data.validuntil = _("unlimited")
+                data.validuntil = _("unlimited")
             elif not data.validuntil:
-                data.validuntil = _("not available")
+                data.validuntil = _("not available")
             else:
                 t = time.localtime(data.validuntil)
-                data.validuntil = time.strftime("%d.%m.%Y - %H:%M:%S", t)
+                data.validuntil = time.strftime("%d.%m.%Y - %H:%M:%S", t)

             try:
                 data.options['time'] = data.options['time'][0]
@@ -292,7 +292,8 @@ def config():
         data.options['limitdl'] = "0"

     return render_to_response('settings.html',
-                              {'conf': {'plugin': plugin_menu, 'general': conf_menu, 'accs': accs}, 'types': PYLOAD.getAccountTypes()},
+                              {'conf': {'plugin': plugin_menu, 'general': conf_menu, 'accs': accs},
+                               'types': PYLOAD.getAccountTypes()},
                               [pre_processor])
@@ -302,10 +303,7 @@ def config():
 @route('/pathchooser/<path:path>')
 @login_required('STATUS')
 def path(file="", path=""):
-    if file:
-        type = "file"
-    else:
-        type = "folder"
+    type = "file" if file else "folder"

     path = os.path.normpath(unquotepath(path))
@@ -360,10 +358,7 @@
             except Exception:
                 continue

-            if os.path.isdir(join(cwd, f)):
-                data['type'] = 'dir'
-            else:
-                data['type'] = 'file'
+            data['type'] = 'dir' if os.path.isdir(join(cwd, f)) else 'file'

             if os.path.isfile(join(cwd, f)):
                 data['size'] = os.path.getsize(join(cwd, f))
@@ -434,7 +429,7 @@ def logs(item=-1):
     if item < 1 or type(item) is not int:
         item = 1 if len(log) - perpage + 1 < 1 else len(log) - perpage + 1

-    if type(fro) is datetime:  # we will search for datetime
+    if type(fro) is datetime:  #: we will search for datetime
         item = -1

     data = []
@@ -454,16 +449,16 @@
             level = '?'
             message = l

         if item == -1 and dtime is not None and fro <= dtime:
-            item = counter  # found our datetime
+            item = counter  #: found our datetime

         if item >= 0:
             data.append({'line': counter, 'date': date + " " + time, 'level': level, 'message': message})
             perpagecheck += 1

-            if fro is None and dtime is not None:  # if fro not set set it to first showed line
+            if fro is None and dtime is not None:  #: if fro not set set it to first showed line
                 fro = dtime

         if perpagecheck >= perpage > 0:
             break

-    if fro is None:  # still not set, empty log?
+    if fro is None:  #: still not set, empty log?
         fro = datetime.now()

     if reversed:
         data.reverse()
@@ -486,7 +481,7 @@ def admin():
     for data in user.itervalues():
         data['perms'] = {}
         get_permission(data['perms'], data['permission'])
-        data['perms']['admin'] = True if data['role'] is 0 else False
+        data['perms']['admin'] = data['role'] is 0

     s = request.environ.get('beaker.session')
     if request.environ.get('REQUEST_METHOD', "GET") == "POST":
@@ -520,11 +515,7 @@ def setup():
 @route('/info')
 def info():
     conf = PYLOAD.getConfigDict()
-
-    if hasattr(os, "uname"):
-        extra = os.uname()
-    else:
-        extra = tuple()
+    extra = os.uname() if hasattr(os, "uname") else tuple()

     data = {"python" : sys.version,
             "os"     : " ".join((os.name, sys.platform) + extra),
diff --git a/pyload/webui/app/utils.py b/pyload/webui/app/utils.py
index 69067d8fe..2753b7feb 100644
--- a/pyload/webui/app/utils.py
+++ b/pyload/webui/app/utils.py
@@ -80,8 +80,8 @@ def set_session(request, info):

 def parse_userdata(session):
-    return {"name": session.get("name", "Anonymous"),
-            "is_admin": True if session.get("role", 1) == 0 else False,
+    return {"name"            : session.get("name", "Anonymous"),
+            "is_admin"        : session.get("role", 1) == 0,
             "is_authenticated": session.get("authenticated", False)}
@@ -115,10 +115,7 @@ def login_required(perm=None):

 def toDict(obj):
-    ret = {}
-    for att in obj.__slots__:
-        ret[att] = getattr(obj, att)
-    return ret
+    return {att: getattr(obj, att) for att in obj.__slots__}


 class CherryPyWSGI(ServerAdapter):
diff --git a/pyload/webui/filters.py b/pyload/webui/filters.py
index ea4b159fa..e11944c94 100644
--- a/pyload/webui/filters.py
+++ b/pyload/webui/filters.py
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+
 import os
 from os.path import abspath, commonprefix, join
diff --git a/pyload/webui/servers/lighttpd_default.conf b/pyload/webui/servers/lighttpd_default.conf
index 444ef39c5..9ccd264db 100644
--- a/pyload/webui/servers/lighttpd_default.conf
+++ b/pyload/webui/servers/lighttpd_default.conf
@@ -108,7 +108,7 @@ mimetype.assign = (
 )

 # Use the "Content-Type" extended attribute to obtain mime type if possible
-#mimetype.use-xattr = "enable"
+# mimetype.use-xattr = "enable"

 #### accesslog module
 accesslog.filename = "%(path)/access.log"
@@ -125,7 +125,7 @@ server.pid-file = "%(path)/lighttpd.pid"
 server.bind = "%(host)"
 server.port = %(port)

-#server.document-root = "/home/user/public_html"
+# server.document-root = "/home/user/public_html"

 fastcgi.server = (
     "/pyload.fcgi" => (
diff --git a/setup.py b/setup.py
@@ -34,13 +34,13 @@ setup(
     platforms=['Any'],

-    #package_dir={'pyload': 'src'},
+    # package_dir={'pyload': 'src'},
     packages=['pyload'],

-    #package_data=find_package_data(),
+    # package_data=find_package_data(),

-    #data_files=[],
+    # data_files=[],

     include_package_data=True,
@@ -74,11 +74,11 @@ setup(
         'JSON speedup'  : ["simplejson"]
     },

-    #setup_requires=["setuptools_hg"],
+    # setup_requires=["setuptools_hg"],

-    #test_suite='nose.collector',
+    # test_suite='nose.collector',

-    #tests_require=['nose', 'websocket-client >= 0.8.0', 'requests >= 1.2.2'],
+    # tests_require=['nose', 'websocket-client >= 0.8.0', 'requests >= 1.2.2'],

     entry_points={
         'console_scripts': [
diff --git a/tests/APIExerciser.py b/tests/APIExerciser.py
index d17f81ae2..f4b082479 100644
--- a/tests/APIExerciser.py
+++ b/tests/APIExerciser.py
@@ -1,6 +1,8 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-

+from __future__ import with_statement
+
 import string
 from threading import Thread
 from random import choice, sample, randint
@@ -15,7 +17,7 @@ from pyload.remote.thriftbackend.ThriftClient import ThriftClient, Destination
 def createURLs():
     """ create some urls, some may fail """
     urls = []
-    for x in range(0, randint(20, 100)):
+    for x in xrange(0, randint(20, 100)):
         name = "DEBUG_API"
         if randint(0, 5) == 5:
             name = ""  #: this link will fail
@@ -32,7 +34,7 @@ sumCalled = 0


 def startApiExerciser(core, n):
-    for _i in range(n):
+    for _i in xrange(n):
         APIExerciser(core).start()
@@ -60,32 +62,32 @@ class APIExerciser(Thread):

         self.core.log.info("API Excerciser started %d" % self.id)

-        out = open("error.log", "ab")
-        # core errors are not logged of course
-        out.write("\n" + "Starting\n")
-        out.flush()
-
-        while True:
-            try:
-                self.testAPI()
-            except Exception:
-                self.core.log.error("Excerciser %d throw an execption" % self.id)
-                print_exc()
-                out.write(format_exc() + 2 * "\n")
-                out.flush()
-
-            if not self.count % 100:
-                self.core.log.info("Exerciser %d tested %d api calls" % (self.id, self.count))
-                if not self.count % 1000:
-                    out.flush()
-
-            if not sumCalled % 1000:  #: not thread safe
-                self.core.log.info("Exercisers tested %d api calls" % sumCalled)
-                persec = sumCalled / (time() - self.time)
-                self.core.log.info("Approx. %.2f calls per second." % persec)
-                self.core.log.info("Approx. %.2f ms per call." % (1000 / persec))
-                self.core.log.info("Collected garbage: %d" % gc.collect())
-            # sleep(random() / 500)
+        with open("error.log", "ab") as out:
+            # core errors are not logged of course
+            out.write("\n" + "Starting\n")
+            out.flush()
+
+            while True:
+                try:
+                    self.testAPI()
+                except Exception:
+                    self.core.log.error("Excerciser %d throw an execption" % self.id)
+                    print_exc()
+                    out.write(format_exc() + 2 * "\n")
+                    out.flush()
+
+                if not self.count % 100:
+                    self.core.log.info("Exerciser %d tested %d api calls" % (self.id, self.count))
+                    if not self.count % 1000:
+                        out.flush()
+
+                if not sumCalled % 1000:  #: not thread safe
+                    self.core.log.info("Exercisers tested %d api calls" % sumCalled)
+                    persec = sumCalled / (time() - self.time)
+                    self.core.log.info("Approx. %.2f calls per second." % persec)
+                    self.core.log.info("Approx. %.2f ms per call." % (1000 / persec))
+                    self.core.log.info("Collected garbage: %d" % gc.collect())
+                # sleep(random() / 500)


     def testAPI(self):
diff --git a/tests/clonedigger.sh b/tests/clonedigger.sh
index 4c53eab0d..e7fd17eb6 100644
--- a/tests/clonedigger.sh
+++ b/tests/clonedigger.sh
@@ -1,4 +1,4 @@
 #!/bin/sh
-PYLOAD="../pyload" # Check pyload directory
+PYLOAD="../pyload" #: Check pyload directory

 clonedigger -o cpd.xml --cpd-output --fast --ignore-dir="remote" ${PYLOAD}
diff --git a/tests/code_analysis.sh b/tests/code_analysis.sh
index cba614929..aaf6bb6a4 100644
--- a/tests/code_analysis.sh
+++ b/tests/code_analysis.sh
@@ -1,6 +1,6 @@
 #!/bin/sh
-PYLOAD="../pyload" # Check pyload directory
+PYLOAD="../pyload" #: Check pyload directory

 echo "Running sloccount ..."
 REPORT="sloccount.sc"
diff --git a/tests/test_api.py b/tests/test_api.py
index 13e783d54..1e02d8aa3 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -20,5 +20,5 @@ class TestApi(object):

     @nottest
     def test_random(self):
-        for _i in range(0, 100):
+        for _i in xrange(0, 100):
             self.api.testAPI()
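
Several hunks above (create_ttypes.py, webui/app/cnl.py, webui/app/json.py, tests/APIExerciser.py) replace manual open()/close() pairs with the with statement and add "from __future__ import with_statement" for Python 2.5 compatibility. A minimal sketch of the pattern, using a hypothetical helper rather than the actual pyload functions:

    # -*- coding: utf-8 -*-
    from __future__ import with_statement  #: needed only on Python 2.5

    def write_dlc(dlc_path, dlc_data):
        # hypothetical helper mirroring the rewritten addcrypted() handler:
        # the file is closed even if write() raises, no explicit close() needed
        with open(dlc_path, "wb") as dlc_file:
            dlc_file.write(dlc_data)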