author | Walter Purcaro <vuolter@users.noreply.github.com> | 2015-04-20 22:56:34 +0200 |
---|---|---|
committer | Walter Purcaro <vuolter@users.noreply.github.com> | 2015-04-20 23:02:08 +0200 |
commit | 892b7cbd4981b764bed30b2ccc5ca73d791c39f2 (patch) | |
tree | 1f5f142ad7e21de9e88f0c15957bbc7e03a9d1ff | |
parent | Spare code cosmetics (8) (diff) | |
download | pyload-892b7cbd4981b764bed30b2ccc5ca73d791c39f2.tar.xz |
Spare code cosmetics (9)
46 files changed, 126 insertions(+), 126 deletions(-)
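Every hunk below applies the same cosmetic rule: an inline "#" comment that documents the statement it sits on becomes a "#:" comment (the epydoc/Sphinx doc-comment marker), and the empty-container constructors tuple() and list() are replaced by the literals () and []. A minimal sketch of the "#:" form, modelled on the PERMS flags from pyload/api/__init__.py — the autodoc pickup described in the comments is the usual Sphinx/epydoc behaviour and an assumption of this sketch, not something the commit itself states:

    class PERMS(object):
        """API permission bit flags (subset, mirroring pyload/api/__init__.py)."""
        ALL    = 0  #: requires no permission, but login
        ADD    = 1  #: can add packages
        DELETE = 2  #: can delete packages

    # At runtime a plain "# can add packages" comment is identical; only
    # documentation tools such as Sphinx autodoc or epydoc treat "#:" as an
    # attribute doc-comment and attach it to the assignment it annotates.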
diff --git a/docs/write_plugins.rst b/docs/write_plugins.rst
index af35a8d55..8d42aea7f 100644
--- a/docs/write_plugins.rst
+++ b/docs/write_plugins.rst
@@ -51,7 +51,7 @@ An example ``process`` function could look like this ::
"""
def process(self, pyfile):
- html = self.load(pyfile.url) # load the content of the orginal pyfile.url to html
+ html = self.load(pyfile.url) #: load the content of the orginal pyfile.url to html
# parse the name from the site and set attribute in pyfile
pyfile.name = self.myFunctionToParseTheName(html)
diff --git a/pyload/Core.py b/pyload/Core.py
index ae6bde4a9..e833314f0 100755
--- a/pyload/Core.py
+++ b/pyload/Core.py
@@ -198,7 +198,7 @@ class Core(object):
if not pid or os.name == "nt":
return False
try:
- os.kill(pid, 0) # 0 - default signal (does nothing)
+ os.kill(pid, 0) #: 0 - default signal (does nothing)
except Exception:
return 0
diff --git a/pyload/api/__init__.py b/pyload/api/__init__.py
index 1c292d541..442e9ef95 100644
--- a/pyload/api/__init__.py
+++ b/pyload/api/__init__.py
@@ -51,20 +51,20 @@ urlmatcher = re.compile(r"((https?|ftps?|xdcc|sftp):((//)|(\\\\))+[\w\d:#@%/;$()
class PERMS(object):
- ALL = 0 # requires no permission, but login
- ADD = 1 # can add packages
- DELETE = 2 # can delete packages
- STATUS = 4 # see and change server status
- LIST = 16 # see queue and collector
- MODIFY = 32 # moddify some attribute of downloads
- DOWNLOAD = 64 # can download from webinterface
- SETTINGS = 128 # can access settings
- ACCOUNTS = 256 # can access accounts
- LOGS = 512 # can see server logs
+ ALL = 0 #: requires no permission, but login
+ ADD = 1 #: can add packages
+ DELETE = 2 #: can delete packages
+ STATUS = 4 #: see and change server status
+ LIST = 16 #: see queue and collector
+ MODIFY = 32 #: moddify some attribute of downloads
+ DOWNLOAD = 64 #: can download from webinterface
+ SETTINGS = 128 #: can access settings
+ ACCOUNTS = 256 #: can access accounts
+ LOGS = 512 #: can see server logs
class ROLE(object):
- ADMIN = 0 # admin has all permissions implicit
+ ADMIN = 0 #: admin has all permissions implicit
USER = 1
@@ -86,7 +86,7 @@ class Api(Iface):
These can be configured via webinterface.
Admin user have all permissions, and are the only ones who can access the methods with no specific permission.
"""
- EXTERNAL = Iface # let the json api know which methods are external
+ EXTERNAL = Iface #: let the json api know which methods are external
def __init__(self, core):
@@ -149,7 +149,7 @@ class Api(Iface):
self.core.addonManager.dispatchEvent("config-changed", category, option, value, section)
if section == "core":
self.core.config[category][option] = value
- if option in ("limit_speed", "max_speed"): # not so nice to update the limit
+ if option in ("limit_speed", "max_speed"): #: not so nice to update the limit
self.core.requestFactory.updateBucket()
elif section == "plugin":
self.core.config.setPlugin(category, option, value)
@@ -232,7 +232,7 @@ class Api(Iface):
not self.core.threadManager.pause and self.isTimeDownload(),
self.core.config.get("reconnect", "activated") and self.isTimeReconnect())
for pyfile in [x.active for x in self.core.threadManager.threads if x.active and isinstance(x.active, PyFile)]:
- serverStatus.speed += pyfile.getSpeed() # bytes/s
+ serverStatus.speed += pyfile.getSpeed() #: bytes/s
return serverStatus
@@ -776,7 +776,7 @@ class Api(Iface):
order = {}
for pid in packs:
pack = self.core.files.getPackageData(int(pid))
- while pack['order'] in order.keys(): # just in case
+ while pack['order'] in order.keys(): #: just in case
pack['order'] += 1
order[pack['order']] = pack['id']
return order
@@ -792,7 +792,7 @@ class Api(Iface):
rawdata = self.core.files.getPackageData(int(pid))
order = {}
for id, pyfile in rawdata['links'].iteritems():
- while pyfile['order'] in order.keys(): # just in case
+ while pyfile['order'] in order.keys(): #: just in case
pyfile['order'] += 1
order[pyfile['order']] = pyfile['id']
return order
diff --git a/pyload/cli/Cli.py b/pyload/cli/Cli.py
index cd3252220..fc5236ff9 100644
--- a/pyload/cli/Cli.py
+++ b/pyload/cli/Cli.py
@@ -55,8 +55,8 @@ class Cli(object):
self.lock = Lock()
# processor funcions, these will be changed dynamically depending on control flow
- self.headerHandler = self # the download status
- self.bodyHandler = self # the menu section
+ self.headerHandler = self #: the download status
+ self.bodyHandler = self #: the menu section
self.inputHandler = self
os.system("clear")
@@ -158,7 +158,7 @@ class Cli(object):
line += 1
for download in data:
- if download.status == 12: # downloading
+ if download.status == 12: #: downloading
percent = download.percent
z = percent / 4
speed += download.speed
@@ -265,7 +265,7 @@ class Cli(object):
print "No downloads running."
for download in files:
- if download.status == 12: # downloading
+ if download.status == 12: #: downloading
print print_status(download)
print "\tDownloading: %s @ %s/s\t %s (%s%%)" % (
download.format_eta, formatSize(download.speed), formatSize(download.size - download.bleft),
diff --git a/pyload/cli/ManageFiles.py b/pyload/cli/ManageFiles.py
index 2e7f725dd..3833b2c48 100644
--- a/pyload/cli/ManageFiles.py
+++ b/pyload/cli/ManageFiles.py
@@ -15,9 +15,9 @@ class ManageFiles(Handler):
def init(self):
self.target = Destination.Queue
- self.pos = 0 # position in queue
- self.package = -1 # choosen package
- self.mode = "" # move/delete/restart
+ self.pos = 0 #: position in queue
+ self.package = -1 #: choosen package
+ self.mode = "" #: move/delete/restart
self.cache = None
self.links = None
diff --git a/pyload/database/File.py b/pyload/database/File.py
index a49ba6d3a..3e930ebcc 100644
--- a/pyload/database/File.py
+++ b/pyload/database/File.py
@@ -28,8 +28,8 @@ class FileHandler(object):
_("temp. offline"), _("starting"), _("failed"), _("aborted"), _("decrypting"), _("custom"), _("downloading"), _("processing"), _("unknown")]
- self.cache = {} # holds instances for files
- self.packageCache = {} # same for packages
+ self.cache = {} #: holds instances for files
+ self.packageCache = {} #: same for packages
#@TODO: purge the cache
self.jobCache = {}
@@ -37,9 +37,9 @@ class FileHandler(object):
self.lock = RLock() #@TODO: should be a Lock w/o R
# self.lock._Verbose__verbose = True
- self.filecount = -1 # if an invalid value is set get current value from db
- self.queuecount = -1 # number of package to be loaded
- self.unchanged = False # determines if any changes was made since last call
+ self.filecount = -1 #: if an invalid value is set get current value from db
+ self.queuecount = -1 #: number of package to be loaded
+ self.unchanged = False #: determines if any changes was made since last call
self.db = self.core.db
@@ -339,7 +339,7 @@ class FileHandler(object):
pyfile = self.getFile(self.jobCache[occ].pop())
else:
- self.jobCache = {} # better not caching to much
+ self.jobCache = {} #: better not caching to much
jobs = self.db.getJob(occ)
jobs.reverse()
self.jobCache[occ] = jobs
@@ -763,7 +763,7 @@ class FileMethods(object):
'queue': r[5],
'order': r[6],
'sizetotal': int(r[7]),
- 'sizedone': r[8] if r[8] else 0, # these can be None
+ 'sizedone': r[8] if r[8] else 0, #: these can be None
'linksdone': r[9] if r[9] else 0,
'linkstotal': r[10],
'links': {}
@@ -923,7 +923,7 @@ class FileMethods(object):
"""return pyfile ids, which are suitable for download and dont use a occupied plugin"""
#@TODO: improve this hardcoded method
- pre = "('CCF', 'DLC', 'LinkList', 'RSDF', 'TXT')" # plugins which are processed in collector
+ pre = "('CCF', 'DLC', 'LinkList', 'RSDF', 'TXT')" #: plugins which are processed in collector
cmd = "("
for i, item in enumerate(occ):
@@ -934,7 +934,7 @@ class FileMethods(object):
cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE ((p.queue=1 AND l.plugin NOT IN %s) OR l.plugin IN %s) AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % (cmd, pre)
- self.c.execute(cmd) # very bad!
+ self.c.execute(cmd) #: very bad!
return [x[0] for x in self.c]
@@ -944,7 +944,7 @@ class FileMethods(object):
"""returns pyfile ids with suited plugins"""
cmd = "SELECT l.id FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE l.plugin IN %s AND l.status IN (2, 3, 14) ORDER BY p.packageorder ASC, l.linkorder ASC LIMIT 5" % plugins
- self.c.execute(cmd) # very bad!
+ self.c.execute(cmd) #: very bad!
return [x[0] for x in self.c]
diff --git a/pyload/datatype/File.py b/pyload/datatype/File.py
index 0b6b5de46..05d515fd0 100644
--- a/pyload/datatype/File.py
+++ b/pyload/datatype/File.py
@@ -50,7 +50,7 @@ class PyFile(object):
self.size = size
self.status = status
self.plugintype, self.pluginname = plugin
- self.packageid = package # should not be used, use package() instead
+ self.packageid = package #: should not be used, use package() instead
self.error = error
self.order = order
# database information ends here
@@ -60,10 +60,10 @@ class PyFile(object):
self.plugin = None
# self.download = None
- self.waitUntil = 0 # time() + time to wait
+ self.waitUntil = 0 #: time() + time to wait
# status attributes
- self.active = False # obsolete?
+ self.active = False #: obsolete?
self.abort = False
self.reconnected = False
diff --git a/pyload/manager/Addon.py b/pyload/manager/Addon.py
index a46b99b2e..2a3bc4318 100644
--- a/pyload/manager/Addon.py
+++ b/pyload/manager/Addon.py
@@ -89,7 +89,7 @@ class AddonManager(object):
def callRPC(self, plugin, func, args, parse):
if not args:
- args = tuple()
+ args = ()
if parse:
args = tuple([literal_eval(x) for x in args])
plugin = self.pluginMap[plugin]
diff --git a/pyload/manager/Captcha.py b/pyload/manager/Captcha.py
index 4a7582d65..ab9f79b37 100644
--- a/pyload/manager/Captcha.py
+++ b/pyload/manager/Captcha.py
@@ -13,8 +13,8 @@ class CaptchaManager(object):
def __init__(self, core):
self.lock = Lock()
self.core = core
- self.tasks = [] # task store, for outgoing tasks only
- self.ids = 0 # only for internal purpose
+ self.tasks = [] #: task store, for outgoing tasks only
+ self.ids = 0 #: only for internal purpose
def newTask(self, img, format, file, result_type):
@@ -43,7 +43,7 @@ class CaptchaManager(object):
def getTaskByID(self, tid):
self.lock.acquire()
for task in self.tasks:
- if task.id == str(tid): # task ids are strings
+ if task.id == str(tid): #: task ids are strings
self.lock.release()
return task
self.lock.release()
@@ -81,9 +81,9 @@ class CaptchaTask(object):
self.handler = [] #: the hook plugins that will take care of the solution
self.result = None
self.waitUntil = None
- self.error = None # error message
+ self.error = None #: error message
self.status = "init"
- self.data = {} # handler can store data here
+ self.data = {} #: handler can store data here
def getCaptcha(self):
diff --git a/pyload/manager/Plugin.py b/pyload/manager/Plugin.py
index 9417a6935..118dea8f0 100644
--- a/pyload/manager/Plugin.py
+++ b/pyload/manager/Plugin.py
@@ -168,7 +168,7 @@ class PluginManager(object):
except Exception:
self.core.log.error("Invalid config in %s: %s" % (name, config))
- elif folder in ("addon", "hook"): # force config creation
+ elif folder in ("addon", "hook"): #: force config creation
desc = self.DESC.findall(content)
desc = desc[0][1] if desc else ""
config = (["activated", "bool", "Activated", False],)
@@ -310,9 +310,9 @@ class PluginManager(object):
def find_module(self, fullname, path=None):
# redirecting imports if necesarry
- if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): # seperate pyload plugins
+ if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #: seperate pyload plugins
if fullname.startswith(self.USERROOT): user = 1
- else: user = 0 # used as bool and int
+ else: user = 0 #: used as bool and int
split = fullname.split(".")
if len(split) != 4 - user:
@@ -329,7 +329,7 @@ class PluginManager(object):
def load_module(self, name, replace=True):
- if name not in sys.modules: # could be already in modules
+ if name not in sys.modules: #: could be already in modules
if replace:
if self.ROOT in name:
newname = name.replace(self.ROOT, self.USERROOT)
diff --git a/pyload/manager/Thread.py b/pyload/manager/Thread.py
index 782cf7b2a..a2a64c38d 100644
--- a/pyload/manager/Thread.py
+++ b/pyload/manager/Thread.py
@@ -33,7 +33,7 @@ class ThreadManager(object):
self.reconnecting = Event()
self.reconnecting.clear()
- self.downloaded = 0 # number of files downloaded since last cleanup
+ self.downloaded = 0 #: number of files downloaded since last cleanup
self.lock = Lock()
diff --git a/pyload/manager/thread/Download.py b/pyload/manager/thread/Download.py
index 21db61ca4..293014a2e 100644
--- a/pyload/manager/thread/Download.py
+++ b/pyload/manager/thread/Download.py
@@ -39,7 +39,7 @@ class DownloadThread(PluginThread):
while True:
del pyfile
- self.active = False # sets the thread inactive when it is ready to get next job
+ self.active = False #: sets the thread inactive when it is ready to get next job
self.active = self.queue.get()
pyfile = self.active
diff --git a/pyload/manager/thread/Info.py b/pyload/manager/thread/Info.py
index 28a2e8e91..9d8a3ef5b 100644
--- a/pyload/manager/thread/Info.py
+++ b/pyload/manager/thread/Info.py
@@ -26,13 +26,13 @@ class InfoThread(PluginThread):
PluginThread.__init__(self, manager)
self.data = data
- self.pid = pid # package id
+ self.pid = pid #: package id
# [ .. (name, plugin) .. ]
- self.rid = rid # result id
- self.add = add # add packages instead of return result
+ self.rid = rid #: result id
+ self.add = add #: add packages instead of return result
- self.cache = [] # accumulated data
+ self.cache = [] #: accumulated data
self.start()
@@ -83,7 +83,7 @@ class InfoThread(PluginThread):
# empty cache
del self.cache[:]
- else: # post the results
+ else: #: post the results
for name, url in container:
# attach container content
@@ -154,8 +154,8 @@ class InfoThread(PluginThread):
def fetchForPlugin(self, pluginname, plugin, urls, cb, err=None):
try:
- result = [] # result loaded from cache
- process = [] # urls to process
+ result = [] #: result loaded from cache
+ process = [] #: urls to process
for url in urls:
if url in self.m.infoCache:
result.append(self.m.infoCache[url])
diff --git a/pyload/manager/thread/Plugin.py b/pyload/manager/thread/Plugin.py
index 348f005a5..d8319a2ce 100644
--- a/pyload/manager/thread/Plugin.py
+++ b/pyload/manager/thread/Plugin.py
@@ -129,5 +129,5 @@ class PluginThread(Thread):
def clean(self, pyfile):
""" set thread unactive and release pyfile """
- self.active = True #release pyfile but lets the thread active
+ self.active = True #: release pyfile but lets the thread active
pyfile.release()
diff --git a/pyload/network/HTTPDownload.py b/pyload/network/HTTPDownload.py
index 90169571c..1e74d4476 100644
--- a/pyload/network/HTTPDownload.py
+++ b/pyload/network/HTTPDownload.py
@@ -24,11 +24,11 @@ class HTTPDownload(object):
def __init__(self, url, filename, get={}, post={}, referer=None, cj=None, bucket=None, options={}, progress=None, disposition=False):
self.url = url
- self.filename = filename #complete file destination, not only name
+ self.filename = filename #: complete file destination, not only name
self.get = get
self.post = post
self.referer = referer
- self.cj = cj #cookiejar if cookies are needed
+ self.cj = cj #: cookiejar if cookies are needed
self.bucket = bucket
self.options = options
self.disposition = disposition
@@ -151,7 +151,7 @@ class HTTPDownload(object):
lastFinishCheck = 0
lastTimeCheck = 0
- chunksDone = set() # list of curl handles that are finished
+ chunksDone = set() #: list of curl handles that are finished
chunksCreated = False
done = False
if self.info.getCount() is 0: #: This is a resume, if we were chunked originally assume still can
@@ -254,7 +254,7 @@ class HTTPDownload(object):
if len(chunksDone) >= len(self.chunks):
if len(chunksDone) > len(self.chunks):
self.log.warning("Finished download chunks size incorrect, please report bug.")
- done = True # all chunks loaded
+ done = True #: all chunks loaded
break
diff --git a/pyload/network/HTTPRequest.py b/pyload/network/HTTPRequest.py
index 1c645f898..92ce6ec4b 100644
--- a/pyload/network/HTTPRequest.py
+++ b/pyload/network/HTTPRequest.py
@@ -41,16 +41,16 @@ class HTTPRequest(object):
self.c = pycurl.Curl()
self.rep = StringIO()
- self.cj = cookies # cookiejar
+ self.cj = cookies #: cookiejar
self.lastURL = None
self.lastEffectiveURL = None
self.abort = False
- self.code = 0 # last http code
+ self.code = 0 #: last http code
self.header = ""
- self.headers = [] # temporary request header
+ self.headers = [] #: temporary request header
self.initHandle()
self.setInterface(options)
@@ -158,7 +158,7 @@ class HTTPRequest(object):
self.c.setopt(pycurl.POST, 1)
if not multipart:
if type(post) == unicode:
- post = str(post) # unicode not allowed
+ post = str(post) #: unicode not allowed
elif type(post) == str:
pass
else:
@@ -250,7 +250,7 @@ class HTTPRequest(object):
def decodeResponse(self, rep):
""" decode with correct encoding, relies on header """
header = self.header.splitlines()
- encoding = "utf8" # default encoding
+ encoding = "utf8" #: default encoding
for line in header:
line = line.lower().replace(" ", "")
diff --git a/pyload/plugin/Account.py b/pyload/plugin/Account.py
index f025170b3..fda76a78f 100644
--- a/pyload/plugin/Account.py
+++ b/pyload/plugin/Account.py
@@ -96,7 +96,7 @@ class Account(Base):
req.cj.clear()
req.close()
if user in self.infos:
- del self.infos[user] # delete old information
+ del self.infos[user] #: delete old information
return self._login(user, self.accounts[user])
@@ -112,7 +112,7 @@ class Account(Base):
""" updates account and return true if anything changed """
if user in self.accounts:
- self.accounts[user]['valid'] = True # do not remove or accounts will not login
+ self.accounts[user]['valid'] = True #: do not remove or accounts will not login
if password:
self.accounts[user]['password'] = password
self.relogin(user)
diff --git a/pyload/plugin/Addon.py b/pyload/plugin/Addon.py
index 43de6732a..e8cc03de2 100644
--- a/pyload/plugin/Addon.py
+++ b/pyload/plugin/Addon.py
@@ -112,7 +112,7 @@ class Addon(Base):
self.unload()
- def unload(self): # Deprecated, use method deactivate() instead
+ def unload(self): #: Deprecated, use method deactivate() instead
pass
@@ -130,7 +130,7 @@ class Addon(Base):
self.coreReady()
- def coreReady(self): # Deprecated, use method activate() instead
+ def coreReady(self): #: Deprecated, use method activate() instead
pass
@@ -140,7 +140,7 @@ class Addon(Base):
self.coreExiting()
- def coreExiting(self): # Deprecated, use method exit() instead
+ def coreExiting(self): #: Deprecated, use method exit() instead
pass
diff --git a/pyload/plugin/Plugin.py b/pyload/plugin/Plugin.py
index 0604547a3..8b7e6fbc2 100644
--- a/pyload/plugin/Plugin.py
+++ b/pyload/plugin/Plugin.py
@@ -220,10 +220,10 @@ class Plugin(Base):
self.user, data = self.account.selectAccount()
#: Browser instance, see `network.Browser`
self.req = self.account.getAccountRequest(self.user)
- self.chunkLimit = -1 # chunk limit, -1 for unlimited
+ self.chunkLimit = -1 #: chunk limit, -1 for unlimited
#: enables resume (will be ignored if server dont accept chunks)
self.resumeDownload = True
- self.multiDL = True # every hoster with account should provide multiple downloads
+ self.multiDL = True #: every hoster with account should provide multiple downloads
#: premium status
self.premium = self.account.isPremium(self.user)
else:
@@ -232,7 +232,7 @@ class Plugin(Base):
#: associated pyfile instance, see `PyFile`
self.pyfile = pyfile
- self.thread = None # holds thread in future
+ self.thread = None #: holds thread in future
#: location where the last call to download was saved
self.lastDownload = ""
@@ -500,7 +500,7 @@ class Plugin(Base):
captchaManager.removeTask(task)
- if task.error and has_plugin: # ignore default error message since the user could use OCR
+ if task.error and has_plugin: #: ignore default error message since the user could use OCR
self.fail(_("Pil and tesseract not installed and no Client connected for captcha decrypting"))
elif task.error:
self.fail(task.error)
@@ -740,9 +740,9 @@ class Plugin(Base):
for pyfile in self.core.files.cache.values():
if pyfile != self.pyfile and pyfile.name == self.pyfile.name and pyfile.package().folder == pack.folder:
- if pyfile.status in (0, 12): # finished or downloading
+ if pyfile.status in (0, 12): #: finished or downloading
raise SkipDownload(pyfile.pluginname)
- elif pyfile.status in (5, 7) and starting: # a download is waiting/starting and was appenrently started before
+ elif pyfile.status in (5, 7) and starting: #: a download is waiting/starting and was appenrently started before
raise SkipDownload(pyfile.pluginname)
download_folder = self.core.config.get("general", "download_folder")
diff --git a/pyload/plugin/account/FastshareCz.py b/pyload/plugin/account/FastshareCz.py
index 518746710..f1ed9d634 100644
--- a/pyload/plugin/account/FastshareCz.py
+++ b/pyload/plugin/account/FastshareCz.py
@@ -41,7 +41,7 @@ class FastshareCz(Account):
def login(self, user, data, req):
req.cj.setCookie("fastshare.cz", "lang", "en")
- req.load('http://www.fastshare.cz/login') # Do not remove or it will not login
+ req.load('http://www.fastshare.cz/login') #: Do not remove or it will not login
html = req.load("http://www.fastshare.cz/sql.php",
post={'login': user, 'heslo': data['password']},
diff --git a/pyload/plugin/account/FilecloudIo.py b/pyload/plugin/account/FilecloudIo.py
index 12e2d44f5..b07fe981a 100644
--- a/pyload/plugin/account/FilecloudIo.py
+++ b/pyload/plugin/account/FilecloudIo.py
@@ -30,7 +30,7 @@ class FilecloudIo(Account):
return {"premium": False}
akey = rep['akey']
- self.accounts[user]['akey'] = akey # Saved for hoster plugin
+ self.accounts[user]['akey'] = akey #: Saved for hoster plugin
rep = req.load("http://api.filecloud.io/api-fetch_account_details.api",
post={"akey": akey})
rep = json_loads(rep)
diff --git a/pyload/plugin/crypter/DataHu.py b/pyload/plugin/crypter/DataHu.py
index dd817b5ce..ce480dacb 100644
--- a/pyload/plugin/crypter/DataHu.py
+++ b/pyload/plugin/crypter/DataHu.py
@@ -28,7 +28,7 @@ class DataHu(SimpleCrypter):
def prepare(self):
super(DataHu, self).prepare()
- if u'K\xe9rlek add meg a jelsz\xf3t' in self.html: # Password protected
+ if u'K\xe9rlek add meg a jelsz\xf3t' in self.html: #: Password protected
password = self.getPassword()
if not password:
self.fail(_("Password required"))
@@ -37,5 +37,5 @@ class DataHu(SimpleCrypter):
self.html = self.load(self.pyfile.url, post={'mappa_pass': password}, decode=True)
- if u'Hib\xe1s jelsz\xf3' in self.html: # Wrong password
+ if u'Hib\xe1s jelsz\xf3' in self.html: #: Wrong password
self.fail(_("Wrong password"))
diff --git a/pyload/plugin/crypter/LinkCryptWs.py b/pyload/plugin/crypter/LinkCryptWs.py
index b26d6830d..c997cbf9f 100644
--- a/pyload/plugin/crypter/LinkCryptWs.py
+++ b/pyload/plugin/crypter/LinkCryptWs.py
@@ -238,7 +238,7 @@ class LinkCryptWs(Crypter):
self.logDebug('Search for %s Container links' % type.upper())
- if not type.isalnum(): # check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric)
+ if not type.isalnum(): #: check to prevent broken re-pattern (cnl2,rsdf,ccf,dlc,web are all alpha-numeric)
self.fail(_("Unknown container type: %s") % type) #@TODO: Replace with self.error in 0.4.10
for line in self.container_html:
diff --git a/pyload/plugin/crypter/NCryptIn.py b/pyload/plugin/crypter/NCryptIn.py
index d59fbd6a9..bc9702f21 100644
--- a/pyload/plugin/crypter/NCryptIn.py
+++ b/pyload/plugin/crypter/NCryptIn.py
@@ -69,7 +69,7 @@ class NCryptIn(Crypter):
# Extract package links
for link_source_type in self.links_source_order:
package_links.extend(self.handleLinkSource(link_source_type))
- if package_links: # use only first source which provides links
+ if package_links: #: use only first source which provides links
break
package_links = set(package_links)
diff --git a/pyload/plugin/crypter/RelinkUs.py b/pyload/plugin/crypter/RelinkUs.py
index 6296e9f40..2b9a85401 100644
--- a/pyload/plugin/crypter/RelinkUs.py
+++ b/pyload/plugin/crypter/RelinkUs.py
@@ -89,7 +89,7 @@ class RelinkUs(Crypter):
package_links = []
for sources in self.PREFERRED_LINK_SOURCES:
package_links.extend(self.handleLinkSource(sources))
- if package_links: # use only first source which provides links
+ if package_links: #: use only first source which provides links
break
package_links = set(package_links)
diff --git a/pyload/plugin/crypter/YoutubeComFolder.py b/pyload/plugin/crypter/YoutubeComFolder.py
index 84277207a..220c1dfbb 100644
--- a/pyload/plugin/crypter/YoutubeComFolder.py
+++ b/pyload/plugin/crypter/YoutubeComFolder.py
@@ -43,7 +43,7 @@ class YoutubeComFolder(Crypter):
return {"id": channel['id'],
"title": channel['snippet']['title'],
"relatedPlaylists": channel['contentDetails']['relatedPlaylists'],
- "user": user} # One lone channel for user?
+ "user": user} #: One lone channel for user?
def getPlaylist(self, p_id):
diff --git a/pyload/plugin/hook/MegaDebridEu.py b/pyload/plugin/hook/MegaDebridEu.py
index a069cbcdd..41abce37b 100644
--- a/pyload/plugin/hook/MegaDebridEu.py
+++ b/pyload/plugin/hook/MegaDebridEu.py
@@ -28,6 +28,6 @@ class MegaDebridEu(MultiHook):
host_list = [element[0] for element in json_data['hosters']]
else:
self.logError(_("Unable to retrieve hoster list"))
- host_list = list()
+ host_list = []
return host_list
diff --git a/pyload/plugin/hoster/FileserveCom.py b/pyload/plugin/hoster/FileserveCom.py
index 686bff101..4530f8ff8 100644
--- a/pyload/plugin/hoster/FileserveCom.py
+++ b/pyload/plugin/hoster/FileserveCom.py
@@ -134,7 +134,7 @@ class FileserveCom(Hoster):
self.wait()
self.retry()
- self.thread.m.reconnecting.wait(3) # Ease issue with later downloads appearing to be in parallel
+ self.thread.m.reconnecting.wait(3) #: Ease issue with later downloads appearing to be in parallel
def doTimmer(self):
diff --git a/pyload/plugin/hoster/FreakshareCom.py b/pyload/plugin/hoster/FreakshareCom.py
index 1b1d25826..6cf447128 100644
--- a/pyload/plugin/hoster/FreakshareCom.py
+++ b/pyload/plugin/hoster/FreakshareCom.py
@@ -87,7 +87,7 @@ class FreakshareCom(Hoster):
def download_html(self):
- self.load("http://freakshare.com/index.php", {"language": "EN"}) # Set english language in server session
+ self.load("http://freakshare.com/index.php", {"language": "EN"}) #: Set english language in server session
self.html = self.load(self.pyfile.url)
@@ -97,7 +97,7 @@ class FreakshareCom(Hoster):
if not self.html:
self.download_html()
if not self.wantReconnect:
- self.req_opts = self.get_download_options() # get the Post options for the Request
+ self.req_opts = self.get_download_options() #: get the Post options for the Request
# file_url = self.pyfile.url
# return file_url
else:
@@ -163,11 +163,11 @@ class FreakshareCom(Hoster):
def get_download_options(self):
re_envelope = re.search(r".*?value=\"Free\sDownload\".*?\n*?(.*?<.*?>\n*)*?\n*\s*?</form>",
- self.html).group(0) # get the whole request
+ self.html).group(0) #: get the whole request
to_sort = re.findall(r"<input\stype=\"hidden\"\svalue=\"(.*?)\"\sname=\"(.*?)\"\s\/>", re_envelope)
request_options = dict((n, v) for (v, n) in to_sort)
- herewego = self.load(self.pyfile.url, None, request_options) # the actual download-Page
+ herewego = self.load(self.pyfile.url, None, request_options) #: the actual download-Page
to_sort = re.findall(r"<input\stype=\".*?\"\svalue=\"(\S*?)\".*?name=\"(\S*?)\"\s.*?\/>", herewego)
request_options = dict((n, v) for (v, n) in to_sort)
diff --git a/pyload/plugin/hoster/Ftp.py b/pyload/plugin/hoster/Ftp.py
index 4f6a01d22..86049df04 100644
--- a/pyload/plugin/hoster/Ftp.py
+++ b/pyload/plugin/hoster/Ftp.py
@@ -69,7 +69,7 @@ class Ftp(Hoster):
pyfile.url = pyfile.url.rstrip('/')
pkgname = "/".join(pyfile.package().name, urlparse(pyfile.url).path.rpartition('/')[2])
pyfile.url += '/'
- self.req.http.c.setopt(48, 1) # CURLOPT_DIRLISTONLY
+ self.req.http.c.setopt(48, 1) #: CURLOPT_DIRLISTONLY
res = self.load(pyfile.url, decode=False)
links = [pyfile.url + quote(x) for x in res.splitlines()]
self.logDebug("LINKS", links)
diff --git a/pyload/plugin/hoster/MegaCoNz.py b/pyload/plugin/hoster/MegaCoNz.py
index 496d4503f..9dea99b23 100644
--- a/pyload/plugin/hoster/MegaCoNz.py
+++ b/pyload/plugin/hoster/MegaCoNz.py
@@ -126,8 +126,8 @@ class MegaCoNz(Hoster):
except IOError, e:
self.fail(e)
- chunk_size = 2 ** 15 # buffer size, 32k
- # file_mac = [0, 0, 0, 0] # calculate CBC-MAC for checksum
+ chunk_size = 2 ** 15 #: buffer size, 32k
+ # file_mac = [0, 0, 0, 0] #: calculate CBC-MAC for checksum
chunks = os.path.getsize(file_crypted) / chunk_size + 1
for i in xrange(chunks):
diff --git a/pyload/plugin/hoster/NetloadIn.py b/pyload/plugin/hoster/NetloadIn.py
index a3d0b44b1..f4421615f 100644
--- a/pyload/plugin/hoster/NetloadIn.py
+++ b/pyload/plugin/hoster/NetloadIn.py
@@ -152,9 +152,9 @@ class NetloadIn(Hoster):
if self.api_data['status'] == "online":
self.api_data['checksum'] = lines[4].strip()
else:
- self.api_data = False # check manually since api data is useless sometimes
+ self.api_data = False #: check manually since api data is useless sometimes
- if lines[0] == lines[1] and lines[2] == "0": # useless api data
+ if lines[0] == lines[1] and lines[2] == "0": #: useless api data
self.api_data = False
else:
self.api_data = False
diff --git a/pyload/plugin/hoster/OboomCom.py b/pyload/plugin/hoster/OboomCom.py
index c37e89339..5b9b11485 100644
--- a/pyload/plugin/hoster/OboomCom.py
+++ b/pyload/plugin/hoster/OboomCom.py
@@ -99,7 +99,7 @@ class OboomCom(Hoster):
self.retry(5, 15 * 60, _("Service unavailable"))
elif result[0] == 403:
- if result[1] == -1: # another download is running
+ if result[1] == -1: #: another download is running
self.setWait(15 * 60)
else:
self.setWait(result[1], True)
diff --git a/pyload/plugin/hoster/PornhostCom.py b/pyload/plugin/hoster/PornhostCom.py
index f6a63117c..103882166 100644
--- a/pyload/plugin/hoster/PornhostCom.py
+++ b/pyload/plugin/hoster/PornhostCom.py
@@ -47,7 +47,7 @@ class PornhostCom(Hoster):
url = re.search(r'width: 894px; height: 675px">.*?<img src="(.*?)"', self.html)
if url is None:
url = re.search(r'"http://file\d+\.pornhost\.com/\d+/.*?"',
- self.html) # TODO: fix this one since it doesn't match
+ self.html) #: TODO: fix this one since it doesn't match
return url.group(1).strip()
diff --git a/pyload/plugin/hoster/PremiumizeMe.py b/pyload/plugin/hoster/PremiumizeMe.py
index 809d27624..f577da90e 100644
--- a/pyload/plugin/hoster/PremiumizeMe.py
+++ b/pyload/plugin/hoster/PremiumizeMe.py
@@ -20,7 +20,7 @@ class PremiumizeMe(MultiHoster):
def handlePremium(self, pyfile):
# In some cases hostsers do not supply us with a filename at download, so we
# are going to set a fall back filename (e.g. for freakshare or xfileshare)
- pyfile.name = pyfile.name.split('/').pop() # Remove everthing before last slash
+ pyfile.name = pyfile.name.split('/').pop() #: Remove everthing before last slash
# Correction for automatic assigned filename: Removing html at end if needed
suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
diff --git a/pyload/plugin/hoster/RPNetBiz.py b/pyload/plugin/hoster/RPNetBiz.py
index 6788eebce..dc11eefb2 100644
--- a/pyload/plugin/hoster/RPNetBiz.py
+++ b/pyload/plugin/hoster/RPNetBiz.py
@@ -34,12 +34,12 @@ class RPNetBiz(MultiHoster):
"links" : pyfile.url})
self.logDebug("JSON data: %s" % res)
- link_status = json_loads(res)['links'][0] # get the first link... since we only queried one
+ link_status = json_loads(res)['links'][0] #: get the first link... since we only queried one
# Check if we only have an id as a HDD link
if 'id' in link_status:
self.logDebug("Need to wait at least 30 seconds before requery")
- self.setWait(30) # wait for 30 seconds
+ self.setWait(30) #: wait for 30 seconds
self.wait()
# Lets query the server again asking for the status on the link,
# we need to keep doing this until we reach 100
@@ -66,7 +66,7 @@ class RPNetBiz(MultiHoster):
self.wait()
my_try += 1
- if my_try > max_tries: # We went over the limit!
+ if my_try > max_tries: #: We went over the limit!
self.fail(_("Waited for about 15 minutes for download to finish but failed"))
if 'generated' in link_status:
diff --git a/pyload/plugin/hoster/SmoozedCom.py b/pyload/plugin/hoster/SmoozedCom.py
index 1ed3a539d..f216a95bc 100644
--- a/pyload/plugin/hoster/SmoozedCom.py
+++ b/pyload/plugin/hoster/SmoozedCom.py
@@ -20,7 +20,7 @@ class SmoozedCom(MultiHoster):
def handlePremium(self, pyfile):
# In some cases hostsers do not supply us with a filename at download, so we
# are going to set a fall back filename (e.g. for freakshare or xfileshare)
- pyfile.name = pyfile.name.split('/').pop() # Remove everthing before last slash
+ pyfile.name = pyfile.name.split('/').pop() #: Remove everthing before last slash
# Correction for automatic assigned filename: Removing html at end if needed
suffix_to_remove = ["html", "htm", "php", "php3", "asp", "shtm", "shtml", "cfml", "cfm"]
diff --git a/pyload/plugin/hoster/UlozTo.py b/pyload/plugin/hoster/UlozTo.py
index b2e31dccf..49d5d2ac1 100644
--- a/pyload/plugin/hoster/UlozTo.py
+++ b/pyload/plugin/hoster/UlozTo.py
@@ -123,7 +123,7 @@ class UlozTo(SimpleHoster):
"wrong_captcha": re.compile(r'<ul class="error">\s*<li>Error rewriting the text.</li>'),
"offline" : re.compile(self.OFFLINE_PATTERN),
"passwd" : self.PASSWD_PATTERN,
- "server_error" : 'src="http://img.ulozto.cz/error403/vykricnik.jpg"', # paralell dl, server overload etc.
+ "server_error" : 'src="http://img.ulozto.cz/error403/vykricnik.jpg"', #: paralell dl, server overload etc.
"not_found" : "<title>Ulož.to</title>"
})
diff --git a/pyload/plugin/hoster/UploadedTo.py b/pyload/plugin/hoster/UploadedTo.py
index 487c01576..c8b201ec6 100644
--- a/pyload/plugin/hoster/UploadedTo.py
+++ b/pyload/plugin/hoster/UploadedTo.py
@@ -57,7 +57,7 @@ class UploadedTo(SimpleHoster):
def setup(self):
self.multiDL = self.resumeDownload = self.premium
- self.chunkLimit = 1 # critical problems with more chunks
+ self.chunkLimit = 1 #: critical problems with more chunks
def checkErrors(self):
@@ -68,14 +68,14 @@ class UploadedTo(SimpleHoster):
elif "limit-size" in self.html:
self.fail(_("File too big for free download"))
- elif "limit-slot" in self.html: # Temporary restriction so just wait a bit
+ elif "limit-slot" in self.html: #: Temporary restriction so just wait a bit
self.wait(30 * 60, True)
self.retry()
elif "limit-parallel" in self.html:
self.fail(_("Cannot download in parallel"))
- elif "limit-dl" in self.html or self.DL_LIMIT_ERROR in self.html: # limit-dl
+ elif "limit-dl" in self.html or self.DL_LIMIT_ERROR in self.html: #: limit-dl
self.wait(3 * 60 * 60, True)
self.retry()
diff --git a/pyload/plugin/hoster/Xdcc.py b/pyload/plugin/hoster/Xdcc.py
index 42491404f..e1a4c72b2 100644
--- a/pyload/plugin/hoster/Xdcc.py
+++ b/pyload/plugin/hoster/Xdcc.py
@@ -29,7 +29,7 @@ class Xdcc(Hoster):
def setup(self):
- self.debug = 0 # 0,1,2
+ self.debug = 0 #: 0,1,2
self.timeout = 30
self.multiDL = False
@@ -62,7 +62,7 @@ class Xdcc(Hoster):
def doDownload(self, url):
- self.pyfile.setStatus("waiting") # real link
+ self.pyfile.setStatus("waiting") #: real link
m = re.match(r'xdcc://(.*?)/#?(.*?)/(.*?)/#?(\d+)/?', url)
server = m.group(1)
@@ -89,7 +89,7 @@ class Xdcc(Hoster):
sock = socket.socket()
sock.connect((host, int(port)))
if nick == "pyload":
- nick = "pyload-%d" % (time.time() % 1000) # last 3 digits
+ nick = "pyload-%d" % (time.time() % 1000) #: last 3 digits
sock.send("NICK %s\r\n" % nick)
sock.send("USER %s %s bla :%s\r\n" % (ident, host, real))
@@ -161,7 +161,7 @@ class Xdcc(Hoster):
self.logDebug("Sending CTCP TIME")
sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time()))
elif msg['text'] == "\x01LAG\x01":
- pass # don't know how to answer
+ pass #: don't know how to answer
if not (bot == msg['origin'][0:len(bot)]
and nick == msg['target'][0:len(nick)]
diff --git a/pyload/plugin/hoster/ZippyshareCom.py b/pyload/plugin/hoster/ZippyshareCom.py
index dd78071c9..7f91c04e5 100644
--- a/pyload/plugin/hoster/ZippyshareCom.py
+++ b/pyload/plugin/hoster/ZippyshareCom.py
@@ -68,7 +68,7 @@ class ZippyshareCom(SimpleHoster):
def replElementById(element):
id = element.group(1) #: id might be either 'x' (a real id) or x (a variable)
- attr = element.group(4) # attr might be None
+ attr = element.group(4) #: attr might be None
varName = re.sub(r'-', '', 'GVAR[%s+"_%s"]' %(id, attr))
diff --git a/pyload/utils/__init__.py b/pyload/utils/__init__.py
index 2e23bf99b..3d26983b5 100644
--- a/pyload/utils/__init__.py
+++ b/pyload/utils/__init__.py
@@ -63,7 +63,7 @@ def remove_chars(string, repl):
def safe_filename(name):
""" remove bad chars """
- name = unquote(name).encode('ascii', 'replace') # Non-ASCII chars usually breaks file saving. Replacing.
+ name = unquote(name).encode('ascii', 'replace') #: Non-ASCII chars usually breaks file saving. Replacing.
if os.name == 'nt':
return remove_chars(name, u'\00\01\02\03\04\05\06\07\10\11\12\13\14\15\16\17\20\21\22\23\24\25\26\27\30\31\32'
u'\33\34\35\36\37/?%*|"<>')
@@ -95,15 +95,15 @@ if sys.getfilesystemencoding().startswith('ANSI'):
def fs_encode(string):
return safe_filename(encode(string))
- fs_decode = decode # decode utf8
+ fs_decode = decode #: decode utf8
else:
- fs_encode = fs_decode = lambda x: x # do nothing
+ fs_encode = fs_decode = lambda x: x #: do nothing
def get_console_encoding(enc):
if os.name == "nt":
- if enc == "cp65001": # aka UTF-8
+ if enc == "cp65001": #: aka UTF-8
print "WARNING: Windows codepage 65001 is not supported."
enc = "cp850"
else:
@@ -178,7 +178,7 @@ def uniqify(seq): #: Originally by Dave Kirby
return [x for x in seq if x not in seen and not seen_add(x)]
-def parseFileSize(string, unit=None): # returns bytes
+def parseFileSize(string, unit=None): #: returns bytes
if not unit:
m = re.match(r"([\d.,]+) *([a-zA-Z]*)", string.strip().lower())
if m:
@@ -244,7 +244,7 @@ def fixup(m):
except KeyError:
pass
- return text # leave as is
+ return text #: leave as is
def has_method(obj, name):
diff --git a/pyload/utils/pylgettext.py b/pyload/utils/pylgettext.py
index 86cfc586a..76bb268ec 100644
--- a/pyload/utils/pylgettext.py
+++ b/pyload/utils/pylgettext.py
@@ -37,7 +37,7 @@ def find(domain, localedir=None, languages=None, all=False):
if _searchdirs is None:
return origfind(domain, localedir, languages, all)
searches = [localedir] + _searchdirs
- results = list()
+ results = []
for dir in searches:
res = origfind(domain, dir, languages, all)
if all is False:
diff --git a/pyload/webui/app/pyloadweb.py b/pyload/webui/app/pyloadweb.py
index 705895310..7f2317bd1 100644
--- a/pyload/webui/app/pyloadweb.py
+++ b/pyload/webui/app/pyloadweb.py
@@ -429,7 +429,7 @@ def logs(item=-1):
if item < 1 or type(item) is not int:
item = 1 if len(log) - perpage + 1 < 1 else len(log) - perpage + 1
- if type(fro) is datetime: # we will search for datetime
+ if type(fro) is datetime: #: we will search for datetime
item = -1
data = []
@@ -449,16 +449,16 @@ def logs(item=-1):
level = '?'
message = l
if item == -1 and dtime is not None and fro <= dtime:
- item = counter # found our datetime
+ item = counter #: found our datetime
if item >= 0:
data.append({'line': counter, 'date': date + " " + time, 'level': level, 'message': message})
perpagecheck += 1
- if fro is None and dtime is not None: # if fro not set set it to first showed line
+ if fro is None and dtime is not None: #: if fro not set set it to first showed line
fro = dtime
if perpagecheck >= perpage > 0:
break
- if fro is None: # still not set, empty log?
+ if fro is None: #: still not set, empty log?
fro = datetime.now()
if reversed:
data.reverse()
diff --git a/tests/clonedigger.sh b/tests/clonedigger.sh
index 4c53eab0d..e7fd17eb6 100644
--- a/tests/clonedigger.sh
+++ b/tests/clonedigger.sh
@@ -1,4 +1,4 @@
#!/bin/sh
-PYLOAD="../pyload" # Check pyload directory
+PYLOAD="../pyload" #: Check pyload directory
clonedigger -o cpd.xml --cpd-output --fast --ignore-dir="remote" ${PYLOAD}
diff --git a/tests/code_analysis.sh b/tests/code_analysis.sh
index cba614929..aaf6bb6a4 100644
--- a/tests/code_analysis.sh
+++ b/tests/code_analysis.sh
@@ -1,6 +1,6 @@
#!/bin/sh
-PYLOAD="../pyload" # Check pyload directory
+PYLOAD="../pyload" #: Check pyload directory
echo "Running sloccount ..."
REPORT="sloccount.sc"
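Apart from the comment markers, the only code changes in the commit are the container literals (args = () in manager/Addon.py, host_list = [] in hook/MegaDebridEu.py, results = [] in utils/pylgettext.py). The two spellings are equivalent; the literal merely skips a builtin name lookup. A trivial check, illustrative only and not part of the commit:

    args = ()         # equivalent to tuple()
    host_list = []    # equivalent to list()
    results = []
    assert args == tuple() and host_list == list() == results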