author     X3n0m0rph59 <X3n0m0rph59@googlemail.com>   2012-04-22 19:56:17 +0200
committer  X3n0m0rph59 <X3n0m0rph59@googlemail.com>   2012-04-22 19:56:17 +0200
commit     b40b32ee05f611323a7827fad2a25fa0a28dcb24 (patch)
tree       60f7f00e4be25942230668f43cb11a30b6fd10e6
parent     Fixed spelling in the source (diff)
download   pyload-b40b32ee05f611323a7827fad2a25fa0a28dcb24.tar.xz
a huge pile of spelling fixes
-rw-r--r--  module/Api.py                                 20
-rw-r--r--  module/cli/ManageFiles.py                      6
-rw-r--r--  module/common/APIExerciser.py                  4
-rw-r--r--  module/common/packagetools.py                  4
-rw-r--r--  module/config/ConfigParser.py                 20
-rw-r--r--  module/database/DatabaseBackend.py             6
-rw-r--r--  module/database/FileDatabase.py               12
-rw-r--r--  module/interaction/EventManager.py            16
-rw-r--r--  module/interaction/InteractionManager.py       4
-rw-r--r--  module/network/Browser.py                      2
-rw-r--r--  module/network/Bucket.py                       2
-rw-r--r--  module/network/HTTPChunk.py                    8
-rw-r--r--  module/network/HTTPDownload.py                 8
-rw-r--r--  module/network/RequestFactory.py               2
-rw-r--r--  module/network/XDCCRequest.py                  2
-rw-r--r--  module/plugins/Account.py                     32
-rw-r--r--  module/plugins/Addon.py                        8
-rw-r--r--  module/plugins/Base.py                        10
-rw-r--r--  module/plugins/Crypter.py                     14
-rw-r--r--  module/plugins/Hoster.py                      14
-rw-r--r--  module/plugins/PluginManager.py                4
-rw-r--r--  module/plugins/addons/Ev0InFetcher.py          4
-rw-r--r--  module/plugins/hoster/NetloadIn.py             4
-rw-r--r--  module/plugins/internal/AbstractExtractor.py  12
-rw-r--r--  module/plugins/internal/SimpleHoster.py        2
-rw-r--r--  module/remote/thriftbackend/Socket.py          2
-rw-r--r--  module/threads/BaseThread.py                   2
-rw-r--r--  module/threads/DownloadThread.py               4
-rw-r--r--  module/threads/ThreadManager.py               10
-rw-r--r--  module/utils/fs.py                             2
-rw-r--r--  module/web/api_app.py                          2
31 files changed, 121 insertions, 121 deletions
diff --git a/module/Api.py b/module/Api.py
index 6d7ac75b6..d85680cd5 100644
--- a/module/Api.py
+++ b/module/Api.py
@@ -89,10 +89,10 @@ class Api(Iface):
This is accessible either internal via core.api, thrift backend or json api.
see Thrift specification file remote/thriftbackend/pyload.thrift\
- for information about data structures and what methods are usuable with rpc.
+ for information about data structures and what methods are usable with rpc.
Most methods requires specific permissions, please look at the source code if you need to know.\
- These can be configured via webinterface.
+ These can be configured via web interface.
Admin user have all permissions, and are the only ones who can access the methods with no specific permission.
"""
@@ -128,7 +128,7 @@ class Api(Iface):
@permission(PERMS.STATUS)
def pauseServer(self):
- """Pause server: Tt wont start any new downloads, but nothing gets aborted."""
+ """Pause server: It won't start any new downloads, but nothing gets aborted."""
self.core.threadManager.pause = True
@permission(PERMS.STATUS)
@@ -307,7 +307,7 @@ class Api(Iface):
@permission(PERMS.ADD)
def parseURLs(self, html=None, url=None):
- """Parses html content or any arbitaty text for links and returns result of `checkURLs`
+ """Parses html content or any arbitrary text for links and returns result of `checkURLs`
:param html: html source
:return:
@@ -327,7 +327,7 @@ class Api(Iface):
@permission(PERMS.ADD)
def checkURLs(self, urls):
- """ Gets urls and returns pluginname mapped to list of matches urls.
+ """ Gets urls and returns pluginname mapped to list of matching urls.
:param urls:
:return: {plugin: urls}
@@ -369,7 +369,7 @@ class Api(Iface):
@permission(PERMS.ADD)
def checkOnlineStatusContainer(self, urls, container, data):
- """ checks online status of urls and a submited container file
+ """ checks online status of urls and a submitted container file
:param urls: list of urls
:param container: container file name
@@ -387,7 +387,7 @@ class Api(Iface):
""" Polls the result available for ResultID
:param rid: `ResultID`
- :return: `OnlineCheck`, if rid is -1 then no more data available
+ :return: `OnlineCheck`, if rid is -1 then there is no more data available
"""
result = self.core.threadManager.getInfoResult(rid)
@@ -434,7 +434,7 @@ class Api(Iface):
:param name: display name of the package
:param folder: folder name or relative path, abs path are not allowed
:param root: package id of root package, -1 for top level package
- :param password: single pw or list of passwords seperated with new line
+ :param password: single pw or list of passwords separated with new line
:param site: arbitrary url to site for more information
:param comment: arbitrary comment
:param paused: No downloads will be started when True
@@ -454,7 +454,7 @@ class Api(Iface):
@permission(PERMS.ADD)
def addPackage(self, name, links, password=""):
- """Convenient method to add a package to top-level and adding links.
+ """Convenient method to add a package to the top-level and for adding links.
:return: package id
"""
@@ -484,7 +484,7 @@ class Api(Iface):
@permission(PERMS.ADD)
def addLinks(self, pid, links):
- """Adds links to specific package. Automatical starts online status fetching.
+ """Adds links to specific package. Initiates online status fetching.
:param pid: package id
:param links: list of urls
diff --git a/module/cli/ManageFiles.py b/module/cli/ManageFiles.py
index 4d0377d9d..c133d9959 100644
--- a/module/cli/ManageFiles.py
+++ b/module/cli/ManageFiles.py
@@ -32,7 +32,7 @@ class ManageFiles(Handler):
def init(self):
self.target = Destination.Queue
self.pos = 0 #position in queue
- self.package = -1 #choosen package
+ self.package = -1 #chosen package
self.mode = "" # move/delete/restart
self.cache = None
@@ -107,10 +107,10 @@ class ManageFiles(Handler):
elif self.mode == "r":
println(line, _("What do you want to restart?"))
- println(line + 1, "Enter single number, comma seperated numbers or ranges. eg. 1,2,3 or 1-3.")
+ println(line + 1, "Enter a single number, comma separated numbers or ranges. e.g.: 1,2,3 or 1-3.")
line += 2
else:
- println(line, _("Choose what yout want to do or enter package number."))
+ println(line, _("Choose what you want to do, or enter package number."))
println(line + 1, ("%s - %%s, %s - %%s, %s - %%s" % (mag("d"), mag("m"), mag("r"))) % (
_("delete"), _("move"), _("restart")))
line += 2
diff --git a/module/common/APIExerciser.py b/module/common/APIExerciser.py
index 657e83c78..ac6bc6c15 100644
--- a/module/common/APIExerciser.py
+++ b/module/common/APIExerciser.py
@@ -59,7 +59,7 @@ class APIExerciser(Thread):
def run(self):
- self.core.log.info("API Excerciser started %d" % self.id)
+ self.core.log.info("API Exerciser started %d" % self.id)
out = open("error.log", "ab")
#core errors are not logged of course
@@ -70,7 +70,7 @@ class APIExerciser(Thread):
try:
self.testAPI()
except Exception:
- self.core.log.error("Excerciser %d throw an execption" % self.id)
+ self.core.log.error("Exerciser %d throw an exception" % self.id)
print_exc()
out.write(format_exc() + 2 * "\n")
out.flush()
diff --git a/module/common/packagetools.py b/module/common/packagetools.py
index 5bfbcba95..791a46d51 100644
--- a/module/common/packagetools.py
+++ b/module/common/packagetools.py
@@ -21,7 +21,7 @@ def parseNames(files):
""" Generates packages names from name, data lists
:param files: list of (name, data)
- :return: packagenames mapt to data lists (eg. urls)
+ :return: packagenames mapped to data lists (eg. urls)
"""
packs = {}
@@ -64,7 +64,7 @@ def parseNames(files):
if len(split) > 1:
name = split.pop(1)
- #check if a already existing package may be ok for this file
+ #check if an already existing package may be ok for this file
# found = False
# for pack in packs:
# if pack in file:
diff --git a/module/config/ConfigParser.py b/module/config/ConfigParser.py
index a9e74dd20..9cc9f1fbe 100644
--- a/module/config/ConfigParser.py
+++ b/module/config/ConfigParser.py
@@ -17,8 +17,8 @@ ConfigData = namedtuple("ConfigData", "name type description default")
class ConfigParser:
"""
- Holds and manage the configuration + meta data.
- Actually only the values are read from disk, all meta data have to be provided first via addConfigSection.
+ Holds and manages the configuration + meta data.
+ Actually only the values are read from disk, all meta data has to be provided first via addConfigSection.
"""
CONFIG = "pyload.conf"
@@ -46,9 +46,9 @@ class ConfigParser:
make_config(self)
def checkVersion(self):
- """Determines if config need to be deleted"""
+ """Determines if config needs to be deleted"""
e = None
- # workaround conflict, with GUI (which also access the config) so try read in 3 times
+ # workaround conflict, with GUI (which also accesses the config) so try read in 3 times
for i in range(0, 3):
try:
for conf in (self.CONFIG, self.PLUGIN):
@@ -92,7 +92,7 @@ class ConfigParser:
section = line.replace("[", "").replace("]", "")
if section not in self.config:
- print "Unrecognzied section", section
+ print "Unrecognized section", section
section = ""
else:
@@ -113,7 +113,7 @@ class ConfigParser:
def save(self):
"""saves config to filename"""
- # seperate pyload and plugin conf
+ # separate pyload and plugin conf
configs = []
for c in (self.CONFIG, self.PLUGIN):
f = open(c, "wb")
@@ -140,7 +140,7 @@ class ConfigParser:
[f.close() for f in configs]
def __getitem__(self, section):
- """provides dictonary like access: c['section']['option']"""
+ """provides dictionary like access: c['section']['option']"""
return Section(self, section)
def get(self, section, option):
@@ -156,7 +156,7 @@ class ConfigParser:
data = self.config[section].config[option]
value = from_string(value, data.type)
- # only save when different to defaul values
+ # only save when different to default values
if value != data.default or (option in self.values[section] and value != self.values[section][option]):
self.values[section][option] = value
if sync:
@@ -191,10 +191,10 @@ class ConfigParser:
return
def addConfigSection(self, section, name, desc, long_desc, config, base=False):
- """Adds a section to the config. `config` is a list of config tuples as used in plugin api definied as:
+ """Adds a section to the config. `config` is a list of config tuples as used in plugin api defined as:
Either (name, type, verbose_name, default_value) or
(name, type, verbose_name, short_description, default_value)
- The ordner of the config elements are preserved with OrdererDict
+ The order of the config elements is preserved with OrderedDict
"""
d = OrderedDict()
diff --git a/module/database/DatabaseBackend.py b/module/database/DatabaseBackend.py
index 8159446bd..97ecec3ab 100644
--- a/module/database/DatabaseBackend.py
+++ b/module/database/DatabaseBackend.py
@@ -131,7 +131,7 @@ class DatabaseBackend(Thread):
Thread.__init__(self)
self.setDaemon(True)
self.core = core
- self.manager = None # setted later
+ self.manager = None # set later
self.running = Event()
self.jobs = Queue()
@@ -162,9 +162,9 @@ class DatabaseBackend(Thread):
self.conn.close()
try:
- self.manager.core.log.warning(_("Filedatabase was deleted due to incompatible version."))
+ self.manager.core.log.warning(_("File database was deleted due to incompatible version."))
except:
- print "Filedatabase was deleted due to incompatible version."
+ print "File database was deleted due to incompatible version."
remove(self.VERSION_FILE)
move(self.DB_FILE, self.DB_FILE + ".backup")
diff --git a/module/database/FileDatabase.py b/module/database/FileDatabase.py
index 08b18765d..19dca84c7 100644
--- a/module/database/FileDatabase.py
+++ b/module/database/FileDatabase.py
@@ -40,27 +40,27 @@ class FileMethods(DatabaseMethods):
@queue
def processcount(self, fid):
- """ number of files which have to be proccessed """
+ """ number of files which have to be processed """
# status in online, queued, starting, waiting, downloading
self.c.execute("SELECT COUNT(*) FROM files as WHERE dlstatus IN (2,3,8,9,10) AND fid != ?", (str(fid), ))
return self.c.fetchone()[0]
@queue
def addLink(self, url, name, plugin, package):
- # mark filestatus initially as missing, dlstatus - queued
+ # mark file status initially as missing, dlstatus - queued
self.c.execute('INSERT INTO files(url, name, plugin, status, dlstatus, package) VALUES(?,?,?,1,3,?)',
(url, name, plugin, package))
return self.c.lastrowid
@async
def addLinks(self, links, package):
- """ links is a list of tupels (url, plugin)"""
+ """ links is a list of tuples (url, plugin)"""
links = [(x[0], x[0], x[1], package) for x in links]
self.c.executemany('INSERT INTO files(url, name, plugin, status, dlstatus, package) VALUES(?,?,?,1,3,?)', links)
@queue
def addFile(self, name, size, media, package):
- # filestatus - ok, dl status NA
+ # file status - ok, dl status NA
self.c.execute('INSERT INTO files(name, size, media, package) VALUES(?,?,?,?)',
(name, size, media, package))
return self.c.lastrowid
@@ -225,7 +225,7 @@ class FileMethods(DatabaseMethods):
@queue
def getPackageInfo(self, pid, stats=True):
- """get data for specific package, optional with package stats"""
+ """get data for a specific package, optionally with package stats"""
if stats:
stats = self.getPackageStats(pid=pid)
@@ -242,7 +242,7 @@ class FileMethods(DatabaseMethods):
@async
def updateLinkInfo(self, data):
- """ data is list of tupels (name, size, status,[ hash,] url)"""
+ """ data is list of tuples (name, size, status,[ hash,] url)"""
if data and len(data[0]) == 4:
self.c.executemany('UPDATE files SET name=?, size=?, dlstatus=? WHERE url=? AND dlstatus IN (0,1,2,3,14)',
data)
diff --git a/module/interaction/EventManager.py b/module/interaction/EventManager.py
index 02ecb82fb..976a92413 100644
--- a/module/interaction/EventManager.py
+++ b/module/interaction/EventManager.py
@@ -8,20 +8,20 @@ from module.utils import lock
class EventManager:
"""
- Handles all Event related task, also stores an Event queue for clients, so they can retrieve them later.
+ Handles all event-related tasks, also stores an event queue for clients, so they can retrieve them later.
**Known Events:**
- Most addon methods exists as events. These are some additional known events.
+ Most addon methods exist as events. These are some additional known events.
===================== ================ ===========================================================
Name Arguments Description
===================== ================ ===========================================================
- metaEvent eventName, *args Called for every event, with eventName and orginal args
+ metaEvent eventName, *args Called for every event, with eventName and original args
downloadPreparing fid A download was just queued and will be prepared now.
- downloadStarts fid A plugin will immediately starts the download afterwards.
- linksAdded links, pid Someone just added links, you are able to modify the links.
- allDownloadsProcessed Every link was handled, pyload would idle afterwards.
- allDownloadsFinished Every download in queue is finished.
+ downloadStarts fid A plugin will immediately start the download afterwards.
+ linksAdded links, pid Someone just added links, you are able to modify these links.
+ allDownloadsProcessed All links were handled, pyLoad would idle afterwards.
+ allDownloadsFinished All downloads in the queue are finished.
unrarFinished folder, fname An Unrar job finished
configChanged sec, opt, value The config was changed.
===================== ================ ===========================================================
@@ -44,7 +44,7 @@ class EventManager:
self.lock = Lock()
def getEvents(self, uuid):
- """ Get accumulated events for uuid since last call, this also registeres new client """
+ """ Get accumulated events for uuid since last call, this also registers a new client """
if uuid not in self.clients:
self.clients[uuid] = Client()
return self.clients[uuid].get()
diff --git a/module/interaction/InteractionManager.py b/module/interaction/InteractionManager.py
index 0c125bdd4..1d26b1665 100644
--- a/module/interaction/InteractionManager.py
+++ b/module/interaction/InteractionManager.py
@@ -28,8 +28,8 @@ from InteractionTask import InteractionTask
class InteractionManager:
"""
Class that gives ability to interact with the user.
- Arbitary task with predefined output and input type can be set off.
- Asyncronous callbacks and default values keeps the ability to fallback if no user is present.
+ Arbitrary tasks with predefined output and input types can be set off.
+ Asynchronous callbacks and default values keep the ability to fallback if no user is present.
"""
# number of seconds a client is classified as active
diff --git a/module/network/Browser.py b/module/network/Browser.py
index 3452184d8..9cf6c2f30 100644
--- a/module/network/Browser.py
+++ b/module/network/Browser.py
@@ -16,7 +16,7 @@ class Browser(object):
self.options = options #holds pycurl options
self.bucket = bucket
- self.cj = None # needs to be setted later
+ self.cj = None # needs to be set later
self._size = 0
self.renewHTTPRequest()
diff --git a/module/network/Bucket.py b/module/network/Bucket.py
index ff80bda55..db67faa4a 100644
--- a/module/network/Bucket.py
+++ b/module/network/Bucket.py
@@ -39,7 +39,7 @@ class Bucket:
self.lock.release()
def consumed(self, amount):
- """ return time the process have to sleep, after consumed specified amount """
+ """ return the time the process has to sleep, after it consumed a specified amount """
if self.rate < MIN_RATE: return 0 #May become unresponsive otherwise
self.lock.acquire()
diff --git a/module/network/HTTPChunk.py b/module/network/HTTPChunk.py
index 3380fb733..d17177ee7 100644
--- a/module/network/HTTPChunk.py
+++ b/module/network/HTTPChunk.py
@@ -207,7 +207,7 @@ class HTTPChunk(HTTPRequest):
def writeHeader(self, buf):
self.header += buf
- #@TODO forward headers?, this is possibly unneeeded, when we just parse valid 200 headers
+ #@TODO forward headers?, this is possibly unneeded, when we just parse valid 200 headers
# as first chunk, we will parse the headers
if not self.range and self.header.endswith("\r\n\r\n"):
self.parseHeader()
@@ -236,8 +236,8 @@ class HTTPChunk(HTTPRequest):
sleep(self.p.bucket.consumed(size))
else:
# Avoid small buffers, increasing sleep time slowly if buffer size gets smaller
- # otherwise reduce sleep time percentual (values are based on tests)
- # So in general cpu time is saved without reducing bandwith too much
+ # otherwise reduce sleep time percentile (values are based on tests)
+ # So in general cpu time is saved without reducing bandwidth too much
if size < self.lastSize:
self.sleep += 0.002
@@ -253,7 +253,7 @@ class HTTPChunk(HTTPRequest):
def parseHeader(self):
- """parse data from recieved header"""
+ """parse data from received header"""
for orgline in self.decodeResponse(self.header).splitlines():
line = orgline.strip().lower()
if line.startswith("accept-ranges") and "bytes" in line:
diff --git a/module/network/HTTPDownload.py b/module/network/HTTPDownload.py
index 05ca44406..c6d2e1547 100644
--- a/module/network/HTTPDownload.py
+++ b/module/network/HTTPDownload.py
@@ -34,7 +34,7 @@ from module.utils.fs import save_join, fs_encode
# TODO: save content-disposition for resuming
class HTTPDownload():
- """ loads a url http + ftp """
+ """ loads an url, http + ftp supported """
def __init__(self, url, filename, get={}, post={}, referer=None, cj=None, bucket=None,
options={}, progressNotify=None, disposition=False):
@@ -174,7 +174,7 @@ class HTTPDownload():
while 1:
#need to create chunks
- if not chunksCreated and self.chunkSupport and self.size: #will be setted later by first chunk
+ if not chunksCreated and self.chunkSupport and self.size: #will be set later by first chunk
if not resume:
self.info.setSize(self.size)
@@ -193,7 +193,7 @@ class HTTPDownload():
self.chunks.append(c)
self.m.add_handle(handle)
else:
- #close immediatly
+ #close immediately
self.log.debug("Invalid curl handle -> closed")
c.close()
@@ -291,7 +291,7 @@ class HTTPDownload():
if self.abort:
raise Abort()
- #sleep(0.003) #supress busy waiting - limits dl speed to (1 / x) * buffersize
+ #sleep(0.003) #suppress busy waiting - limits dl speed to (1 / x) * buffersize
self.m.select(1)
for chunk in self.chunks:
diff --git a/module/network/RequestFactory.py b/module/network/RequestFactory.py
index 12fd66c95..932184678 100644
--- a/module/network/RequestFactory.py
+++ b/module/network/RequestFactory.py
@@ -46,7 +46,7 @@ class RequestFactory():
return req
def getHTTPRequest(self, **kwargs):
- """ returns a http request, dont forget to close it ! """
+ """ returns a http request, don't forget to close it ! """
options = self.getOptions()
options.update(kwargs) # submit kwargs as additional options
return HTTPRequest(CookieJar(None), options)
diff --git a/module/network/XDCCRequest.py b/module/network/XDCCRequest.py
index f03798c17..7a1a98cb5 100644
--- a/module/network/XDCCRequest.py
+++ b/module/network/XDCCRequest.py
@@ -119,7 +119,7 @@ class XDCCRequest():
fh.write(data)
- # acknowledge data by sending number of recceived bytes
+ # acknowledge data by sending number of received bytes
dccsock.send(struct.pack('!I', self.recv))
dccsock.close()
diff --git a/module/plugins/Account.py b/module/plugins/Account.py
index 28d1387fd..7c24298e7 100644
--- a/module/plugins/Account.py
+++ b/module/plugins/Account.py
@@ -16,10 +16,10 @@ class WrongPassword(Exception):
#noinspection PyUnresolvedReferences
class Account(Base, AccountInfo):
"""
- Base class for every Account plugin.
- Just overwrite `login` and cookies will be stored and account becomes accessible in\
+ Base class for every account plugin.
+ Just overwrite `login` and cookies will be stored and the account becomes accessible in\
associated hoster plugin. Plugin should also provide `loadAccountInfo`. \
- A instance of this class is created for every entered account, it holds all \
+ An instance of this class is created for every entered account, it holds all \
fields of AccountInfo ttype, and can be set easily at runtime.
"""
@@ -78,7 +78,7 @@ class Account(Base, AccountInfo):
pass
def login(self, req):
- """login into account, the cookies will be saved so user can be recognized
+ """login into account, the cookies will be saved so the user can be recognized
:param req: `Request` instance
"""
@@ -101,7 +101,7 @@ class Account(Base, AccountInfo):
try:
self.login(req)
except TypeError: #TODO: temporary
- self.logDebug("Deprecated .login(...) signature ommit user, data")
+ self.logDebug("Deprecated .login(...) signature omit user, data")
self.login(self.loginname, {"password": self.password}, req)
@@ -129,10 +129,10 @@ class Account(Base, AccountInfo):
self.premium = Account.premium
def update(self, password=None, options=None):
- """ updates account and return true if anything changed """
+ """ updates the account and returns true if anything changed """
self.login_ts = 0
- self.valid = True #set valid so it will be retried to login
+ self.valid = True #set valid, so the login will be retried
if "activated" in options:
self.activated = from_string(options["avtivated"], "bool")
@@ -163,8 +163,8 @@ class Account(Base, AccountInfo):
@lock
def getAccountInfo(self, force=False):
- """retrieve account infos for an user, do **not** overwrite this method!\\
- just use it to retrieve infos in hoster plugins. see `loadAccountInfo`
+ """retrieve account info's for an user, do **not** overwrite this method!\\
+ just use it to retrieve info's in hoster plugins. see `loadAccountInfo`
:param name: username
:param force: reloads cached account information
@@ -180,7 +180,7 @@ class Account(Base, AccountInfo):
try:
infos = self.loadAccountInfo(req)
except TypeError: #TODO: temporary
- self.logDebug("Deprecated .loadAccountInfo(...) signature, ommit user argument.")
+ self.logDebug("Deprecated .loadAccountInfo(...) signature, omit user argument.")
infos = self.loadAccountInfo(self.loginname, req)
except Exception, e:
infos = {"error": str(e)}
@@ -221,7 +221,7 @@ class Account(Base, AccountInfo):
return self.premium
def isUsable(self):
- """Check several contraints to determine if account should be used"""
+ """Check several constraints to determine if account should be used"""
if not self.valid or not self.activated: return False
if self.options["time"]:
@@ -232,11 +232,11 @@ class Account(Base, AccountInfo):
if not compare_time(start.split(":"), end.split(":")):
return False
except:
- self.logWarning(_("Your Time %s has wrong format, use: 1:22-3:44") % time_data)
+ self.logWarning(_("Your Time %s has a wrong format, use: 1:22-3:44") % time_data)
if 0 <= self.validuntil < time():
return False
- if self.trafficleft is 0: # test explicity for 0
+ if self.trafficleft is 0: # test explicitly for 0
return False
return True
@@ -269,15 +269,15 @@ class Account(Base, AccountInfo):
self.scheduleRefresh(60 * 60)
def scheduleRefresh(self, time=0, force=True):
- """ add task to refresh account info to sheduler """
+ """ add a task for refreshing the account info to the scheduler """
self.logDebug("Scheduled Account refresh for %s in %s seconds." % (self.loginname, time))
self.core.scheduler.addJob(time, self.getAccountInfo, [force])
@lock
def checkLogin(self, req):
- """ checks if user is still logged in """
+ """ checks if the user is still logged in """
if self.login_ts + self.login_timeout * 60 < time():
- if self.login_ts: # seperate from fresh login to have better debug logs
+ if self.login_ts: # separate from fresh login to have better debug logs
self.logDebug("Reached login timeout for %s" % self.loginname)
else:
self.logDebug("Login with %s" % self.loginname)
diff --git a/module/plugins/Addon.py b/module/plugins/Addon.py
index fe9ae4817..3fc4eb467 100644
--- a/module/plugins/Addon.py
+++ b/module/plugins/Addon.py
@@ -81,20 +81,20 @@ def threaded(f):
class Addon(Base):
"""
- Base class for addon plugins. Use @threaded decorator for all longer running task.
+ Base class for addon plugins. Use @threaded decorator for all longer running tasks.
- Decorate methods with @Expose, @AddventListener, @ConfigHandler
+ Decorate methods with @Expose, @AddEventListener, @ConfigHandler
"""
- #: automatically register event listeners for functions, attribute will be deleted dont use it yourself
+ #: automatically register event listeners for functions, attribute will be deleted don't use it yourself
event_map = None
# Alternative to event_map
#: List of events the plugin can handle, name the functions exactly like eventname.
event_list = None # dont make duplicate entries in event_map
- #: periodic call interval in secondc
+ #: periodic call interval in seconds
interval = 60
def __init__(self, core, manager):
diff --git a/module/plugins/Base.py b/module/plugins/Base.py
index 61fa211f4..4649a2b08 100644
--- a/module/plugins/Base.py
+++ b/module/plugins/Base.py
@@ -95,8 +95,8 @@ class Base(object):
def logInfo(self, *args, **kwargs):
""" Print args to log at specific level
- :param args: Arbitary object which should be logged
- :param kwargs: sep=(how to seperate arguments), default = " | "
+ :param args: Arbitrary object which should be logged
+ :param kwargs: sep=(how to separate arguments), default = " | "
"""
self._log("info", *args, **kwargs)
@@ -173,7 +173,7 @@ class Base(object):
return False
def checkAbort(self):
- """ Will be overwriten to determine if control flow should be aborted """
+ """ Will be overwritten to determine if control flow should be aborted """
if self.abort: raise Abort()
def load(self, url, get={}, post={}, ref=True, cookies=True, just_header=False, decode=False):
@@ -185,7 +185,7 @@ class Base(object):
:param ref: Set HTTP_REFERER header
:param cookies: use saved cookies
:param just_header: if True only the header will be retrieved and returned as dict
- :param decode: Wether to decode the output according to http header, should be True in most cases
+ :param decode: Whether to decode the output according to http header, should be True in most cases
:return: Loaded content
"""
if not hasattr(self, "req"): raise Exception("Plugin type does not have Request attribute.")
@@ -308,7 +308,7 @@ class Base(object):
elif task.error:
self.fail(task.error)
elif not task.result:
- self.fail(_("No captcha result obtained in appropiate time by any of the plugins."))
+ self.fail(_("No captcha result obtained in appropriate time by any of the plugins."))
result = task.result
self.log.debug("Received captcha result: %s" % str(result))
diff --git a/module/plugins/Crypter.py b/module/plugins/Crypter.py
index 15feea8e0..920009f44 100644
--- a/module/plugins/Crypter.py
+++ b/module/plugins/Crypter.py
@@ -9,7 +9,7 @@ from module.utils.fs import exists, remove, fs_encode
from Base import Base, Retry
class Package:
- """ Container that indicates new package should be created """
+ """ Container that indicates that a new package should be created """
def __init__(self, name, urls=None):
self.name = name
self.urls = urls if urls else []
@@ -102,14 +102,14 @@ class Crypter(Base):
Base.__init__(self, core)
self.req = core.requestFactory.getRequest(self.__name__)
- # Package the plugin was initialized for, dont use this, its not guaranteed to be set
+ # Package the plugin was initialized for, don't use this, its not guaranteed to be set
self.package = package
#: Password supplied by user
self.password = password
#: Propose a renaming of the owner package
self.rename = None
- # For old style decrypter, do not use these !
+ # For old style decrypter, do not use these!
self.packages = []
self.urls = []
self.pyfile = None
@@ -120,7 +120,7 @@ class Crypter(Base):
"""More init stuff if needed"""
def setup(self):
- """Called everytime before decrypting. A Crypter plugin will be most likly used for several jobs."""
+ """Called everytime before decrypting. A Crypter plugin will be most likely used for several jobs."""
def decryptURL(self, url):
"""Decrypt a single url
@@ -150,7 +150,7 @@ class Crypter(Base):
raise NotImplementedError
def generatePackages(self, urls):
- """Generates :class:`Package` instances and names from urls. Usefull for many different links and no\
+ """Generates :class:`Package` instances and names from urls. Useful for many different links and no\
given package name.
:param urls: list of urls
@@ -166,7 +166,7 @@ class Crypter(Base):
"""
cls = self.__class__
- # seperate local and remote files
+ # separate local and remote files
content, urls = self.getLocalContent(urls)
if has_method(cls, "decryptURLs"):
@@ -214,7 +214,7 @@ class Crypter(Base):
return []
def getLocalContent(self, urls):
- """Load files from disk and seperate to file content and url list
+ """Load files from disk and separate to file content and url list
:param urls:
:return: list of (filename, content), remote urls
diff --git a/module/plugins/Hoster.py b/module/plugins/Hoster.py
index b330743e6..737bdcdb4 100644
--- a/module/plugins/Hoster.py
+++ b/module/plugins/Hoster.py
@@ -48,7 +48,7 @@ class Hoster(Base):
def getInfo(urls):
"""This method is used to retrieve the online status of files for hoster plugins.
It has to *yield* list of tuples with the result in this format (name, size, status, url),
- where status is one of API pyfile statusses.
+ where status is one of API pyfile statuses.
:param urls: List of urls
:return: yield list of tuple with results (name, size, status, url)
@@ -108,11 +108,11 @@ class Hoster(Base):
self.init()
def getMultiDL(self):
- self.logDebug("Deprectated attribute multiDL, use limitDL instead")
+ self.logDebug("Deprecated attribute multiDL, use limitDL instead")
return self.limitDL <= 0
def setMultiDL(self, val):
- self.logDebug("Deprectated attribute multiDL, use limitDL instead")
+ self.logDebug("Deprecated attribute multiDL, use limitDL instead")
self.limitDL = 0 if val else 1
multiDL = property(getMultiDL, setMultiDL)
@@ -142,7 +142,7 @@ class Hoster(Base):
pass
def setup(self):
- """ setup for enviroment and other things, called before downloading (possibly more than one time)"""
+ """ setup for environment and other things, called before downloading (possibly more than one time)"""
pass
def preprocessing(self, thread):
@@ -150,7 +150,7 @@ class Hoster(Base):
self.thread = thread
if self.account:
- # will force a relogin or reload of account info if necessary
+ # will force a re-login or reload of account info if necessary
self.account.getAccountInfo()
else:
self.req.clearCookies()
@@ -169,7 +169,7 @@ class Hoster(Base):
return self.pyfile.abort
def resetAccount(self):
- """ dont use account and retry download """
+ """ don't use account and retry download """
self.account = None
self.req = self.core.requestFactory.getRequest(self.__name__)
self.retry()
@@ -372,7 +372,7 @@ class Hoster(Base):
if pyfile.status in (0, 12): #finished or downloading
raise SkipDownload(pyfile.pluginname)
elif pyfile.status in (
- 5, 7) and starting: #a download is waiting/starting and was appenrently started before
+ 5, 7) and starting: #a download is waiting/starting and was apparently started before
raise SkipDownload(pyfile.pluginname)
download_folder = self.config['general']['download_folder']
diff --git a/module/plugins/PluginManager.py b/module/plugins/PluginManager.py
index 733cd2c5d..f42bd08c6 100644
--- a/module/plugins/PluginManager.py
+++ b/module/plugins/PluginManager.py
@@ -221,7 +221,7 @@ class PluginManager:
def parseUrls(self, urls):
- """parse plugins for given list of urls, seperate to crypter and hoster"""
+ """parse plugins for given list of urls, separate to crypter and hoster"""
res = {"hoster": [], "crypter": []} # tupels of (url, plugin)
@@ -313,7 +313,7 @@ class PluginManager:
def find_module(self, fullname, path=None):
#redirecting imports if necesarry
- if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #seperate pyload plugins
+ if fullname.startswith(self.ROOT) or fullname.startswith(self.USERROOT): #separate pyload plugins
if fullname.startswith(self.USERROOT): user = 1
else: user = 0 #used as bool and int
diff --git a/module/plugins/addons/Ev0InFetcher.py b/module/plugins/addons/Ev0InFetcher.py
index aeb46320a..608baf217 100644
--- a/module/plugins/addons/Ev0InFetcher.py
+++ b/module/plugins/addons/Ev0InFetcher.py
@@ -27,9 +27,9 @@ class Ev0InFetcher(Addon):
__config__ = [("activated", "bool", "Activated", "False"),
("interval", "int", "Check interval in minutes", "10"),
("queue", "bool", "Move new shows directly to Queue", False),
- ("shows", "str", "Shows to check for (comma seperated)", ""),
+ ("shows", "str", "Shows to check for (comma separated)", ""),
("quality", "xvid;x264;rmvb", "Video Format", "xvid"),
- ("hoster", "str", "Hoster to use (comma seperated)", "NetloadIn,RapidshareCom,MegauploadCom,HotfileCom")]
+ ("hoster", "str", "Hoster to use (comma separated)", "NetloadIn,RapidshareCom,MegauploadCom,HotfileCom")]
__author_name__ = ("mkaay")
__author_mail__ = ("mkaay@mkaay.de")
diff --git a/module/plugins/hoster/NetloadIn.py b/module/plugins/hoster/NetloadIn.py
index 382328496..d768090e8 100644
--- a/module/plugins/hoster/NetloadIn.py
+++ b/module/plugins/hoster/NetloadIn.py
@@ -10,7 +10,7 @@ from module.plugins.Hoster import Hoster
from module.network.RequestFactory import getURL
def getInfo(urls):
- ## returns list of tupels (name, size (in bytes), status (see FileDatabase), url)
+ ## returns list of tuples (name, size (in bytes), status (see FileDatabase), url)
apiurl = "http://api.netload.in/info.php?auth=Zf9SnQh9WiReEsb18akjvQGqT0I830e8&bz=1&md5=1&file_id="
@@ -196,7 +196,7 @@ class NetloadIn(Hoster):
file_id = re.search('<input name="file_id" type="hidden" value="(.*)" />', page).group(1)
if not captchawaited:
wait = self.get_wait_time(page)
- if i == 0: self.pyfile.waitUntil = time() # dont wait contrary to time on website
+ if i == 0: self.pyfile.waitUntil = time() # don't wait contrary to time on web site
else: self.pyfile.waitUntil = t
self.log.info(_("Netload: waiting for captcha %d s.") % (self.pyfile.waitUntil - time()))
#self.setWait(wait)
diff --git a/module/plugins/internal/AbstractExtractor.py b/module/plugins/internal/AbstractExtractor.py
index ceb188193..3cd635eff 100644
--- a/module/plugins/internal/AbstractExtractor.py
+++ b/module/plugins/internal/AbstractExtractor.py
@@ -13,7 +13,7 @@ class WrongPassword(Exception):
class AbtractExtractor:
@staticmethod
def checkDeps():
- """ Check if system statisfy dependencies
+ """ Check if system satisfies dependencies
:return: boolean
"""
return True
@@ -21,7 +21,7 @@ class AbtractExtractor:
@staticmethod
def getTargets(files_ids):
""" Filter suited targets from list of filename id tuple list
- :param files_ids: List of filepathes
+ :param files_ids: List of file paths
:return: List of targets, id tuple list
"""
raise NotImplementedError
@@ -30,10 +30,10 @@ class AbtractExtractor:
def __init__(self, m, file, out, fullpath, overwrite, renice):
"""Initialize extractor for specific file
- :param m: ExtractArchive Addon plugin
- :param file: Absolute filepath
+ :param m: ExtractArchive addon plugin
+ :param file: Absolute file path
:param out: Absolute path to destination directory
- :param fullpath: extract to fullpath
+ :param fullpath: Extract to fullpath
:param overwrite: Overwrite existing archives
:param renice: Renice value
"""
@@ -52,7 +52,7 @@ class AbtractExtractor:
def checkArchive(self):
- """Check if password if needed. Raise ArchiveError if integrity is
+ """Check if password is needed. Raise ArchiveError if integrity is
questionable.
:return: boolean
diff --git a/module/plugins/internal/SimpleHoster.py b/module/plugins/internal/SimpleHoster.py
index 69909a8a1..20263064a 100644
--- a/module/plugins/internal/SimpleHoster.py
+++ b/module/plugins/internal/SimpleHoster.py
@@ -103,7 +103,7 @@ class SimpleHoster(Hoster):
or FILE_NAME_INFO = r'(?P<N>file_name)'
and FILE_SIZE_INFO = r'(?P<S>file_size) (?P<U>units)'
FILE_OFFLINE_PATTERN = r'File (deleted|not found)'
- TEMP_OFFLINE_PATTERN = r'Server maintainance'
+ TEMP_OFFLINE_PATTERN = r'Server maintenance'
"""
FILE_SIZE_REPLACEMENTS = []
diff --git a/module/remote/thriftbackend/Socket.py b/module/remote/thriftbackend/Socket.py
index c38c39198..2a84004ea 100644
--- a/module/remote/thriftbackend/Socket.py
+++ b/module/remote/thriftbackend/Socket.py
@@ -74,7 +74,7 @@ class Socket(TSocket):
except socket.error, e:
if (e.args[0] == errno.ECONNRESET and
(sys.platform == 'darwin' or sys.platform.startswith('freebsd'))):
- # freebsd and Mach don't follow POSIX semantic of recv
+ # freebsd and Mach don't follow POSIX semantics of recv
# and fail with ECONNRESET if peer performed shutdown.
# See corresponding comment and code in TSocket::read()
# in lib/cpp/src/transport/TSocket.cpp.
diff --git a/module/threads/BaseThread.py b/module/threads/BaseThread.py
index f6fac46a0..7a0ee5ee4 100644
--- a/module/threads/BaseThread.py
+++ b/module/threads/BaseThread.py
@@ -131,6 +131,6 @@ class BaseThread(Thread):
return ""
def clean(self, pyfile):
- """ set thread unactive and release pyfile """
+ """ set thread inactive and release pyfile """
self.active = False
pyfile.release()
diff --git a/module/threads/DownloadThread.py b/module/threads/DownloadThread.py
index 8166191af..7555a82ce 100644
--- a/module/threads/DownloadThread.py
+++ b/module/threads/DownloadThread.py
@@ -58,7 +58,7 @@ class DownloadThread(BaseThread):
try:
if not pyfile.hasPlugin(): continue
- #this pyfile was deleted while queueing
+ #this pyfile was deleted while queuing
pyfile.plugin.checkForSameFiles(starting=True)
self.log.info(_("Download starts: %s" % pyfile.name))
@@ -212,7 +212,7 @@ class DownloadThread(BaseThread):
def put(self, job):
- """assing job to thread"""
+ """assign a job to the thread"""
self.queue.put(job)
diff --git a/module/threads/ThreadManager.py b/module/threads/ThreadManager.py
index b3a1e8c6c..c3da13430 100644
--- a/module/threads/ThreadManager.py
+++ b/module/threads/ThreadManager.py
@@ -82,7 +82,7 @@ class ThreadManager:
self.threads.append(thread)
def createInfoThread(self, data, pid):
- """ start a thread whichs fetches online status and other infos """
+ """ start a thread which fetches online status and other info's """
self.timestamp = time() + 5 * 60
if data: InfoThread(self, data, pid)
@@ -134,7 +134,7 @@ class ThreadManager:
def work(self):
- """run all task which have to be done (this is for repetivive call by core)"""
+ """run all task which have to be done (this is for repetetive call by core)"""
try:
self.tryReconnect()
except Exception, e:
@@ -231,7 +231,7 @@ class ThreadManager:
return ip
def checkThreadCount(self):
- """checks if there are need for increasing or reducing thread count"""
+ """checks if there is a need for increasing or reducing thread count"""
if len(self.threads) == self.core.config.get("download", "max_downloads"):
return True
@@ -244,7 +244,7 @@ class ThreadManager:
def cleanPycurl(self):
- """ make a global curl cleanup (currently ununused) """
+ """ make a global curl cleanup (currently unused) """
if self.processingIds():
return False
pycurl.global_cleanup()
@@ -255,7 +255,7 @@ class ThreadManager:
def assignJob(self):
- """assing a job to a thread if possible"""
+ """assign a job to a thread if possible"""
if self.pause or not self.core.api.isTimeDownload(): return
diff --git a/module/utils/fs.py b/module/utils/fs.py
index 276ff04b5..631b25002 100644
--- a/module/utils/fs.py
+++ b/module/utils/fs.py
@@ -6,7 +6,7 @@ from os.path import join
from . import decode, remove_chars
# File System Encoding functions:
-# Use fs_encode before accesing files on disk, it will encode the string properly
+# Use fs_encode before accessing files on disk, it will encode the string properly
if sys.getfilesystemencoding().startswith('ANSI'):
def fs_encode(string):
diff --git a/module/web/api_app.py b/module/web/api_app.py
index 6c93266fc..7a9eb8558 100644
--- a/module/web/api_app.py
+++ b/module/web/api_app.py
@@ -30,7 +30,7 @@ def add_header(r):
r.headers.append("Access-Control-Allow-Origin", "*") # allow xhr requests
# accepting positional arguments, as well as kwargs via post and get
-# only forbidden path symbol are "?", which is used to seperate GET data and #
+# only forbidden path symbol are "?", which is used to separate GET data and #
@route("/api/<func><args:re:[^#?]*>")
@route("/api/<func><args:re:[^#?]*>", method="POST")
def call_api(func, args=""):