author     zoidberg10 <zoidberg@mujmail.cz>  2011-12-05 16:49:19 +0100
committer  zoidberg10 <zoidberg@mujmail.cz>  2011-12-05 16:49:19 +0100
commit     cba56c6859eace6fbcabf0690f388ceeabd8c689 (patch)
tree       90f4af3832fc0814d05f39671bfc6a78bf2f0eb1 /module
parent     temp. offline status for fs (diff)
download   pyload-cba56c6859eace6fbcabf0690f388ceeabd8c689.tar.xz

fix filesystem encoding issues on windows and synology nas
Diffstat (limited to 'module')
-rw-r--r--  module/Utils.py                  14
-rw-r--r--  module/network/HTTPChunk.py      23
-rw-r--r--  module/network/HTTPDownload.py   10
-rw-r--r--  module/plugins/Plugin.py         18
4 files changed, 34 insertions, 31 deletions
diff --git a/module/Utils.py b/module/Utils.py
index 955d14b27..f626eef10 100644
--- a/module/Utils.py
+++ b/module/Utils.py
@@ -47,21 +47,19 @@ def save_join(*args):
if i:
path = path.replace(":", "")
- path = decode(path)
-
- tmp = fs_encode(path)
- paths.append(tmp)
+ paths.append(unicode(path))
return join(*paths)
def fs_encode(string):
- """ Encodes with filesystem encoding
+ """ Encodes string with utf-8 if locale support seems to be missing
:param string: string to decode
:return:
"""
- try:
- return string.encode(sys.getfilesystemencoding(), "replace")
- except:
+ try:
+ if sys.getfilesystemencoding() == 'ANSI_X3.4-1968':
+ string = string.encode('utf-8')
+ finally:
return string
def fs_decode(string):
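The Utils.py hunk changes the strategy: paths are kept as unicode inside pyLoad, and fs_encode() only forces utf-8 when the interpreter reports the bare C/POSIX locale ('ANSI_X3.4-1968' is the formal name of ASCII), which is what a Synology NAS without configured locales typically exposes; on Windows the unicode path is passed through so the OS APIs handle the conversion. A minimal self-contained sketch of the patched helper (Python 2, as in the codebase; the fs_decode body and the demo name are illustrative assumptions, not part of this hunk):

# -*- coding: utf-8 -*-
# Sketch of the patched helper. 'ANSI_X3.4-1968' is Python's report of the
# bare C/POSIX locale, i.e. no usable filesystem encoding is configured;
# only then is utf-8 forced, otherwise the unicode path is passed through.
import sys

def fs_encode(string):
    """ Encodes string with utf-8 if locale support seems to be missing """
    try:
        if sys.getfilesystemencoding() == 'ANSI_X3.4-1968':
            string = string.encode('utf-8')
    finally:
        return string

def fs_decode(string):
    """ Assumed counterpart for illustration: bytes from the filesystem -> unicode """
    if isinstance(string, str):
        return string.decode('utf-8', 'replace')
    return string

if __name__ == "__main__":
    name = u"ordner/r\xe4uber.rar"      # non-ASCII download name
    print repr(fs_encode(name))          # bytes under the C locale, unicode elsewhere
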
diff --git a/module/network/HTTPChunk.py b/module/network/HTTPChunk.py
index 582067aa8..b637aef32 100644
--- a/module/network/HTTPChunk.py
+++ b/module/network/HTTPChunk.py
@@ -20,7 +20,8 @@ from os import remove, stat, fsync
from os.path import exists
from time import sleep
from re import search
-
+from module.utils import fs_encode
+import codecs
import pycurl
from HTTPRequest import HTTPRequest
@@ -31,7 +32,7 @@ class WrongFormat(Exception):
class ChunkInfo():
def __init__(self, name):
- self.name = name
+ self.name = unicode(name)
self.size = 0
self.resume = False
self.chunks = []
@@ -64,7 +65,8 @@ class ChunkInfo():
def save(self):
- fh = open("%s.chunks" % self.name, "w")
+ fs_name = fs_encode("%s.chunks" % self.name)
+ fh = codecs.open(fs_name, "w", "utf_8")
fh.write("name:%s\n" % self.name)
fh.write("size:%s\n" % self.size)
for i, c in enumerate(self.chunks):
@@ -75,9 +77,10 @@ class ChunkInfo():
@staticmethod
def load(name):
- if not exists("%s.chunks" % name):
+ fs_name = fs_encode("%s.chunks" % name)
+ if not exists(fs_name):
raise IOError()
- fh = open("%s.chunks" % name, "r")
+ fh = codecs.open(fs_name, "r", "utf_8")
name = fh.readline()[:-1]
size = fh.readline()[:-1]
if name.startswith("name:") and size.startswith("size:"):
@@ -105,7 +108,8 @@ class ChunkInfo():
return ci
def remove(self):
- if exists("%s.chunks" % self.name): remove("%s.chunks" % self.name)
+ fs_name = fs_encode("%s.chunks" % self.name)
+ if exists(fs_name): remove(fs_name)
def getCount(self):
return len(self.chunks)
@@ -162,11 +166,12 @@ class HTTPChunk(HTTPRequest):
# request all bytes, since some servers in russia seem to have a defective arithmetic unit
+ fs_name = fs_encode(self.p.info.getChunkName(self.id))
if self.resume:
- self.fp = open(self.p.info.getChunkName(self.id), "ab")
+ self.fp = open(fs_name, "ab")
self.arrived = self.fp.tell()
if not self.arrived:
- self.arrived = stat(self.p.info.getChunkName(self.id)).st_size
+ self.arrived = stat(fs_name).st_size
if self.range:
#do nothing if chunk already finished
@@ -193,7 +198,7 @@ class HTTPChunk(HTTPRequest):
self.log.debug("Chunked with range %s" % range)
self.c.setopt(pycurl.RANGE, range)
- self.fp = open(self.p.info.getChunkName(self.id), "wb")
+ self.fp = open(fs_name, "wb")
return self.c
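The ChunkInfo changes follow one rule: the path handed to open()/exists()/remove() goes through fs_encode(), while the content of the .chunks metadata file is written as utf-8 text via codecs.open, so a non-ASCII download name survives being stored and re-read on resume. A rough sketch of that round trip (fs_encode as in module/Utils.py; the per-chunk line format below is simplified for illustration, the hunk does not show it):

# -*- coding: utf-8 -*-
# Rough sketch of the ChunkInfo idea after this commit: byte path for the OS,
# utf-8 text for the file body. The name/size header plus one range line per
# chunk is a simplified illustration, not the exact pyLoad format.
import codecs
import sys

def fs_encode(string):
    if isinstance(string, unicode) and sys.getfilesystemencoding() == 'ANSI_X3.4-1968':
        string = string.encode('utf-8')
    return string

def save_chunk_info(name, size, ranges):
    fs_name = fs_encode(u"%s.chunks" % name)       # path for the filesystem
    fh = codecs.open(fs_name, "w", "utf_8")        # content stays utf-8 text
    fh.write(u"name:%s\n" % name)
    fh.write(u"size:%s\n" % size)
    for i, (start, end) in enumerate(ranges):
        fh.write(u"#%d: %d-%d\n" % (i, start, end))
    fh.close()

def load_chunk_info(name):
    fs_name = fs_encode(u"%s.chunks" % name)
    fh = codecs.open(fs_name, "r", "utf_8")
    header = [fh.readline()[:-1] for _ in range(2)]
    ranges = [line.split()[1] for line in fh if line.startswith(u"#")]
    fh.close()
    return header, ranges

if __name__ == "__main__":
    save_chunk_info(u"r\xe4uber.part1.rar", 1048576, [(0, 524287), (524288, 1048575)])
    print load_chunk_info(u"r\xe4uber.part1.rar")
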
diff --git a/module/network/HTTPDownload.py b/module/network/HTTPDownload.py
index ec742596c..fe8075539 100644
--- a/module/network/HTTPDownload.py
+++ b/module/network/HTTPDownload.py
@@ -29,7 +29,7 @@ from HTTPChunk import ChunkInfo, HTTPChunk
from HTTPRequest import BadHeader
from module.plugins.Plugin import Abort
-from module.utils import save_join
+from module.utils import save_join, fs_encode
class HTTPDownload():
""" loads a url http + ftp """
@@ -88,7 +88,7 @@ class HTTPDownload():
return (self.arrived * 100) / self.size
def _copyChunks(self):
- init = self.info.getChunkName(0) #initial chunk name
+ init = fs_encode(self.info.getChunkName(0)) #initial chunk name
if self.info.getCount() > 1:
fo = open(init, "rb+") #first chunkfile
@@ -96,7 +96,7 @@ class HTTPDownload():
#input file
fo.seek(
self.info.getChunkRange(i - 1)[1] + 1) #seek to beginning of chunk, to get rid of overlapping chunks
- fname = "%s.chunk%d" % (self.filename, i)
+ fname = fs_encode("%s.chunk%d" % (self.filename, i))
fi = open(fname, "rb")
buf = 32 * 1024
while True: #copy in chunks, consumes less memory
@@ -116,7 +116,7 @@ class HTTPDownload():
if self.nameDisposition and self.disposition:
self.filename = save_join(dirname(self.filename), self.nameDisposition)
- move(init, self.filename)
+ move(init, fs_encode(self.filename))
self.info.remove() #remove info file
def download(self, chunks=1, resume=False):
@@ -249,7 +249,7 @@ class HTTPDownload():
for chunk in to_clean:
self.closeChunk(chunk)
self.chunks.remove(chunk)
- remove(self.info.getChunkName(chunk.id))
+ remove(fs_encode(self.info.getChunkName(chunk.id)))
#let first chunk load the rest and update the info file
init.resetRange()
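HTTPDownload applies the same boundary rule: self.filename and the chunk names stay unicode, and fs_encode() is called immediately before open(), remove() and move(). A simplified stand-in for the chunk-merge step under that rule (the 32 KiB copy loop and the '<name>.chunk<N>' naming mirror the hunks; merge_chunks() is illustrative, not the pyLoad _copyChunks() method, and it simply appends chunks instead of seeking to the recorded ranges):

# -*- coding: utf-8 -*-
# Illustrative stand-in for the chunk-merge step: every name that reaches the
# OS is fs_encode()d first, the unicode name is kept for everything else.
import os
import sys
from shutil import move

def fs_encode(string):
    if isinstance(string, unicode) and sys.getfilesystemencoding() == 'ANSI_X3.4-1968':
        string = string.encode('utf-8')
    return string

def merge_chunks(filename, count):
    init = fs_encode(u"%s.chunk0" % filename)      # first chunk file keeps the data
    fo = open(init, "rb+")
    fo.seek(0, os.SEEK_END)                        # pyLoad seeks to the recorded range instead
    for i in range(1, count):
        fname = fs_encode(u"%s.chunk%d" % (filename, i))
        fi = open(fname, "rb")
        while True:
            data = fi.read(32 * 1024)              # copy in blocks, low memory use
            if not data:
                break
            fo.write(data)
        fi.close()
        os.remove(fname)                           # drop the merged chunk file
    fo.close()
    move(init, fs_encode(filename))                # final rename to the real name
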
diff --git a/module/plugins/Plugin.py b/module/plugins/Plugin.py
index 1abf02bbe..e2aadb38e 100644
--- a/module/plugins/Plugin.py
+++ b/module/plugins/Plugin.py
@@ -491,7 +491,7 @@ class Plugin(Base):
name = save_path(self.pyfile.name)
- filename = join(location, fs_encode(name))
+ filename = join(location, name)
self.core.hookManager.dispatchEvent("downloadStarts", self.pyfile, url, filename)
@@ -505,7 +505,7 @@ class Plugin(Base):
if disposition and newname and newname != name: #triple check, just to be sure
self.log.info("%(name)s saved as %(newname)s" % {"name": name, "newname": newname})
self.pyfile.name = newname
- filename = join(location, fs_encode(newname))
+ filename = join(location, newname)
if self.core.config["permission"]["change_file"]:
chmod(filename, int(self.core.config["permission"]["file"], 8))
@@ -532,16 +532,16 @@ class Plugin(Base):
:param read_size: amount of bytes to read from files larger then max_size
:return: dictionary key of the first rule that matched
"""
- if not exists(self.lastDownload): return None
-
- size = stat(self.lastDownload)
+ fs_name = fs_encode(self.lastDownload)
+ if not exists(fs_name): return None
+ size = stat(fs_name)
size = size.st_size
if api_size and api_size <= size: return None
elif size > max_size and not read_size: return None
self.log.debug("Download Check triggered")
- f = open(self.lastDownload, "rb")
+ f = open(fs_name, "rb")
content = f.read(read_size if read_size else -1)
f.close()
#produces encoding errors, better log to other file in the future?
@@ -550,13 +550,13 @@ class Plugin(Base):
if type(rule) in (str, unicode):
if rule in content:
if delete:
- remove(self.lastDownload)
+ remove(fs_name)
return name
elif hasattr(rule, "search"):
m = rule.search(content)
if m:
if delete:
- remove(self.lastDownload)
+ remove(fs_name)
self.lastCheck = m
return name
@@ -586,7 +586,7 @@ class Plugin(Base):
raise SkipDownload(pyfile.pluginname)
download_folder = self.config['general']['download_folder']
- location = save_join(download_folder, pack.folder, self.pyfile.name)
+ location = fs_encode(save_join(download_folder, pack.folder, self.pyfile.name))
if starting and self.core.config['download']['skip_existing'] and exists(location):
size = os.stat(location).st_size
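The Plugin.py hunks complete the picture: join() now receives the unicode name, so self.pyfile.name and filename stay unicode, and fs_encode() is applied only where the path reaches the filesystem, in checkDownload() and in the skip_existing test. A simplified stand-in for that check (check_download() keeps only the size and string-rule logic visible in the hunk; the rules argument and demo path are illustrative):

# -*- coding: utf-8 -*-
# Simplified stand-in for the checkDownload() pattern after this commit:
# the unicode path stays unicode, fs_encode() is applied right before
# exists()/stat()/open()/remove(). Not the pyLoad method itself.
import os
import sys
from os.path import exists

def fs_encode(string):
    if isinstance(string, unicode) and sys.getfilesystemencoding() == 'ANSI_X3.4-1968':
        string = string.encode('utf-8')
    return string

def check_download(last_download, rules, max_size=50 * 1024, read_size=0, delete=False):
    fs_name = fs_encode(last_download)
    if not exists(fs_name):
        return None
    if os.stat(fs_name).st_size > max_size and not read_size:
        return None                                    # too large to scan
    f = open(fs_name, "rb")
    content = f.read(read_size if read_size else -1)
    f.close()
    for name, rule in rules.iteritems():
        if isinstance(rule, str) and rule in content:  # string rules only; regex rules omitted
            if delete:
                os.remove(fs_name)
            return name                                # key of the first rule that matched
    return None

if __name__ == "__main__":
    print check_download(u"/tmp/r\xe4uber.html", {"html": "<!DOCTYPE html"})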