Make sftp.open handle binary and text, more type conversion

Scott Maxwell 2013-11-02 14:56:43 -07:00
parent 7decda3297
commit 45e65b6e1e
12 changed files with 82 additions and 67 deletions
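
In practical terms, the change-set below makes the mode string passed to sftp.open() (and to Channel.makefile() in exec_command) decide whether you get text or bytes: text modes return unicode strings from read()/readline() and accept str on write(), while 'b' modes deal purely in bytes. A minimal sketch of the intended behaviour (hostname, credentials and the remote path are placeholders):

    import paramiko

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect('ssh.example.com', username='user', password='secret')  # placeholders
    sftp = client.open_sftp()

    f = sftp.open('/tmp/demo.txt', 'w')    # text mode
    f.write('hello\n')                     # str accepted; coerced to bytes internally
    f.close()

    f = sftp.open('/tmp/demo.txt', 'rb')   # binary mode
    print(f.read())                        # b'hello\n'
    f.close()

    f = sftp.open('/tmp/demo.txt', 'r')    # text mode
    print(f.read())                        # u'hello\n' -- decoded back to unicode
    f.close()

    client.close()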

View File

@@ -378,8 +378,8 @@ class SSHClient (object):
         chan.settimeout(timeout)
         chan.exec_command(command)
         stdin = chan.makefile('wb', bufsize)
-        stdout = chan.makefile('rb', bufsize)
-        stderr = chan.makefile_stderr('rb', bufsize)
+        stdout = chan.makefile('r', bufsize)
+        stderr = chan.makefile_stderr('r', bufsize)
         return stdin, stdout, stderr

     def invoke_shell(self, term='vt100', width=80, height=24, width_pixels=0,

View File

@@ -127,12 +127,16 @@ zero_byte = byte_chr(0)
 one_byte = byte_chr(1)
 four_byte = byte_chr(4)
 max_byte = byte_chr(0xff)
-newline_byte = byte_chr(10)
+cr_byte = byte_chr(13)
+linefeed_byte = byte_chr(10)
+crlf = cr_byte + linefeed_byte

 if PY3:
-    cr_byte = 13
+    cr_byte_value = 13
+    linefeed_byte_value = 10
 else:
-    cr_byte = '\r'
+    cr_byte_value = cr_byte
+    linefeed_byte_value = linefeed_byte


 def asbytes(s):
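
The byte constants and their *_value twins both exist because indexing a bytes object differs across interpreters: on Python 3 it yields an int, on Python 2 a one-character str, while a one-byte slice yields bytes on both. A small illustration (requires a paramiko that includes this change; the same distinction drives the Packetizer and SFTPClient hunks further down):

    from paramiko.common import cr_byte, cr_byte_value

    buf = b'banner\r'

    # slicing always yields bytes, so comparing against the byte constants works everywhere
    assert buf[-1:] == cr_byte

    # indexing yields an int on Python 3 and a str on Python 2, which is exactly
    # the difference the *_value constants paper over
    assert buf[-1] == cr_byte_value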

View File

@@ -47,8 +47,8 @@ class BufferedFile (object):
         self.newlines = None
         self._flags = 0
         self._bufsize = self._DEFAULT_BUFSIZE
-        self._wbuffer = StringIO()
-        self._rbuffer = ''
+        self._wbuffer = BytesIO()
+        self._rbuffer = bytes()
         self._at_trailing_cr = False
         self._closed = False
         # pos - position within the file, according to the user
@@ -89,7 +89,7 @@ class BufferedFile (object):
         buffering is not turned on.
         """
         self._write_all(self._wbuffer.getvalue())
-        self._wbuffer = StringIO()
+        self._wbuffer = BytesIO()
         return

     if PY3:
@@ -144,7 +144,7 @@ class BufferedFile (object):
         if (size is None) or (size < 0):
             # go for broke
             result = self._rbuffer
-            self._rbuffer = ''
+            self._rbuffer = bytes()
             self._pos += len(result)
             while True:
                 try:
@@ -156,12 +156,12 @@ class BufferedFile (object):
                 result += new_data
                 self._realpos += len(new_data)
                 self._pos += len(new_data)
-            return result
+            return result if self._flags & self.FLAG_BINARY else u(result)
         if size <= len(self._rbuffer):
             result = self._rbuffer[:size]
             self._rbuffer = self._rbuffer[size:]
             self._pos += len(result)
-            return result
+            return result if self._flags & self.FLAG_BINARY else u(result)
         while len(self._rbuffer) < size:
             read_size = size - len(self._rbuffer)
             if self._flags & self.FLAG_BUFFERED:
@@ -177,7 +177,7 @@ class BufferedFile (object):
         result = self._rbuffer[:size]
         self._rbuffer = self._rbuffer[size:]
         self._pos += len(result)
-        return result
+        return result if self._flags & self.FLAG_BINARY else u(result)

     def readline(self, size=None):
         """
@@ -207,11 +207,11 @@ class BufferedFile (object):
             if self._at_trailing_cr and (self._flags & self.FLAG_UNIVERSAL_NEWLINE) and (len(line) > 0):
                 # edge case: the newline may be '\r\n' and we may have read
                 # only the first '\r' last time.
-                if line[0] == '\n':
+                if line[0] == linefeed_byte_value:
                     line = line[1:]
-                    self._record_newline('\r\n')
+                    self._record_newline(crlf)
                 else:
-                    self._record_newline('\r')
+                    self._record_newline(cr_byte)
                 self._at_trailing_cr = False
             # check size before looking for a linefeed, in case we already have
             # enough.
@@ -221,42 +221,42 @@ class BufferedFile (object):
                     self._rbuffer = line[size:]
                     line = line[:size]
                     self._pos += len(line)
-                    return line
+                    return line if self._flags & self.FLAG_BINARY else u(line)
                 n = size - len(line)
             else:
                 n = self._bufsize
-            if ('\n' in line) or ((self._flags & self.FLAG_UNIVERSAL_NEWLINE) and ('\r' in line)):
+            if (linefeed_byte in line) or ((self._flags & self.FLAG_UNIVERSAL_NEWLINE) and (cr_byte in line)):
                 break
             try:
                 new_data = self._read(n)
             except EOFError:
                 new_data = None
             if (new_data is None) or (len(new_data) == 0):
-                self._rbuffer = ''
+                self._rbuffer = bytes()
                 self._pos += len(line)
-                return line
+                return line if self._flags & self.FLAG_BINARY else u(line)
-            line += b2s(new_data)
+            line += new_data
             self._realpos += len(new_data)
         # find the newline
-        pos = line.find('\n')
+        pos = line.find(linefeed_byte)
         if self._flags & self.FLAG_UNIVERSAL_NEWLINE:
-            rpos = line.find('\r')
+            rpos = line.find(cr_byte)
             if (rpos >= 0) and ((rpos < pos) or (pos < 0)):
                 pos = rpos
         xpos = pos + 1
-        if (line[pos] == '\r') and (xpos < len(line)) and (line[xpos] == '\n'):
+        if (line[pos] == cr_byte_value) and (xpos < len(line)) and (line[xpos] == linefeed_byte_value):
             xpos += 1
         self._rbuffer = line[xpos:]
         lf = line[pos:xpos]
-        line = line[:pos] + '\n'
+        line = line[:pos] + linefeed_byte
-        if (len(self._rbuffer) == 0) and (lf == '\r'):
+        if (len(self._rbuffer) == 0) and (lf == cr_byte):
             # we could read the line up to a '\r' and there could still be a
             # '\n' following that we read next time. note that and eat it.
             self._at_trailing_cr = True
         else:
             self._record_newline(lf)
         self._pos += len(line)
-        return line
+        return line if self._flags & self.FLAG_BINARY else u(line)

     def readlines(self, sizehint=None):
         """
@@ -323,6 +323,7 @@ class BufferedFile (object):
         @param data: data to write.
         @type data: str
         """
+        data = b(data)
         if self._closed:
             raise IOError('File is closed')
         if not (self._flags & self.FLAG_WRITE):
@@ -333,12 +334,12 @@
         self._wbuffer.write(data)
         if self._flags & self.FLAG_LINE_BUFFERED:
             # only scan the new data for linefeed, to avoid wasting time.
-            last_newline_pos = data.rfind('\n')
+            last_newline_pos = data.rfind(linefeed_byte)
             if last_newline_pos >= 0:
                 wbuf = self._wbuffer.getvalue()
                 last_newline_pos += len(wbuf) - len(data)
                 self._write_all(wbuf[:last_newline_pos + 1])
-                self._wbuffer = StringIO()
+                self._wbuffer = BytesIO()
                 self._wbuffer.write(wbuf[last_newline_pos + 1:])
             return
         # even if we're line buffering, if the buffer has grown past the
@@ -471,7 +472,7 @@ class BufferedFile (object):
             return
         if self.newlines is None:
             self.newlines = newline
-        elif (type(self.newlines) is str) and (self.newlines != newline):
+        elif self.newlines != newline and isinstance(self.newlines, bytes_types):
             self.newlines = (self.newlines, newline)
         elif newline not in self.newlines:
             self.newlines += (newline,)
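
Taken together, these hunks make BufferedFile buffer bytes internally and convert only at the API boundary: read() and readline() return unicode unless the file was opened in a binary mode, write() coerces str through b(), and the newlines attribute now records byte sequences. A small demonstration with a hypothetical in-memory subclass (modeled on the LoopbackFile used in the file tests further down):

    from paramiko.file import BufferedFile

    class MemoryFile(BufferedFile):
        """Illustration only: serve reads out of a fixed byte string."""
        def __init__(self, data, mode='r', bufsize=-1):
            BufferedFile.__init__(self)
            self._set_mode(mode, bufsize)
            self._data = data

        def _read(self, size):
            if not self._data:
                raise EOFError()
            chunk, self._data = self._data[:size], self._data[size:]
            return chunk

    f = MemoryFile(b'First.\r\nSecond.\n', 'rU')
    print(f.readline())   # 'First.\n' -- text mode returns unicode, '\r\n' folded to '\n'
    print(f.readline())   # 'Second.\n'
    print(f.newlines)     # (b'\r\n', b'\n') -- line terminators are recorded as bytes now

    g = MemoryFile(b'raw\x00bytes', 'rb')
    print(g.read())       # b'raw\x00bytes' -- binary mode passes bytes through untouched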

View File

@@ -277,12 +277,12 @@ class Packetizer (object):
         line, so it's okay to attempt large reads.
         """
         buf = self.__remainder
-        while not newline_byte in buf:
+        while not linefeed_byte in buf:
             buf += self._read_timeout(timeout)
-        n = buf.index(newline_byte)
+        n = buf.index(linefeed_byte)
         self.__remainder = buf[n+1:]
         buf = buf[:n]
-        if (len(buf) > 0) and (buf[-1] == cr_byte):
+        if (len(buf) > 0) and (buf[-1] == cr_byte_value):
             buf = buf[:-1]
         return u(buf)

View File

@@ -221,3 +221,5 @@ class SFTPAttributes (object):
         return '%s 1 %-8d %-8d %8d %-12s %s' % (ks, uid, gid, self.st_size, datestr, filename)

+    def asbytes(self):
+        return b(str(self))
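
This new method is what lets the SFTPServer hunk further down pass the attributes object straight to msg.add_string() instead of wrapping it in str(): the coercion helper in paramiko.common falls back to an object's own asbytes() when handed something that is neither text nor bytes. Roughly this shape (a simplified sketch, not the verbatim paramiko helper):

    def asbytes(s):
        # simplified sketch of the coercion behind Message.add_string()
        if isinstance(s, bytes):
            return s
        if isinstance(s, str):
            return s.encode('utf-8')
        # objects such as SFTPAttributes can now supply their own serialization
        return s.asbytes()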

View File

@@ -497,7 +497,7 @@ class SFTPClient (BaseSFTP):
             raise SFTPError('Realpath returned %d results' % count)
         return msg.get_text()

-    def chdir(self, path):
+    def chdir(self, path=None):
         """
         Change the "current directory" of this SFTP session. Since SFTP
         doesn't really have the concept of a current working directory, this
@@ -773,7 +773,7 @@ class SFTPClient (BaseSFTP):
         path = b(path)
         if self._cwd is None:
             return path
-        if (len(path) > 0) and (path[0] == '/'):
+        if len(path) and path[0:1] == b_slash:
             # absolute path
             return path
         if self._cwd == b_slash:
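
With the default argument in place, chdir() with no argument clears the emulated working directory (the same thing chdir(None) already did), and _adjust_cwd now works on bytes, using a one-byte slice instead of an index so the leading-'/' check behaves identically on Python 2 and 3. A usage sketch (sftp and FOLDER as set up in the test suite below):

    sftp.chdir(FOLDER)                 # emulated cwd, tracked client-side as bytes
    f = sftp.open('something', 'w')    # relative name, resolved against the cwd by _adjust_cwd
    f.close()
    sftp.chdir()                       # argument is now optional: back to "no cwd"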

View File

@@ -218,7 +218,7 @@ class SFTPFile (BufferedFile):
             self._realpos = self._pos
         else:
             self._realpos = self._pos = self._get_size() + offset
-        self._rbuffer = ''
+        self._rbuffer = bytes()

     def stat(self):
         """

View File

@@ -168,7 +168,11 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
         if attr._flags & attr.FLAG_AMTIME:
             os.utime(filename, (attr.st_atime, attr.st_mtime))
         if attr._flags & attr.FLAG_SIZE:
-            open(filename, 'w+').truncate(attr.st_size)
+            f = open(filename, 'w+')
+            try:
+                f.truncate(attr.st_size)
+            finally:
+                f.close()
     set_file_attr = staticmethod(set_file_attr)

@@ -234,7 +238,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
         msg.add_int(len(flist))
         for attr in flist:
             msg.add_string(attr.filename)
-            msg.add_string(str(attr))
+            msg.add_string(attr)
             attr._pack(msg)
         self._send_packet(CMD_NAME, msg)

@@ -282,7 +286,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
             hash_obj = alg.new()
             while count < blocklen:
                 data = f.read(offset, chunklen)
-                if not type(data) is str:
+                if not isinstance(data, bytes_types):
                     self._send_status(request_number, data, 'Unable to hash file')
                     return
                 hash_obj.update(data)
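
The truncate change is as much about resource handling as about the port: the old one-liner left the file object for the garbage collector to close, which is only prompt on CPython. Where the with-statement is available the same fix could be written as below (a sketch reusing the filename/attr variables from the hunk above; the commit keeps try/finally, presumably for the older interpreters paramiko still supported):

    if attr._flags & attr.FLAG_SIZE:
        with open(filename, 'w+') as f:
            f.truncate(attr.st_size)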

View File

@@ -204,8 +204,10 @@ class SSHClientTest (unittest.TestCase):
         self.assert_(self.ts.is_active())

         p = weakref.ref(self.tc._transport.packetizer)
-        self.assert_(p() is not None)
+        self.assertTrue(p() is not None)
+        self.tc.close()
         del self.tc

         # hrm, sometimes p isn't cleared right away. why is that?
         #st = time.time()
         #while (time.time() - st < 5.0) and (p() is not None):
@@ -216,5 +218,4 @@
         import gc
         gc.collect()
-        self.assert_(p() is None)
+        self.assertTrue(p() is None)

View File

@@ -22,6 +22,7 @@ Some unit tests for the BufferedFile abstraction.

 import unittest
 from paramiko.file import BufferedFile
+from paramiko.common import *


 class LoopbackFile (BufferedFile):
@@ -31,7 +32,7 @@ class LoopbackFile (BufferedFile):
     def __init__(self, mode='r', bufsize=-1):
         BufferedFile.__init__(self)
         self._set_mode(mode, bufsize)
-        self.buffer = ''
+        self.buffer = bytes()

     def _read(self, size):
         if len(self.buffer) == 0:
@@ -83,9 +84,9 @@ class BufferedFileTest (unittest.TestCase):
             self.assert_(False, 'no exception on readline of closed file')
         except IOError:
             pass
-        self.assert_('\n' in f.newlines)
-        self.assert_('\r\n' in f.newlines)
-        self.assert_('\r' not in f.newlines)
+        self.assert_(linefeed_byte in f.newlines)
+        self.assert_(crlf in f.newlines)
+        self.assert_(cr_byte not in f.newlines)

     def test_3_lf(self):
         """
@@ -97,7 +98,7 @@ class BufferedFileTest (unittest.TestCase):
         f.write('\nSecond.\r\n')
         self.assertEqual(f.readline(), 'Second.\n')
         f.close()
-        self.assertEqual(f.newlines, '\r\n')
+        self.assertEqual(f.newlines, crlf)

     def test_4_write(self):
         """

View File

@@ -72,7 +72,8 @@ FOLDER = os.environ.get('TEST_FOLDER', 'temp-testing000')
 sftp = None
 tc = None
 g_big_file_test = True
-unicode_folder = u'\u00fcnic\u00f8de'
+unicode_folder = eval(compile(r"'\u00fcnic\u00f8de'" if PY3 else r"u'\u00fcnic\u00f8de'", 'test_sftp.py', 'eval'))
+utf8_folder = eval(compile(r"b'/\xc3\xbcnic\xc3\xb8\x64\x65'" if PY3 else r"'/\xc3\xbcnic\xc3\xb8\x64\x65'", 'test_sftp.py', 'eval'))

 def get_sftp():
     global sftp
@@ -151,6 +152,7 @@ class SFTPTest (unittest.TestCase):
             pass

     def tearDown(self):
+        #sftp.chdir()
         sftp.rmdir(FOLDER)

     def test_1_file(self):
@@ -579,7 +581,7 @@ class SFTPTest (unittest.TestCase):
             saved_progress.append((x, y))
         sftp.put(localname, FOLDER + '/bunny.txt', progress_callback)

-        f = sftp.open(FOLDER + '/bunny.txt', 'r')
+        f = sftp.open(FOLDER + '/bunny.txt', 'rb')
         self.assertEqual(text, f.read(128))
         f.close()
         self.assertEqual((41, 41), saved_progress[-1])
@@ -647,11 +649,11 @@ class SFTPTest (unittest.TestCase):
         try:
             sftp.rename(FOLDER + '/something', FOLDER + '/' + unicode_folder)
-            sftp.open(FOLDER + '/\xc3\xbcnic\xc3\xb8\x64\x65', 'r')
+            sftp.open(b(FOLDER) + utf8_folder, 'r')
         except Exception:
             e = sys.exc_info()[1]
             self.fail('exception ' + str(e))
-        sftp.unlink(FOLDER + '/\xc3\xbcnic\xc3\xb8\x64\x65')
+        sftp.unlink(b(FOLDER) + utf8_folder)

     def test_L_utf8_chdir(self):
         sftp.mkdir(FOLDER + '/' + unicode_folder)
@@ -662,7 +664,7 @@ class SFTPTest (unittest.TestCase):
             f.close()
             sftp.unlink('something')
         finally:
-            sftp.chdir(None)
+            sftp.chdir()
             sftp.rmdir(FOLDER + '/' + unicode_folder)

     def test_M_bad_readv(self):
@@ -691,8 +693,8 @@ class SFTPTest (unittest.TestCase):
         fd, localname = mkstemp()
         os.close(fd)
-        text = b('All I wanted was a plastic bunny rabbit.\n')
-        f = open(localname, 'wb')
+        text = 'All I wanted was a plastic bunny rabbit.\n'
+        f = open(localname, 'w')
         f.write(text)
         f.close()

         saved_progress = []
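
The eval(compile(...)) dance for unicode_folder and utf8_folder keeps the module parseable on every targeted interpreter: a u'...' literal is a syntax error on Python 3.0-3.2 and a b'...' literal is a syntax error on Python 2.5 and earlier, so neither prefix can appear directly in the source. What the two names evaluate to, written with literals that assume a modern interpreter:

    unicode_folder = u'\u00fcnic\u00f8de'                # the folder name as text, on 2 and 3
    utf8_folder = b'/' + unicode_folder.encode('utf-8')  # b'/\xc3\xbcnic\xc3\xb8de', raw UTF-8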

View File

@@ -92,7 +92,7 @@ class BigSFTPTest (unittest.TestCase):
         write a 1MB file with no buffering.
         """
         sftp = get_sftp()
-        kblob = (1024 * b('x'))
+        kblob = (1024 * 'x')
         start = time.time()
         try:
             f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
@@ -127,7 +127,7 @@ class BigSFTPTest (unittest.TestCase):
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         start = time.time()
         try:
-            f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
+            f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
             f.set_pipelined(True)
             for n in range(1024):
                 f.write(kblob)
@@ -141,7 +141,7 @@ class BigSFTPTest (unittest.TestCase):
             sys.stderr.write('%ds ' % round(end - start))

             start = time.time()
-            f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
+            f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
             f.prefetch()

             # read on odd boundaries to make sure the bytes aren't getting scrambled
@@ -167,7 +167,7 @@ class BigSFTPTest (unittest.TestCase):
         sftp = get_sftp()
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         try:
-            f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
+            f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
             f.set_pipelined(True)
             for n in range(1024):
                 f.write(kblob)
@@ -182,7 +182,7 @@ class BigSFTPTest (unittest.TestCase):
             k2blob = kblob + kblob
             chunk = 793
             for i in range(10):
-                f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
+                f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
                 f.prefetch()
                 base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
                 offsets = [base_offset + j * chunk for j in range(100)]
@@ -205,7 +205,7 @@ class BigSFTPTest (unittest.TestCase):
         sftp = get_sftp()
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         try:
-            f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
+            f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
             f.set_pipelined(True)
             for n in range(1024):
                 f.write(kblob)
@@ -220,7 +220,7 @@ class BigSFTPTest (unittest.TestCase):
             k2blob = kblob + kblob
             chunk = 793
             for i in range(10):
-                f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
+                f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
                 base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
                 # make a bunch of offsets and put them in random order
                 offsets = [base_offset + j * chunk for j in range(100)]
@@ -246,7 +246,7 @@ class BigSFTPTest (unittest.TestCase):
         without using it, to verify that paramiko doesn't get confused.
         """
         sftp = get_sftp()
-        kblob = (1024 * b('x'))
+        kblob = (1024 * 'x')
         try:
             f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
             f.set_pipelined(True)
@@ -281,7 +281,7 @@ class BigSFTPTest (unittest.TestCase):
         sftp = get_sftp()
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         try:
-            f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
+            f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
             f.set_pipelined(True)
             for n in range(1024):
                 f.write(kblob)
@@ -292,7 +292,7 @@ class BigSFTPTest (unittest.TestCase):

             self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

-            f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
+            f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
             f.prefetch()
             data = f.read(1024)
             self.assertEqual(data, kblob)
@@ -320,7 +320,7 @@ class BigSFTPTest (unittest.TestCase):
         sftp = get_sftp()
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         try:
-            f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
+            f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
             f.set_pipelined(True)
             for n in range(1024):
                 f.write(kblob)
@@ -331,7 +331,7 @@ class BigSFTPTest (unittest.TestCase):

             self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

-            f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
+            f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
             data = list(f.readv([(23 * 1024, 128 * 1024)]))
             self.assertEqual(1, len(data))
             data = data[0]
@@ -347,7 +347,7 @@ class BigSFTPTest (unittest.TestCase):
         write a 1MB file, with no linefeeds, and a big buffer.
         """
         sftp = get_sftp()
-        mblob = (1024 * 1024 * b('x'))
+        mblob = (1024 * 1024 * 'x')
         try:
             f = sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024)
             f.write(mblob)
@@ -364,7 +364,7 @@ class BigSFTPTest (unittest.TestCase):
         sftp = get_sftp()
         t = sftp.sock.get_transport()
         t.packetizer.REKEY_BYTES = 512 * 1024
-        k32blob = (32 * 1024 * b('x'))
+        k32blob = (32 * 1024 * 'x')
         try:
             f = sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024)
             for i in range(32):
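
The kblob/mblob changes lean on the new coercion in BufferedFile.write(): a plain str payload is converted through b() before buffering, so the throughput tests can keep writing 'x' * N through a text-mode handle, while the struct.pack-based tests switch to 'wb'/'rb' so their byte patterns round-trip exactly. A condensed sketch (sftp and FOLDER as set up by this test module):

    f = sftp.open('%s/hongry.txt' % FOLDER, 'w')     # text mode; a str payload is fine now
    f.write(1024 * 'x')
    f.close()

    f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')    # binary mode for byte-exact reads
    assert f.read(4) == b'xxxx'
    f.close()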