Make sftp.open handle binary and text, more type conversion
parent 7decda3297
commit 45e65b6e1e
@@ -378,8 +378,8 @@ class SSHClient (object):
         chan.settimeout(timeout)
         chan.exec_command(command)
         stdin = chan.makefile('wb', bufsize)
-        stdout = chan.makefile('rb', bufsize)
-        stderr = chan.makefile_stderr('rb', bufsize)
+        stdout = chan.makefile('r', bufsize)
+        stderr = chan.makefile_stderr('r', bufsize)
         return stdin, stdout, stderr

     def invoke_shell(self, term='vt100', width=80, height=24, width_pixels=0,
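In practice this means `exec_command` now hands back text-mode stdout/stderr while stdin stays byte-oriented. A minimal sketch of the caller-side effect (my example, not part of the commit; assumes an already-connected `SSHClient` named `client`):

```python
# Sketch only; `client` is assumed to be a connected paramiko.SSHClient.
stdin, stdout, stderr = client.exec_command('uname -a')

output = stdout.read()   # stdout/stderr were opened 'r': read() returns text
errors = stderr.read()
print(output.strip())    # stdin, opened 'wb', still expects bytes if written to
```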
@@ -127,12 +127,16 @@ zero_byte = byte_chr(0)
 one_byte = byte_chr(1)
 four_byte = byte_chr(4)
 max_byte = byte_chr(0xff)
-newline_byte = byte_chr(10)
 cr_byte = byte_chr(13)
+linefeed_byte = byte_chr(10)
+crlf = cr_byte + linefeed_byte

 if PY3:
-    cr_byte = 13
+    cr_byte_value = 13
+    linefeed_byte_value = 10
 else:
-    cr_byte = '\r'
+    cr_byte_value = cr_byte
+    linefeed_byte_value = linefeed_byte


 def asbytes(s):
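The `*_value` constants exist because indexing a bytes object behaves differently across Python versions. A small illustrative check (my example, not part of the commit):

```python
# Illustration only: why cr_byte_value / linefeed_byte_value are needed.
from paramiko.common import crlf, linefeed_byte, linefeed_byte_value

line = b'hello\r\n'
assert line.endswith(crlf)
assert line[-1:] == linefeed_byte       # slicing yields bytes on both Python 2 and 3
assert line[-1] == linefeed_byte_value  # indexing yields 10 on Python 3, '\n' on Python 2
```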
@@ -47,8 +47,8 @@ class BufferedFile (object):
         self.newlines = None
         self._flags = 0
         self._bufsize = self._DEFAULT_BUFSIZE
-        self._wbuffer = StringIO()
-        self._rbuffer = ''
+        self._wbuffer = BytesIO()
+        self._rbuffer = bytes()
         self._at_trailing_cr = False
         self._closed = False
         # pos - position within the file, according to the user

@@ -89,7 +89,7 @@ class BufferedFile (object):
         buffering is not turned on.
         """
         self._write_all(self._wbuffer.getvalue())
-        self._wbuffer = StringIO()
+        self._wbuffer = BytesIO()
         return

     if PY3:

@@ -144,7 +144,7 @@ class BufferedFile (object):
         if (size is None) or (size < 0):
             # go for broke
             result = self._rbuffer
-            self._rbuffer = ''
+            self._rbuffer = bytes()
             self._pos += len(result)
             while True:
                 try:

@@ -156,12 +156,12 @@ class BufferedFile (object):
                 result += new_data
                 self._realpos += len(new_data)
                 self._pos += len(new_data)
-            return result
+            return result if self._flags & self.FLAG_BINARY else u(result)
         if size <= len(self._rbuffer):
             result = self._rbuffer[:size]
             self._rbuffer = self._rbuffer[size:]
             self._pos += len(result)
-            return result
+            return result if self._flags & self.FLAG_BINARY else u(result)
         while len(self._rbuffer) < size:
             read_size = size - len(self._rbuffer)
             if self._flags & self.FLAG_BUFFERED:

@@ -177,7 +177,7 @@ class BufferedFile (object):
         result = self._rbuffer[:size]
         self._rbuffer = self._rbuffer[size:]
         self._pos += len(result)
-        return result
+        return result if self._flags & self.FLAG_BINARY else u(result)

     def readline(self, size=None):
         """

@@ -207,11 +207,11 @@ class BufferedFile (object):
             if self._at_trailing_cr and (self._flags & self.FLAG_UNIVERSAL_NEWLINE) and (len(line) > 0):
                 # edge case: the newline may be '\r\n' and we may have read
                 # only the first '\r' last time.
-                if line[0] == '\n':
+                if line[0] == linefeed_byte_value:
                     line = line[1:]
-                    self._record_newline('\r\n')
+                    self._record_newline(crlf)
                 else:
-                    self._record_newline('\r')
+                    self._record_newline(cr_byte)
                 self._at_trailing_cr = False
             # check size before looking for a linefeed, in case we already have
             # enough.
@@ -221,42 +221,42 @@ class BufferedFile (object):
                     self._rbuffer = line[size:]
                     line = line[:size]
                     self._pos += len(line)
-                    return line
+                    return line if self._flags & self.FLAG_BINARY else u(line)
                 n = size - len(line)
             else:
                 n = self._bufsize
-            if ('\n' in line) or ((self._flags & self.FLAG_UNIVERSAL_NEWLINE) and ('\r' in line)):
+            if (linefeed_byte in line) or ((self._flags & self.FLAG_UNIVERSAL_NEWLINE) and (cr_byte in line)):
                 break
             try:
                 new_data = self._read(n)
             except EOFError:
                 new_data = None
             if (new_data is None) or (len(new_data) == 0):
-                self._rbuffer = ''
+                self._rbuffer = bytes()
                 self._pos += len(line)
-                return line
-            line += b2s(new_data)
+                return line if self._flags & self.FLAG_BINARY else u(line)
+            line += new_data
             self._realpos += len(new_data)
         # find the newline
-        pos = line.find('\n')
+        pos = line.find(linefeed_byte)
         if self._flags & self.FLAG_UNIVERSAL_NEWLINE:
-            rpos = line.find('\r')
+            rpos = line.find(cr_byte)
             if (rpos >= 0) and ((rpos < pos) or (pos < 0)):
                 pos = rpos
         xpos = pos + 1
-        if (line[pos] == '\r') and (xpos < len(line)) and (line[xpos] == '\n'):
+        if (line[pos] == cr_byte_value) and (xpos < len(line)) and (line[xpos] == linefeed_byte_value):
             xpos += 1
         self._rbuffer = line[xpos:]
         lf = line[pos:xpos]
-        line = line[:pos] + '\n'
-        if (len(self._rbuffer) == 0) and (lf == '\r'):
+        line = line[:pos] + linefeed_byte
+        if (len(self._rbuffer) == 0) and (lf == cr_byte):
             # we could read the line up to a '\r' and there could still be a
             # '\n' following that we read next time. note that and eat it.
             self._at_trailing_cr = True
         else:
             self._record_newline(lf)
         self._pos += len(line)
-        return line
+        return line if self._flags & self.FLAG_BINARY else u(line)

     def readlines(self, sizehint=None):
         """
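With these changes, `read`/`readline` return raw bytes only when the file was opened with a 'b' mode (FLAG_BINARY); otherwise the data is passed through `u()` and comes back as text. A hedged sketch of the user-visible effect (assumes an open `SFTPClient` session named `sftp` and a hypothetical remote path):

```python
# Sketch only; `sftp` is assumed to be an open paramiko.SFTPClient session
# and 'example.txt' a hypothetical remote path.
f = sftp.open('example.txt', 'r')    # text mode: FLAG_BINARY not set
line = f.readline()                  # decoded text (via u())
f.close()

f = sftp.open('example.txt', 'rb')   # binary mode: FLAG_BINARY set
chunk = f.read(128)                  # raw bytes, no decoding
f.close()
```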
@@ -323,6 +323,7 @@ class BufferedFile (object):
         @param data: data to write.
         @type data: str
         """
+        data = b(data)
         if self._closed:
             raise IOError('File is closed')
         if not (self._flags & self.FLAG_WRITE):

@@ -333,12 +334,12 @@ class BufferedFile (object):
         self._wbuffer.write(data)
         if self._flags & self.FLAG_LINE_BUFFERED:
             # only scan the new data for linefeed, to avoid wasting time.
-            last_newline_pos = data.rfind('\n')
+            last_newline_pos = data.rfind(linefeed_byte)
             if last_newline_pos >= 0:
                 wbuf = self._wbuffer.getvalue()
                 last_newline_pos += len(wbuf) - len(data)
                 self._write_all(wbuf[:last_newline_pos + 1])
-                self._wbuffer = StringIO()
+                self._wbuffer = BytesIO()
                 self._wbuffer.write(wbuf[last_newline_pos + 1:])
             return
         # even if we're line buffering, if the buffer has grown past the

@@ -471,7 +472,7 @@ class BufferedFile (object):
             return
         if self.newlines is None:
             self.newlines = newline
-        elif (type(self.newlines) is str) and (self.newlines != newline):
+        elif self.newlines != newline and isinstance(self.newlines, bytes_types):
             self.newlines = (self.newlines, newline)
         elif newline not in self.newlines:
             self.newlines += (newline,)
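Newline bookkeeping is now done in bytes as well: `f.newlines` collects `b'\n'`, `b'\r'`, or `crlf` as they are seen. A rough sketch of the expected behavior (my example; assumes a readable BufferedFile subclass `f` in universal-newline ('U') text mode over data such as `b'First.\r\nSecond.\n'`):

```python
# Sketch only; `f` stands in for any readable BufferedFile in universal-newline mode.
first = f.readline()    # 'First.\n'  -- the '\r\n' ending is normalized to '\n'
second = f.readline()   # 'Second.\n'
print(f.newlines)       # newline styles seen so far, recorded as bytes, e.g. (b'\r\n', b'\n')
```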
@@ -277,12 +277,12 @@ class Packetizer (object):
         line, so it's okay to attempt large reads.
         """
         buf = self.__remainder
-        while not newline_byte in buf:
+        while not linefeed_byte in buf:
             buf += self._read_timeout(timeout)
-        n = buf.index(newline_byte)
+        n = buf.index(linefeed_byte)
         self.__remainder = buf[n+1:]
         buf = buf[:n]
-        if (len(buf) > 0) and (buf[-1] == cr_byte):
+        if (len(buf) > 0) and (buf[-1] == cr_byte_value):
             buf = buf[:-1]
         return u(buf)

@@ -221,3 +221,5 @@ class SFTPAttributes (object):

         return '%s 1 %-8d %-8d %8d %-12s %s' % (ks, uid, gid, self.st_size, datestr, filename)

+    def asbytes(self):
+        return b(str(self))
@@ -497,7 +497,7 @@ class SFTPClient (BaseSFTP):
             raise SFTPError('Realpath returned %d results' % count)
         return msg.get_text()

-    def chdir(self, path):
+    def chdir(self, path=None):
         """
         Change the "current directory" of this SFTP session. Since SFTP
         doesn't really have the concept of a current working directory, this

@@ -773,7 +773,7 @@ class SFTPClient (BaseSFTP):
         path = b(path)
         if self._cwd is None:
             return path
-        if (len(path) > 0) and (path[0] == '/'):
+        if len(path) and path[0:1] == b_slash:
             # absolute path
             return path
         if self._cwd == b_slash:
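The `chdir` signature change means the emulated working directory can now be cleared by calling it with no argument. Sketch only (assumes an open `sftp` session and that '/tmp' exists on the server):

```python
# Sketch only; `sftp` is assumed to be an open paramiko.SFTPClient session.
sftp.chdir('/tmp')       # emulate a working directory for relative paths
print(sftp.getcwd())     # '/tmp'
sftp.chdir()             # path defaults to None: clear the emulated cwd
print(sftp.getcwd())     # None
```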
@@ -218,7 +218,7 @@ class SFTPFile (BufferedFile):
             self._realpos = self._pos
         else:
             self._realpos = self._pos = self._get_size() + offset
-        self._rbuffer = ''
+        self._rbuffer = bytes()

     def stat(self):
         """
@@ -168,7 +168,11 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
         if attr._flags & attr.FLAG_AMTIME:
             os.utime(filename, (attr.st_atime, attr.st_mtime))
         if attr._flags & attr.FLAG_SIZE:
-            open(filename, 'w+').truncate(attr.st_size)
+            f = open(filename, 'w+')
+            try:
+                f.truncate(attr.st_size)
+            finally:
+                f.close()
     set_file_attr = staticmethod(set_file_attr)

@@ -234,7 +238,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
         msg.add_int(len(flist))
         for attr in flist:
             msg.add_string(attr.filename)
-            msg.add_string(str(attr))
+            msg.add_string(attr)
             attr._pack(msg)
         self._send_packet(CMD_NAME, msg)

@@ -282,7 +286,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
             hash_obj = alg.new()
             while count < blocklen:
                 data = f.read(offset, chunklen)
-                if not type(data) is str:
+                if not isinstance(data, bytes_types):
                     self._send_status(request_number, data, 'Unable to hash file')
                     return
                 hash_obj.update(data)
@@ -204,8 +204,10 @@ class SSHClientTest (unittest.TestCase):
         self.assert_(self.ts.is_active())

         p = weakref.ref(self.tc._transport.packetizer)
-        self.assert_(p() is not None)
+        self.assertTrue(p() is not None)
         self.tc.close()
         del self.tc

         # hrm, sometimes p isn't cleared right away. why is that?
         #st = time.time()
         #while (time.time() - st < 5.0) and (p() is not None):

@@ -216,5 +218,4 @@ class SSHClientTest (unittest.TestCase):
         import gc
         gc.collect()

-        self.assert_(p() is None)
-
+        self.assertTrue(p() is None)
@@ -22,6 +22,7 @@ Some unit tests for the BufferedFile abstraction.

 import unittest
 from paramiko.file import BufferedFile
+from paramiko.common import *


 class LoopbackFile (BufferedFile):

@@ -31,7 +32,7 @@ class LoopbackFile (BufferedFile):
     def __init__(self, mode='r', bufsize=-1):
         BufferedFile.__init__(self)
         self._set_mode(mode, bufsize)
-        self.buffer = ''
+        self.buffer = bytes()

     def _read(self, size):
         if len(self.buffer) == 0:

@@ -83,9 +84,9 @@ class BufferedFileTest (unittest.TestCase):
             self.assert_(False, 'no exception on readline of closed file')
         except IOError:
             pass
-        self.assert_('\n' in f.newlines)
-        self.assert_('\r\n' in f.newlines)
-        self.assert_('\r' not in f.newlines)
+        self.assert_(linefeed_byte in f.newlines)
+        self.assert_(crlf in f.newlines)
+        self.assert_(cr_byte not in f.newlines)

     def test_3_lf(self):
         """

@@ -97,7 +98,7 @@ class BufferedFileTest (unittest.TestCase):
         f.write('\nSecond.\r\n')
         self.assertEqual(f.readline(), 'Second.\n')
         f.close()
-        self.assertEqual(f.newlines, '\r\n')
+        self.assertEqual(f.newlines, crlf)

     def test_4_write(self):
         """
|
@ -72,7 +72,8 @@ FOLDER = os.environ.get('TEST_FOLDER', 'temp-testing000')
|
|||
sftp = None
|
||||
tc = None
|
||||
g_big_file_test = True
|
||||
unicode_folder = u'\u00fcnic\u00f8de'
|
||||
unicode_folder = eval(compile(r"'\u00fcnic\u00f8de'" if PY3 else r"u'\u00fcnic\u00f8de'", 'test_sftp.py', 'eval'))
|
||||
utf8_folder = eval(compile(r"b'/\xc3\xbcnic\xc3\xb8\x64\x65'" if PY3 else r"'/\xc3\xbcnic\xc3\xb8\x64\x65'", 'test_sftp.py', 'eval'))
|
||||
|
||||
def get_sftp():
|
||||
global sftp
|
||||
|
@ -151,6 +152,7 @@ class SFTPTest (unittest.TestCase):
|
|||
pass
|
||||
|
||||
def tearDown(self):
|
||||
#sftp.chdir()
|
||||
sftp.rmdir(FOLDER)
|
||||
|
||||
def test_1_file(self):
|
||||
|
@ -579,7 +581,7 @@ class SFTPTest (unittest.TestCase):
|
|||
saved_progress.append((x, y))
|
||||
sftp.put(localname, FOLDER + '/bunny.txt', progress_callback)
|
||||
|
||||
f = sftp.open(FOLDER + '/bunny.txt', 'r')
|
||||
f = sftp.open(FOLDER + '/bunny.txt', 'rb')
|
||||
self.assertEqual(text, f.read(128))
|
||||
f.close()
|
||||
self.assertEqual((41, 41), saved_progress[-1])
|
||||
|
@@ -647,11 +649,11 @@ class SFTPTest (unittest.TestCase):

         try:
             sftp.rename(FOLDER + '/something', FOLDER + '/' + unicode_folder)
-            sftp.open(FOLDER + '/\xc3\xbcnic\xc3\xb8\x64\x65', 'r')
+            sftp.open(b(FOLDER) + utf8_folder, 'r')
         except Exception:
             e = sys.exc_info()[1]
             self.fail('exception ' + str(e))
-        sftp.unlink(FOLDER + '/\xc3\xbcnic\xc3\xb8\x64\x65')
+        sftp.unlink(b(FOLDER) + utf8_folder)

     def test_L_utf8_chdir(self):
         sftp.mkdir(FOLDER + '/' + unicode_folder)

@@ -662,7 +664,7 @@ class SFTPTest (unittest.TestCase):
             f.close()
             sftp.unlink('something')
         finally:
-            sftp.chdir(None)
+            sftp.chdir()
             sftp.rmdir(FOLDER + '/' + unicode_folder)

     def test_M_bad_readv(self):

@@ -691,8 +693,8 @@ class SFTPTest (unittest.TestCase):

         fd, localname = mkstemp()
         os.close(fd)
-        text = b('All I wanted was a plastic bunny rabbit.\n')
-        f = open(localname, 'wb')
+        text = 'All I wanted was a plastic bunny rabbit.\n'
+        f = open(localname, 'w')
         f.write(text)
         f.close()
         saved_progress = []
|
@ -92,7 +92,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
write a 1MB file with no buffering.
|
||||
"""
|
||||
sftp = get_sftp()
|
||||
kblob = (1024 * b('x'))
|
||||
kblob = (1024 * 'x')
|
||||
start = time.time()
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
|
||||
|
@ -127,7 +127,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
|
||||
start = time.time()
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
|
||||
f.set_pipelined(True)
|
||||
for n in range(1024):
|
||||
f.write(kblob)
|
||||
|
@ -141,7 +141,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
sys.stderr.write('%ds ' % round(end - start))
|
||||
|
||||
start = time.time()
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
|
||||
f.prefetch()
|
||||
|
||||
# read on odd boundaries to make sure the bytes aren't getting scrambled
|
||||
|
@ -167,7 +167,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
sftp = get_sftp()
|
||||
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
|
||||
f.set_pipelined(True)
|
||||
for n in range(1024):
|
||||
f.write(kblob)
|
||||
|
@ -182,7 +182,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
k2blob = kblob + kblob
|
||||
chunk = 793
|
||||
for i in range(10):
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
|
||||
f.prefetch()
|
||||
base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
|
||||
offsets = [base_offset + j * chunk for j in range(100)]
|
||||
|
@ -205,7 +205,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
sftp = get_sftp()
|
||||
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
|
||||
f.set_pipelined(True)
|
||||
for n in range(1024):
|
||||
f.write(kblob)
|
||||
|
@ -220,7 +220,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
k2blob = kblob + kblob
|
||||
chunk = 793
|
||||
for i in range(10):
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
|
||||
base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
|
||||
# make a bunch of offsets and put them in random order
|
||||
offsets = [base_offset + j * chunk for j in range(100)]
|
||||
|
@ -246,7 +246,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
without using it, to verify that paramiko doesn't get confused.
|
||||
"""
|
||||
sftp = get_sftp()
|
||||
kblob = (1024 * b('x'))
|
||||
kblob = (1024 * 'x')
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
|
||||
f.set_pipelined(True)
|
||||
|
@ -281,7 +281,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
sftp = get_sftp()
|
||||
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
|
||||
f.set_pipelined(True)
|
||||
for n in range(1024):
|
||||
f.write(kblob)
|
||||
|
@ -292,7 +292,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
|
||||
self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
|
||||
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
|
||||
f.prefetch()
|
||||
data = f.read(1024)
|
||||
self.assertEqual(data, kblob)
|
||||
|
@ -320,7 +320,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
sftp = get_sftp()
|
||||
kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'wb')
|
||||
f.set_pipelined(True)
|
||||
for n in range(1024):
|
||||
f.write(kblob)
|
||||
|
@ -331,7 +331,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
|
||||
self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
|
||||
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'r')
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'rb')
|
||||
data = list(f.readv([(23 * 1024, 128 * 1024)]))
|
||||
self.assertEqual(1, len(data))
|
||||
data = data[0]
|
||||
|
@ -347,7 +347,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
write a 1MB file, with no linefeeds, and a big buffer.
|
||||
"""
|
||||
sftp = get_sftp()
|
||||
mblob = (1024 * 1024 * b('x'))
|
||||
mblob = (1024 * 1024 * 'x')
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024)
|
||||
f.write(mblob)
|
||||
|
@ -364,7 +364,7 @@ class BigSFTPTest (unittest.TestCase):
|
|||
sftp = get_sftp()
|
||||
t = sftp.sock.get_transport()
|
||||
t.packetizer.REKEY_BYTES = 512 * 1024
|
||||
k32blob = (32 * 1024 * b('x'))
|
||||
k32blob = (32 * 1024 * 'x')
|
||||
try:
|
||||
f = sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024)
|
||||
for i in range(32):
|
||||
|
|
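The big-file tests above follow one pattern throughout: binary payloads go through 'wb'/'rb' handles, with `set_pipelined`/`prefetch` to keep transfers fast, while plain-'x' text payloads stay in 'w' text mode. A condensed sketch of the binary pattern (my example; hypothetical remote path, assumes an open `sftp` session):

```python
# Sketch only; `sftp` is assumed to be an open paramiko.SFTPClient session,
# and 'hongry.bin' is a hypothetical remote path.
import struct

kblob = bytes().join([struct.pack('>H', n) for n in range(512)])  # 1 KB of binary data

f = sftp.open('hongry.bin', 'wb')    # binary payload, so a 'b' mode
f.set_pipelined(True)                # don't wait for a server ack after every write
for _ in range(1024):
    f.write(kblob)
f.close()

f = sftp.open('hongry.bin', 'rb')
f.prefetch()                         # pipeline the reads too
assert f.read(1024) == kblob
f.close()
```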