commit b0876fa013
Merge branch 'master' into ecdsa-deterministic

Conflicts:
    paramiko/ecdsakey.py

@@ -5,5 +5,5 @@ tox>=1.4,<1.5
 invoke>=0.7.0
 invocations>=0.5.0
 sphinx>=1.1.3
-alabaster>=0.3.1
+alabaster>=0.4.0
 releases>=0.5.2
@@ -20,8 +20,9 @@
 DSS keys.
 """
 
+from hashlib import sha1
+
 from Crypto.PublicKey import DSA
-from Crypto.Hash import SHA
 
 from paramiko import util
 from paramiko.common import zero_byte, rng
@@ -96,7 +97,7 @@ class DSSKey (PKey):
         return self.x is not None
 
     def sign_ssh_data(self, rng, data):
-        digest = SHA.new(data).digest()
+        digest = sha1(data).digest()
         dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q), long(self.x)))
         # generate a suitable k
         qsize = len(util.deflate_long(self.q, 0))
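
Note: this is the pattern repeated throughout the commit. PyCrypto hash modules are instantiated with `SHA.new(data)`, while `hashlib` exposes constructors that are called directly. A minimal sketch of the equivalence (the data value is illustrative):

    from hashlib import sha1

    data = b'example'
    # old PyCrypto spelling:  digest = SHA.new(data).digest()
    # new hashlib spelling:
    digest = sha1(data).digest()        # 20 raw bytes
    hexdigest = sha1(data).hexdigest()  # same value, hex-encoded

Both spellings produce identical SHA-1 output, so the change is purely a dependency swap.
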
@@ -130,7 +131,7 @@ class DSSKey (PKey):
             # pull out (r, s) which are NOT encoded as mpints
             sigR = util.inflate_long(sig[:20], 1)
             sigS = util.inflate_long(sig[20:], 1)
-        sigM = util.inflate_long(SHA.new(data).digest(), 1)
+        sigM = util.inflate_long(sha1(data).digest(), 1)
 
         dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q)))
         return dss.verify(sigM, (sigR, sigS))
@@ -21,13 +21,12 @@ L{ECDSAKey}
 """
 
 import binascii
-import hashlib
+from hashlib import sha256
 
 from ecdsa import SigningKey, VerifyingKey, der, curves
-from Crypto.Hash import SHA256
 from ecdsa.test_pyecdsa import ECDSA
-from paramiko.common import four_byte, one_byte
 
+from paramiko.common import four_byte, one_byte
 from paramiko.message import Message
 from paramiko.pkey import PKey
 from paramiko.py3compat import byte_chr, u
@@ -101,7 +100,7 @@ class ECDSAKey (PKey):
 
     def sign_ssh_data(self, rpool, data):
        sig = self.signing_key.sign_deterministic(
-            data, sigencode=self._sigencode, hashfunc=hashlib.sha256)
+            data, sigencode=self._sigencode, hashfunc=sha256)
         m = Message()
         m.add_string('ecdsa-sha2-nistp256')
         m.add_string(sig)
@@ -114,7 +113,7 @@ class ECDSAKey (PKey):
 
         # verify the signature by SHA'ing the data and encrypting it
         # using the public key.
-        hash_obj = SHA256.new(data).digest()
+        hash_obj = sha256(data).digest()
         return self.verifying_key.verify_digest(sig, hash_obj,
                                                 sigdecode=self._sigdecode)
 
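
Note: this is the point of the ecdsa-deterministic branch. `sign_deterministic` from the `ecdsa` package derives the nonce from the key and message (in the style of RFC 6979) instead of drawing it from an RNG, so a weak or repeated nonce cannot leak the key. A standalone sketch, assuming a freshly generated NIST P-256 key and the library's plain string encoders rather than paramiko's loaded `signing_key` and custom `_sigencode`/`_sigdecode`:

    from hashlib import sha256
    from ecdsa import SigningKey, NIST256p
    from ecdsa.util import sigencode_string, sigdecode_string

    sk = SigningKey.generate(curve=NIST256p)  # illustrative key
    vk = sk.get_verifying_key()

    data = b'payload'
    sig = sk.sign_deterministic(data, hashfunc=sha256,
                                sigencode=sigencode_string)
    # Deterministic: signing the same data twice yields the same bytes.
    assert sig == sk.sign_deterministic(data, hashfunc=sha256,
                                        sigencode=sigencode_string)
    # Verification hashes the data the same way verify_ssh_sig does above.
    assert vk.verify_digest(sig, sha256(data).digest(),
                            sigdecode=sigdecode_string)
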
@@ -18,7 +18,9 @@
 
 
 import binascii
-from Crypto.Hash import SHA, HMAC
+from hashlib import sha1
+
+from hmac import HMAC
 
 from paramiko.common import rng
 from paramiko.py3compat import b, u, encodebytes, decodebytes
@@ -262,13 +264,13 @@ class HostKeys (MutableMapping):
         :return: the hashed hostname as a `str`
         """
         if salt is None:
-            salt = rng.read(SHA.digest_size)
+            salt = rng.read(sha1().digest_size)
         else:
             if salt.startswith('|1|'):
                 salt = salt.split('|')[2]
             salt = decodebytes(b(salt))
-        assert len(salt) == SHA.digest_size
-        hmac = HMAC.HMAC(salt, b(hostname), SHA).digest()
+        assert len(salt) == sha1().digest_size
+        hmac = HMAC(salt, b(hostname), sha1).digest()
         hostkey = '|1|%s|%s' % (u(encodebytes(salt)), u(encodebytes(hmac)))
         return hostkey.replace('\n', '')
     hash_host = staticmethod(hash_host)
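
Note: the resulting string is the same `|1|salt|mac` format OpenSSH writes for HashKnownHosts entries. A self-contained sketch of the same computation using only the standard library (the hostname is illustrative):

    import os
    from base64 import b64encode
    from hashlib import sha1
    from hmac import HMAC

    hostname = 'example.com'
    salt = os.urandom(sha1().digest_size)            # 20 random bytes
    mac = HMAC(salt, hostname.encode(), sha1).digest()
    entry = '|1|%s|%s' % (b64encode(salt).decode(), b64encode(mac).decode())
    # e.g. '|1|<base64 salt>|<base64 hmac>', as found in ~/.ssh/known_hosts
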
@@ -22,7 +22,7 @@ generator "g" are provided by the server. A bit more work is required on the
 client side, and a B{lot} more on the server side.
 """
 
-from Crypto.Hash import SHA
+from hashlib import sha1
 
 from paramiko import util
 from paramiko.common import DEBUG
@@ -203,7 +203,7 @@ class KexGex (object):
         hm.add_mpint(self.e)
         hm.add_mpint(self.f)
         hm.add_mpint(K)
-        H = SHA.new(hm.asbytes()).digest()
+        H = sha1(hm.asbytes()).digest()
         self.transport._set_K_H(K, H)
         # sign it
         sig = self.transport.get_server_key().sign_ssh_data(self.transport.rng, H)
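
Note: the exchange hash H doubles as the session identifier, per RFC 4253 section 8. A schematic of just the tail of the computation shown above (the real code also packs the version strings, kexinit payloads, host key blob, and group parameters before e; the values here are placeholders):

    from hashlib import sha1
    from paramiko.message import Message

    e = f = K = 0x1234  # placeholder mpint values, for illustration only

    hm = Message()
    hm.add_mpint(e)     # client DH public value
    hm.add_mpint(f)     # server DH public value
    hm.add_mpint(K)     # shared secret
    H = sha1(hm.asbytes()).digest()  # 20-byte exchange hash
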
@@ -238,6 +238,6 @@ class KexGex (object):
         hm.add_mpint(self.e)
         hm.add_mpint(self.f)
         hm.add_mpint(K)
-        self.transport._set_K_H(K, SHA.new(hm.asbytes()).digest())
+        self.transport._set_K_H(K, sha1(hm.asbytes()).digest())
         self.transport._verify_key(host_key, sig)
         self.transport._activate_outbound()
@@ -21,7 +21,7 @@ Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of
 1024 bit key halves, using a known "p" prime and "g" generator.
 """
 
-from Crypto.Hash import SHA
+from hashlib import sha1
 
 from paramiko import util
 from paramiko.common import max_byte, zero_byte
@@ -105,7 +105,7 @@ class KexGroup1(object):
         hm.add_mpint(self.e)
         hm.add_mpint(self.f)
         hm.add_mpint(K)
-        self.transport._set_K_H(K, SHA.new(hm.asbytes()).digest())
+        self.transport._set_K_H(K, sha1(hm.asbytes()).digest())
         self.transport._verify_key(host_key, sig)
         self.transport._activate_outbound()
 
@@ -124,7 +124,7 @@ class KexGroup1(object):
         hm.add_mpint(self.e)
         hm.add_mpint(self.f)
         hm.add_mpint(K)
-        H = SHA.new(hm.asbytes()).digest()
+        H = sha1(hm.asbytes()).digest()
         self.transport._set_K_H(K, H)
         # sign it
         sig = self.transport.get_server_key().sign_ssh_data(self.transport.rng, H)
@@ -25,6 +25,7 @@ import socket
 import struct
 import threading
 import time
+from hmac import HMAC
 
 from paramiko import util
 from paramiko.common import linefeed_byte, cr_byte_value, asbytes, MSG_NAMES, \
@@ -34,12 +35,6 @@ from paramiko.ssh_exception import SSHException, ProxyCommandFailure
 from paramiko.message import Message
 
 
-try:
-    from r_hmac import HMAC
-except ImportError:
-    from Crypto.Hash.HMAC import HMAC
-
-
 def compute_hmac(key, message, digest_class):
     return HMAC(key, message, digest_class).digest()
 
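
Note: `compute_hmac` survives the import swap unchanged because the standard library's `hmac.HMAC(key, msg, digestmod)` constructor matches the PyCrypto and r_hmac call signatures, and it accepts the hashlib constructors now stored in `Transport._mac_info`. A quick check:

    from hmac import HMAC
    from hashlib import md5, sha1

    def compute_hmac(key, message, digest_class):
        return HMAC(key, message, digest_class).digest()

    assert len(compute_hmac(b'\x1f' * 20, b'payload', sha1)) == 20  # sha1 MAC
    assert len(compute_hmac(b'\x1f' * 16, b'payload', md5)) == 16   # md5 MAC
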
@@ -359,7 +354,7 @@ class Packetizer (object):
                 raise SSHException('Mismatched MAC')
         padding = byte_ord(packet[0])
         payload = packet[1:packet_size - padding]
 
         if self.__dump_packets:
             self._log(DEBUG, 'Got payload (%d bytes, %d padding)' % (packet_size, padding))
 
@@ -23,8 +23,8 @@ Common API for all public keys.
 import base64
 from binascii import hexlify, unhexlify
 import os
+from hashlib import md5
 
-from Crypto.Hash import MD5
 from Crypto.Cipher import DES3, AES
 
 from paramiko import util
@@ -126,7 +126,7 @@ class PKey (object):
             a 16-byte `string <str>` (binary) of the MD5 fingerprint, in SSH
             format.
         """
-        return MD5.new(self.asbytes()).digest()
+        return md5(self.asbytes()).digest()
 
     def get_base64(self):
         """
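
Note: `get_fingerprint` returns the raw 16 digest bytes; callers typically hex-encode them for display. A usage sketch (the key file path is hypothetical):

    from paramiko import RSAKey

    key = RSAKey(filename='/path/to/id_rsa')  # hypothetical private key file
    fp = key.get_fingerprint()                # md5 of the public key blob
    print(':'.join('%02x' % b for b in bytearray(fp)))  # e.g. aa:bb:cc:...
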
@@ -300,7 +300,7 @@ class PKey (object):
         keysize = self._CIPHER_TABLE[encryption_type]['keysize']
         mode = self._CIPHER_TABLE[encryption_type]['mode']
         salt = unhexlify(b(saltstr))
-        key = util.generate_key_bytes(MD5, salt, password, keysize)
+        key = util.generate_key_bytes(md5, salt, password, keysize)
         return cipher.new(key, mode, salt).decrypt(data)
 
     def _write_private_key_file(self, tag, filename, data, password=None):
@@ -332,7 +332,7 @@ class PKey (object):
         blocksize = self._CIPHER_TABLE[cipher_name]['blocksize']
         mode = self._CIPHER_TABLE[cipher_name]['mode']
         salt = rng.read(16)
-        key = util.generate_key_bytes(MD5, salt, password, keysize)
+        key = util.generate_key_bytes(md5, salt, password, keysize)
         if len(data) % blocksize != 0:
             n = blocksize - len(data) % blocksize
             #data += rng.read(n)
@@ -20,31 +20,11 @@
 Utility functions for dealing with primes.
 """
 
-from Crypto.Util import number
-
 from paramiko import util
 from paramiko.py3compat import byte_mask, long
 from paramiko.ssh_exception import SSHException
 
 
-def _generate_prime(bits, rng):
-    """primtive attempt at prime generation"""
-    hbyte_mask = pow(2, bits % 8) - 1
-    while True:
-        # loop catches the case where we increment n into a higher bit-range
-        x = rng.read((bits + 7) // 8)
-        if hbyte_mask > 0:
-            x = byte_mask(x[0], hbyte_mask) + x[1:]
-        n = util.inflate_long(x, 1)
-        n |= 1
-        n |= (1 << (bits - 1))
-        while not number.isPrime(n):
-            n += 2
-            if util.bit_length(n) == bits:
-                break
-        return n
-
-
 def _roll_random(rng, n):
     """returns a random # from 0 to N-1"""
     bits = util.bit_length(n - 1)
@@ -20,8 +20,9 @@
 RSA keys.
 """
 
+from hashlib import sha1
+
 from Crypto.PublicKey import RSA
-from Crypto.Hash import SHA
 
 from paramiko import util
 from paramiko.common import rng, max_byte, zero_byte, one_byte
@@ -91,7 +92,7 @@ class RSAKey (PKey):
         return self.d is not None
 
     def sign_ssh_data(self, rpool, data):
-        digest = SHA.new(data).digest()
+        digest = sha1(data).digest()
         rsa = RSA.construct((long(self.n), long(self.e), long(self.d)))
         sig = util.deflate_long(rsa.sign(self._pkcs1imify(digest), bytes())[0], 0)
         m = Message()
@@ -106,7 +107,7 @@ class RSAKey (PKey):
         # verify the signature by SHA'ing the data and encrypting it using the
         # public key.  some wackiness ensues where we "pkcs1imify" the 20-byte
         # hash into a string as long as the RSA key.
-        hash_obj = util.inflate_long(self._pkcs1imify(SHA.new(data).digest()), True)
+        hash_obj = util.inflate_long(self._pkcs1imify(sha1(data).digest()), True)
         rsa = RSA.construct((long(self.n), long(self.e)))
         return rsa.verify(hash_obj, (sig,))
 
@@ -22,9 +22,9 @@ Server-mode SFTP support.
 
 import os
 import errno
-from Crypto.Hash import MD5, SHA
 import sys
+from hashlib import md5, sha1
 
 from paramiko import util
 from paramiko.sftp import BaseSFTP, Message, SFTP_FAILURE, \
     SFTP_PERMISSION_DENIED, SFTP_NO_SUCH_FILE
@@ -45,8 +45,8 @@ from paramiko.sftp import CMD_HANDLE, SFTP_DESC, CMD_STATUS, SFTP_EOF, CMD_NAME, \
     CMD_READLINK, CMD_SYMLINK, CMD_REALPATH, CMD_EXTENDED, SFTP_OP_UNSUPPORTED
 
 _hash_class = {
-    'sha1': SHA,
-    'md5': MD5,
+    'sha1': sha1,
+    'md5': md5,
 }
 
 
@@ -82,14 +82,14 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
         self.file_table = {}
         self.folder_table = {}
         self.server = sftp_si(server, *largs, **kwargs)
 
     def _log(self, level, msg):
         if issubclass(type(msg), list):
             for m in msg:
                 super(SFTPServer, self)._log(level, "[chan " + self.sock.get_name() + "] " + m)
         else:
             super(SFTPServer, self)._log(level, "[chan " + self.sock.get_name() + "] " + msg)
 
     def start_subsystem(self, name, transport, channel):
         self.sock = channel
         self._log(DEBUG, 'Started sftp server on channel %s' % repr(channel))
@@ -157,7 +157,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
 
         This is meant to be a handy helper function for translating SFTP file
         requests into local file operations.
 
         :param str filename:
             name of the file to alter (should usually be an absolute path).
         :param .SFTPAttributes attr: attributes to change.
|
||||||
# don't try to read more than about 64KB at a time
|
# don't try to read more than about 64KB at a time
|
||||||
chunklen = min(blocklen, 65536)
|
chunklen = min(blocklen, 65536)
|
||||||
count = 0
|
count = 0
|
||||||
hash_obj = alg.new()
|
hash_obj = alg()
|
||||||
while count < blocklen:
|
while count < blocklen:
|
||||||
data = f.read(offset, chunklen)
|
data = f.read(offset, chunklen)
|
||||||
if not isinstance(data, bytes_types):
|
if not isinstance(data, bytes_types):
|
||||||
|
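
Note: with `_hash_class` now holding hashlib constructors, `alg()` replaces PyCrypto's `alg.new()`; the returned object accumulates data through `update()`. A standalone version of the loop, assuming a plain binary file object in place of paramiko's SFTP handle (whose `read(offset, length)` signature differs from the stdlib's):

    from hashlib import sha1

    def hash_block(f, blocklen, alg=sha1):
        chunklen = min(blocklen, 65536)  # cap reads around 64KB, as above
        count = 0
        hash_obj = alg()                 # fresh hash object per block
        while count < blocklen:
            data = f.read(min(chunklen, blocklen - count))
            if not data:
                break
            hash_obj.update(data)
            count += len(data)
        return hash_obj.digest()
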
@@ -298,7 +298,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
         msg.add_string(algname)
         msg.add_bytes(sum_out)
         self._send_packet(CMD_EXTENDED_REPLY, msg)
 
     def _convert_pflags(self, pflags):
         """convert SFTP-style open() flags to Python's os.open() flags"""
         if (pflags & SFTP_FLAG_READ) and (pflags & SFTP_FLAG_WRITE):
@@ -25,6 +25,7 @@ import sys
 import threading
 import time
 import weakref
+from hashlib import md5, sha1
 
 import paramiko
 from paramiko import util
@@ -59,7 +60,6 @@ from paramiko.util import retry_on_signal
 
 from Crypto import Random
 from Crypto.Cipher import Blowfish, AES, DES3, ARC4
-from Crypto.Hash import SHA, MD5
 try:
     from Crypto.Util import Counter
 except ImportError:
@@ -107,10 +107,10 @@ class Transport (threading.Thread):
     }
 
     _mac_info = {
-        'hmac-sha1': {'class': SHA, 'size': 20},
-        'hmac-sha1-96': {'class': SHA, 'size': 12},
-        'hmac-md5': {'class': MD5, 'size': 16},
-        'hmac-md5-96': {'class': MD5, 'size': 12},
+        'hmac-sha1': {'class': sha1, 'size': 20},
+        'hmac-sha1-96': {'class': sha1, 'size': 12},
+        'hmac-md5': {'class': md5, 'size': 16},
+        'hmac-md5-96': {'class': md5, 'size': 12},
     }
 
     _key_info = {
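
Note: a subtlety of this table. PyCrypto's `SHA`/`MD5` were modules carrying a `digest_size` attribute, while hashlib's `sha1`/`md5` are bare constructors, so size metadata now requires an instance. That is why the -1719 and -1746 hunks below change `mac_engine.digest_size` to `mac_engine().digest_size`:

    from hashlib import sha1

    mac_engine = sha1
    assert mac_engine().digest_size == 20  # attribute lives on the instance
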
@@ -1338,13 +1338,13 @@ class Transport (threading.Thread):
         m.add_bytes(self.H)
         m.add_byte(b(id))
         m.add_bytes(self.session_id)
-        out = sofar = SHA.new(m.asbytes()).digest()
+        out = sofar = sha1(m.asbytes()).digest()
         while len(out) < nbytes:
             m = Message()
             m.add_mpint(self.K)
             m.add_bytes(self.H)
             m.add_bytes(sofar)
-            digest = SHA.new(m.asbytes()).digest()
+            digest = sha1(m.asbytes()).digest()
             out += digest
             sofar += digest
         return out[:nbytes]
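
Note: this is the RFC 4253 section 7.2 key derivation: hash K‖H‖X‖session_id, then keep appending hashes of K‖H‖(output so far) until enough material exists. A sketch with the mpint packing of K precomputed into bytes (the function and argument names are mine, not paramiko's):

    from hashlib import sha1

    def compute_key(packed_K, H, X, session_id, nbytes):
        # packed_K: shared secret K already encoded as an SSH mpint (bytes).
        # X: single identifying byte, e.g. b'E' or b'F' for the MAC keys.
        out = sofar = sha1(packed_K + H + X + session_id).digest()
        while len(out) < nbytes:
            digest = sha1(packed_K + H + sofar).digest()
            out += digest
            sofar += digest
        return out[:nbytes]
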
@@ -1719,9 +1719,9 @@ class Transport (threading.Thread):
         # initial mac keys are done in the hash's natural size (not the potentially truncated
         # transmission size)
         if self.server_mode:
-            mac_key = self._compute_key('E', mac_engine.digest_size)
+            mac_key = self._compute_key('E', mac_engine().digest_size)
         else:
-            mac_key = self._compute_key('F', mac_engine.digest_size)
+            mac_key = self._compute_key('F', mac_engine().digest_size)
         self.packetizer.set_inbound_cipher(engine, block_size, mac_engine, mac_size, mac_key)
         compress_in = self._compression_info[self.remote_compression][1]
         if (compress_in is not None) and ((self.remote_compression != 'zlib@openssh.com') or self.authenticated):
@@ -1746,9 +1746,9 @@ class Transport (threading.Thread):
         # initial mac keys are done in the hash's natural size (not the potentially truncated
         # transmission size)
         if self.server_mode:
-            mac_key = self._compute_key('F', mac_engine.digest_size)
+            mac_key = self._compute_key('F', mac_engine().digest_size)
         else:
-            mac_key = self._compute_key('E', mac_engine.digest_size)
+            mac_key = self._compute_key('E', mac_engine().digest_size)
         sdctr = self.local_cipher.endswith('-ctr')
         self.packetizer.set_outbound_cipher(engine, block_size, mac_engine, mac_size, mac_key, sdctr)
         compress_out = self._compression_info[self.local_compression][0]
@@ -143,15 +143,14 @@ def tb_strings():
     return ''.join(traceback.format_exception(*sys.exc_info())).split('\n')
 
 
-def generate_key_bytes(hashclass, salt, key, nbytes):
+def generate_key_bytes(hash_alg, salt, key, nbytes):
     """
     Given a password, passphrase, or other human-source key, scramble it
     through a secure hash into some keyworthy bytes.  This specific algorithm
     is used for encrypting/decrypting private key files.
 
-    :param class hashclass:
-        class from `Crypto.Hash` that can be used as a secure hashing function
-        (like ``MD5`` or ``SHA``).
+    :param function hash_alg: A function which creates a new hash object, such
+        as ``hashlib.sha256``.
     :param salt: data to salt the hash with.
     :type salt: byte string
     :param str key: human-entered password or passphrase.
@@ -163,7 +162,7 @@ def generate_key_bytes(hashclass, salt, key, nbytes):
     if len(salt) > 8:
         salt = salt[:8]
     while nbytes > 0:
-        hash_obj = hashclass.new()
+        hash_obj = hash_alg()
         if len(digest) > 0:
             hash_obj.update(digest)
         hash_obj.update(b(key))
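
Note: with the parameter generalized to any hashlib-style callable, the call sites earlier in this diff (pkey.py) and the tests near the bottom exercise it like so (the salt values mirror those tests):

    from hashlib import md5, sha1
    from paramiko import util

    key = util.generate_key_bytes(md5, b'\x12\x34\x56\x78', 'happy birthday', 30)
    assert len(key) == 30
    # Any constructor returning an object with update()/digest() works:
    key64 = util.generate_key_bytes(sha1, b'ABCDEFGH', 'This is my secret passphrase.', 64)
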

--- a/setup.py
+++ b/setup.py
@@ -40,9 +40,10 @@ import sys
 try:
     from setuptools import setup
     kw = {
-        'install_requires': ['pycrypto >= 2.1, != 2.4',
-                             'ecdsa',
-                             ],
+        'install_requires': [
+            'pycrypto >= 2.1, != 2.4',
+            'ecdsa',
+        ],
     }
 except ImportError:
     from distutils.core import setup
Binary file not shown (image; before: 6.3 KiB).
@@ -5,16 +5,11 @@ sys.path.append(os.path.abspath('../..'))
 from shared_conf import *
 
 # Enable autodoc, intersphinx
-extensions.extend(['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'])
+extensions.extend(['sphinx.ext.autodoc'])
 
 # Autodoc settings
 autodoc_default_flags = ['members', 'special-members']
 
-# Intersphinx connection to stdlib
-intersphinx_mapping = {
-    'python': ('http://docs.python.org/2.6', None),
-}
-
 # Sister-site links to WWW
 html_theme_options['extra_nav_links'] = {
     "Main website": 'http://www.paramiko.org',
@@ -1,15 +1,12 @@
 from datetime import datetime
-import os
-import sys
 
 import alabaster
 
 
 # Alabaster theme + mini-extension
 html_theme_path = [alabaster.get_path()]
-extensions = ['alabaster']
+extensions = ['alabaster', 'sphinx.ext.intersphinx']
 # Paths relative to invoking conf.py - not this shared file
-html_static_path = ['../_shared_static']
 html_theme = 'alabaster'
 html_theme_options = {
     'description': "A Python implementation of SSHv2.",
@@ -17,9 +14,6 @@ html_theme_options = {
     'github_repo': 'paramiko',
     'gittip_user': 'bitprophet',
     'analytics_id': 'UA-18486793-2',
-
-    'link': '#3782BE',
-    'link_hover': '#3782BE',
 }
 html_sidebars = {
     '**': [
@@ -30,6 +24,11 @@ html_sidebars = {
     ]
 }
 
+# Everything intersphinx's to Python
+intersphinx_mapping = {
+    'python': ('http://docs.python.org/2.6', None),
+}
+
 # Regular settings
 project = 'Paramiko'
 year = datetime.now().year
@@ -1,140 +0,0 @@
-from collections import namedtuple
-from datetime import datetime
-import time
-import email.utils
-
-from sphinx.util.compat import Directive
-from docutils import nodes
-
-
-class BlogDateDirective(Directive):
-    """
-    Used to parse/attach date info to blog post documents.
-
-    No nodes generated, since none are needed.
-    """
-    has_content = True
-
-    def run(self):
-        # Tag parent document with parsed date value.
-        self.state.document.blog_date = datetime.strptime(
-            self.content[0], "%Y-%m-%d"
-        )
-        # Don't actually insert any nodes, we're already done.
-        return []
-
-class blog_post_list(nodes.General, nodes.Element):
-    pass
-
-class BlogPostListDirective(Directive):
-    """
-    Simply spits out a 'blog_post_list' temporary node for replacement.
-
-    Gets replaced at doctree-resolved time - only then will all blog post
-    documents be written out (& their date directives executed).
-    """
-    def run(self):
-        return [blog_post_list('')]
-
-
-Post = namedtuple('Post', 'name doc title date opener')
-
-def get_posts(app):
-    # Obtain blog posts
-    post_names = filter(lambda x: x.startswith('blog/'), app.env.found_docs)
-    posts = map(lambda x: (x, app.env.get_doctree(x)), post_names)
-    # Obtain common data used for list page & RSS
-    data = []
-    for post, doc in sorted(posts, key=lambda x: x[1].blog_date, reverse=True):
-        # Welp. No "nice" way to get post title. Thanks Sphinx.
-        title = doc[0][0][0]
-        # Date. This may or may not end up reflecting the required
-        # *input* format, but doing it here gives us flexibility.
-        date = doc.blog_date
-        # 1st paragraph as opener. TODO: allow a role or something marking
-        # where to actually pull from?
-        opener = doc.traverse(nodes.paragraph)[0]
-        data.append(Post(post, doc, title, date, opener))
-    return data
-
-def replace_blog_post_lists(app, doctree, fromdocname):
-    """
-    Replace blog_post_list nodes with ordered list-o-links to posts.
-    """
-    # Obtain blog posts
-    post_names = filter(lambda x: x.startswith('blog/'), app.env.found_docs)
-    posts = map(lambda x: (x, app.env.get_doctree(x)), post_names)
-    # Build "list" of links/etc
-    post_links = []
-    for post, doc, title, date, opener in get_posts(app):
-        # Link itself
-        uri = app.builder.get_relative_uri(fromdocname, post)
-        link = nodes.reference('', '', refdocname=post, refuri=uri)
-        # Title, bolded. TODO: use 'topic' or something maybe?
-        link.append(nodes.strong('', title))
-        date = date.strftime("%Y-%m-%d")
-        # Meh @ not having great docutils nodes which map to this.
-        html = '<div class="timestamp"><span>%s</span></div>' % date
-        timestamp = nodes.raw(text=html, format='html')
-        # NOTE: may group these within another element later if styling
-        # necessitates it
-        group = [timestamp, nodes.paragraph('', '', link), opener]
-        post_links.extend(group)
-
-    # Replace temp node(s) w/ expanded list-o-links
-    for node in doctree.traverse(blog_post_list):
-        node.replace_self(post_links)
-
-def rss_timestamp(timestamp):
-    # Use horribly inappropriate module for its magical daylight-savings-aware
-    # timezone madness. Props to Tinkerer for the idea.
-    return email.utils.formatdate(
-        time.mktime(timestamp.timetuple()),
-        localtime=True
-    )
-
-def generate_rss(app):
-    # Meh at having to run this subroutine like 3x per build. Not worth trying
-    # to be clever for now tho.
-    posts_ = get_posts(app)
-    # LOL URLs
-    root = app.config.rss_link
-    if not root.endswith('/'):
-        root += '/'
-    # Oh boy
-    posts = [
-        (
-            root + app.builder.get_target_uri(x.name),
-            x.title,
-            str(x.opener[0]), # Grab inner text element from paragraph
-            rss_timestamp(x.date),
-        )
-        for x in posts_
-    ]
-    location = 'blog/rss.xml'
-    context = {
-        'title': app.config.project,
-        'link': root,
-        'atom': root + location,
-        'description': app.config.rss_description,
-        # 'posts' is sorted by date already
-        'date': rss_timestamp(posts_[0].date),
-        'posts': posts,
-    }
-    yield (location, context, 'rss.xml')
-
-def setup(app):
-    # Link in RSS feed back to main website, e.g. 'http://paramiko.org'
-    app.add_config_value('rss_link', None, '')
-    # Ditto for RSS description field
-    app.add_config_value('rss_description', None, '')
-    # Interprets date metadata in blog post documents
-    app.add_directive('date', BlogDateDirective)
-    # Inserts blog post list node (in e.g. a listing page) for replacement
-    # below
-    app.add_node(blog_post_list)
-    app.add_directive('blog-posts', BlogPostListDirective)
-    # Performs abovementioned replacement
-    app.connect('doctree-resolved', replace_blog_post_lists)
-    # Generates RSS page from whole cloth at page generation step
-    app.connect('html-collect-pages', generate_rss)
@@ -1,16 +0,0 @@
-====
-Blog
-====
-
-.. blog-posts directive gets replaced with an ordered list of blog posts.
-
-.. blog-posts::
-
-
-.. The following toctree ensures blog posts get processed.
-
-.. toctree::
-    :hidden:
-    :glob:
-
-    blog/*
@@ -1,7 +0,0 @@
-===========
-First post!
-===========
-
-A blog post.
-
-.. date:: 2013-12-04
@@ -1,7 +0,0 @@
-===========
-Another one
-===========
-
-.. date:: 2013-12-05
-
-Indeed!
@@ -2,6 +2,8 @@
 Changelog
 =========
 
+* :support:`295` Swap out a bunch of PyCrypto hash functions with use of
+  `hashlib`. Thanks to Alex Gaynor.
 * :support:`290` (also :issue:`292`) Add support for building universal
   (Python 2+3 compatible) wheel files during the release process. Courtesy of
   Alex Gaynor.
@@ -6,15 +6,10 @@ from os.path import abspath, join, dirname
 sys.path.append(abspath(join(dirname(__file__), '..')))
 from shared_conf import *
 
-# Local blog extension
-sys.path.append(abspath('.'))
-extensions.append('blog')
-rss_link = 'http://paramiko.org'
-rss_description = 'Paramiko project news'
-
 # Releases changelog extension
 extensions.append('releases')
-releases_release_uri = "https://github.com/paramiko/paramiko/tree/%s"
+# Paramiko 1.x tags start with 'v'. Meh.
+releases_release_uri = "https://github.com/paramiko/paramiko/tree/v%s"
 releases_issue_uri = "https://github.com/paramiko/paramiko/issues/%s"
 
 # Intersphinx for referencing API/usage docs
@@ -25,9 +20,7 @@ target = join(dirname(__file__), '..', 'docs', '_build')
 if os.environ.get('READTHEDOCS') == 'True':
     # TODO: switch to docs.paramiko.org post go-live of sphinx API docs
     target = 'http://docs.paramiko.org/en/latest/'
-intersphinx_mapping = {
-    'docs': (target, None),
-}
+intersphinx_mapping['docs'] = (target, None)
 
 # Sister-site links to API docs
 html_theme_options['extra_nav_links'] = {
@@ -11,20 +11,17 @@ contribution guidelines, development roadmap, news/blog, and so forth. Detailed
 usage and API documentation can be found at our code documentation site,
 `docs.paramiko.org <http://docs.paramiko.org>`_.
 
+Please see the sidebar to the left to begin.
+
 .. toctree::
+    :hidden:
 
     changelog
     FAQs <faq>
     installing
     contributing
     contact
 
-.. Hide blog in hidden toctree for now (to avoid warnings.)
-
-.. toctree::
-    :hidden:
-
-    blog
-
 
 .. rubric:: Footnotes
@@ -21,9 +21,12 @@ Some unit tests for the ssh2 protocol in Transport.
 """
 
 import unittest
+from hashlib import sha1
 
 from tests.loop import LoopSocket
 
 from Crypto.Cipher import AES
-from Crypto.Hash import SHA
+
 from paramiko import Message, Packetizer, util
 from paramiko.common import byte_chr, zero_byte
 
@@ -41,7 +44,7 @@ class PacketizerTest (unittest.TestCase):
         p.set_log(util.get_logger('paramiko.transport'))
         p.set_hexdump(True)
         cipher = AES.new(zero_byte * 16, AES.MODE_CBC, x55 * 16)
-        p.set_outbound_cipher(cipher, 16, SHA, 12, x1f * 20)
+        p.set_outbound_cipher(cipher, 16, sha1, 12, x1f * 20)
 
         # message has to be at least 16 bytes long, so we'll have at least one
         # block of data encrypted that contains zero random padding bytes
@@ -64,7 +67,7 @@ class PacketizerTest (unittest.TestCase):
         p.set_log(util.get_logger('paramiko.transport'))
         p.set_hexdump(True)
         cipher = AES.new(zero_byte * 16, AES.MODE_CBC, x55 * 16)
-        p.set_inbound_cipher(cipher, 16, SHA, 12, x1f * 20)
+        p.set_inbound_cipher(cipher, 16, sha1, 12, x1f * 20)
         wsock.send(b'\x43\x91\x97\xbd\x5b\x50\xac\x25\x87\xc2\xc4\x6b\xc7\xe9\x38\xc0\x90\xd2\x16\x56\x0d\x71\x73\x61\x38\x7c\x4c\x3d\xfb\x97\x7d\xe2\x6e\x03\xb1\xa0\xc2\x1c\xd6\x41\x41\x4c\xb4\x59')
         cmd, m = p.read_message()
         self.assertEqual(100, cmd)
@@ -20,11 +20,14 @@
 Some unit tests for public/private key objects.
 """
 
-from binascii import hexlify
 import unittest
+from binascii import hexlify
+from hashlib import md5
 
 from paramiko import RSAKey, DSSKey, ECDSAKey, Message, util
 from paramiko.py3compat import StringIO, byte_chr, b, bytes
 from paramiko.common import rng
 
 from tests.util import test_path
 
 # from openssh's ssh-keygen
@@ -90,8 +93,7 @@ class KeyTest (unittest.TestCase):
         pass
 
     def test_1_generate_key_bytes(self):
-        from Crypto.Hash import MD5
-        key = util.generate_key_bytes(MD5, x1234, 'happy birthday', 30)
+        key = util.generate_key_bytes(md5, x1234, 'happy birthday', 30)
         exp = b'\x61\xE1\xF2\x72\xF4\xC1\xC4\x56\x15\x86\xBD\x32\x24\x98\xC0\xE9\x24\x67\x27\x80\xF4\x7B\xB3\x7D\xDA\x7D\x54\x01\x9E\x64'
         self.assertEqual(exp, key)
 
@@ -23,7 +23,8 @@ Some unit tests for utility functions.
 from binascii import hexlify
 import errno
 import os
-from Crypto.Hash import SHA
+from hashlib import sha1
+
 import paramiko.util
 from paramiko.util import lookup_ssh_host_config as host_config
 from paramiko.py3compat import StringIO, byte_ord
@@ -136,7 +137,7 @@ class UtilTest(ParamikoTest):
         )
 
     def test_4_generate_key_bytes(self):
-        x = paramiko.util.generate_key_bytes(SHA, b'ABCDEFGH', 'This is my secret passphrase.', 64)
+        x = paramiko.util.generate_key_bytes(sha1, b'ABCDEFGH', 'This is my secret passphrase.', 64)
         hex = ''.join(['%02x' % byte_ord(c) for c in x])
         self.assertEqual(hex, '9110e2f6793b69363e58173e9436b13a5a4b339005741d5c680e505f57d871347b4239f14fb5c46e857d5e100424873ba849ac699cea98d729e57b3e84378e8b')