commit 191fd465f1

Merge branch 'master' into use-urandom

Conflicts:
	paramiko/dsskey.py
	paramiko/ecdsakey.py
	paramiko/hostkeys.py
	paramiko/kex_gex.py
	paramiko/kex_group1.py
	paramiko/pkey.py
	paramiko/primes.py
	paramiko/rsakey.py
	tests/test_pkey.py
dev-requirements.txt

@@ -5,5 +5,5 @@ tox>=1.4,<1.5
 invoke>=0.7.0
 invocations>=0.5.0
 sphinx>=1.1.3
-alabaster>=0.3.1
+alabaster>=0.4.0
 releases>=0.5.2

paramiko/dsskey.py

@@ -21,9 +21,9 @@ DSS keys.
 """
 
 import os
+from hashlib import sha1
 
 from Crypto.PublicKey import DSA
-from Crypto.Hash import SHA
 
 from paramiko import util
 from paramiko.common import zero_byte

@@ -98,7 +98,7 @@ class DSSKey (PKey):
         return self.x is not None
 
     def sign_ssh_data(self, data):
-        digest = SHA.new(data).digest()
+        digest = sha1(data).digest()
         dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q), long(self.x)))
         # generate a suitable k
         qsize = len(util.deflate_long(self.q, 0))

@@ -132,7 +132,7 @@ class DSSKey (PKey):
         # pull out (r, s) which are NOT encoded as mpints
         sigR = util.inflate_long(sig[:20], 1)
         sigS = util.inflate_long(sig[20:], 1)
-        sigM = util.inflate_long(SHA.new(data).digest(), 1)
+        sigM = util.inflate_long(sha1(data).digest(), 1)
 
         dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q)))
         return dss.verify(sigM, (sigR, sigS))

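Note on the pattern repeated throughout this merge: PyCrypto hash modules are used as SHA.new(data), while hashlib exposes plain constructors, so the same call becomes sha1(data). Both return objects with identical digest()/hexdigest() interfaces, which is why call sites translate one-for-one. A minimal stdlib-only check (illustrative only, Python 3 syntax; not part of the commit):

    from hashlib import sha1

    digest = sha1(b'payload').digest()        # new spelling
    assert len(digest) == sha1().digest_size  # SHA-1 digests are 20 bytes
    assert sha1(b'payload').hexdigest() == digest.hex()
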
paramiko/ecdsakey.py

@@ -21,13 +21,11 @@ L{ECDSAKey}
 """
 
 import binascii
 import os
+from hashlib import sha256
 
 from ecdsa import SigningKey, VerifyingKey, der, curves
-from ecdsa.test_pyecdsa import ECDSA
-
-from Crypto.Hash import SHA256
 
 from paramiko.common import four_byte, one_byte
 from paramiko.message import Message
 from paramiko.pkey import PKey

@@ -101,7 +99,7 @@ class ECDSAKey (PKey):
         return self.signing_key is not None
 
    def sign_ssh_data(self, data):
-        digest = SHA256.new(data).digest()
+        digest = sha256(data).digest()
         sig = self.signing_key.sign_digest(digest, sigencode=self._sigencode)
         m = Message()
         m.add_string('ecdsa-sha2-nistp256')

@@ -115,7 +113,7 @@ class ECDSAKey (PKey):
 
         # verify the signature by SHA'ing the data and encrypting it
         # using the public key.
-        hash_obj = SHA256.new(data).digest()
+        hash_obj = sha256(data).digest()
         return self.verifying_key.verify_digest(sig, hash_obj,
                                                 sigdecode=self._sigdecode)
 

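The ecdsa package signs and verifies precomputed digests directly, which is why this class only needed its hash call swapped. A rough standalone sketch of the sign/verify round trip (NIST256p and the digest methods are from the ecdsa library; this is not paramiko code):

    from hashlib import sha256
    from ecdsa import SigningKey, NIST256p

    sk = SigningKey.generate(curve=NIST256p)
    digest = sha256(b'payload').digest()
    sig = sk.sign_digest(digest)             # sign the 32-byte digest
    vk = sk.get_verifying_key()
    assert vk.verify_digest(sig, digest)     # raises BadSignatureError on mismatch
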
paramiko/hostkeys.py

@@ -20,7 +20,8 @@
 import binascii
 import os
 
-from Crypto.Hash import SHA, HMAC
+from hashlib import sha1
+from hmac import HMAC
 
 from paramiko.py3compat import b, u, encodebytes, decodebytes
 

@@ -264,13 +265,13 @@ class HostKeys (MutableMapping):
         :return: the hashed hostname as a `str`
         """
         if salt is None:
-            salt = os.urandom(SHA.digest_size)
+            salt = os.urandom(sha1().digest_size)
         else:
             if salt.startswith('|1|'):
                 salt = salt.split('|')[2]
             salt = decodebytes(b(salt))
-        assert len(salt) == SHA.digest_size
-        hmac = HMAC.HMAC(salt, b(hostname), SHA).digest()
+        assert len(salt) == sha1().digest_size
+        hmac = HMAC(salt, b(hostname), sha1).digest()
         hostkey = '|1|%s|%s' % (u(encodebytes(salt)), u(encodebytes(hmac)))
         return hostkey.replace('\n', '')
     hash_host = staticmethod(hash_host)

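For reference, the output format produced here is OpenSSH's hashed known_hosts scheme, |1|base64(salt)|base64(hmac). A simplified Python 3 sketch of the same logic (the real method also accepts a caller-supplied salt in |1|... form, as the hunk above shows):

    import os
    from base64 import b64encode
    from hashlib import sha1
    from hmac import HMAC

    def hash_host(hostname):
        # random salt, then HMAC-SHA1 of the hostname under that salt
        salt = os.urandom(sha1().digest_size)
        mac = HMAC(salt, hostname.encode(), sha1).digest()
        return '|1|%s|%s' % (b64encode(salt).decode(), b64encode(mac).decode())
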
paramiko/kex_gex.py

@@ -23,8 +23,7 @@ client side, and a B{lot} more on the server side.
 """
 
 import os
-
-from Crypto.Hash import SHA
+from hashlib import sha1
 
 from paramiko import util
 from paramiko.common import DEBUG

@@ -205,7 +204,7 @@ class KexGex (object):
         hm.add_mpint(self.e)
         hm.add_mpint(self.f)
         hm.add_mpint(K)
-        H = SHA.new(hm.asbytes()).digest()
+        H = sha1(hm.asbytes()).digest()
         self.transport._set_K_H(K, H)
         # sign it
         sig = self.transport.get_server_key().sign_ssh_data(H)

@@ -240,6 +239,6 @@ class KexGex (object):
         hm.add_mpint(self.e)
         hm.add_mpint(self.f)
         hm.add_mpint(K)
-        self.transport._set_K_H(K, SHA.new(hm.asbytes()).digest())
+        self.transport._set_K_H(K, sha1(hm.asbytes()).digest())
         self.transport._verify_key(host_key, sig)
         self.transport._activate_outbound()

paramiko/kex_group1.py

@@ -22,8 +22,7 @@ Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of
 """
 
 import os
-
-from Crypto.Hash import SHA
+from hashlib import sha1
 
 from paramiko import util
 from paramiko.common import max_byte, zero_byte

@@ -107,7 +106,7 @@ class KexGroup1(object):
         hm.add_mpint(self.e)
         hm.add_mpint(self.f)
         hm.add_mpint(K)
-        self.transport._set_K_H(K, SHA.new(hm.asbytes()).digest())
+        self.transport._set_K_H(K, sha1(hm.asbytes()).digest())
         self.transport._verify_key(host_key, sig)
         self.transport._activate_outbound()
 
@@ -126,7 +125,7 @@ class KexGroup1(object):
         hm.add_mpint(self.e)
         hm.add_mpint(self.f)
         hm.add_mpint(K)
-        H = SHA.new(hm.asbytes()).digest()
+        H = sha1(hm.asbytes()).digest()
         self.transport._set_K_H(K, H)
         # sign it
         sig = self.transport.get_server_key().sign_ssh_data(H)

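In both kex classes the exchange hash H is simply SHA-1 over the packed handshake fields; hm is a paramiko Message whose add_mpint/add_string calls do the SSH wire framing. A loose illustration with hand-rolled length framing (the real code uses Message and proper mpint encoding, so this is only a sketch of the shape):

    import struct
    from hashlib import sha1

    def exchange_hash(*fields):
        # each field is length-prefixed, then the whole buffer is hashed once
        buf = b''.join(struct.pack('>I', len(f)) + f for f in fields)
        return sha1(buf).digest()

    H = exchange_hash(b'SSH-2.0-client', b'SSH-2.0-server',
                      b'<kexinit blobs>', b'<host key>', b'<e>', b'<f>', b'<K>')
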
paramiko/packet.py

@@ -26,6 +26,7 @@ import socket
 import struct
 import threading
 import time
+from hmac import HMAC
 
 from paramiko import util
 from paramiko.common import linefeed_byte, cr_byte_value, asbytes, MSG_NAMES, \

@@ -35,12 +36,6 @@ from paramiko.ssh_exception import SSHException, ProxyCommandFailure
 from paramiko.message import Message
 
 
-try:
-    from r_hmac import HMAC
-except ImportError:
-    from Crypto.Hash.HMAC import HMAC
-
-
 def compute_hmac(key, message, digest_class):
     return HMAC(key, message, digest_class).digest()
 

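hmac.HMAC accepts any hashlib-style constructor as its digest module, so compute_hmac keeps its exact signature. Truncated MACs such as hmac-sha1-96 just slice the result; a small illustration (the 12-byte slice mirrors the mac_size handling elsewhere in the Packetizer):

    from hashlib import sha1
    from hmac import HMAC

    def compute_hmac(key, message, digest_class):
        return HMAC(key, message, digest_class).digest()

    full = compute_hmac(b'k' * 20, b'payload', sha1)   # 20 bytes
    mac96 = full[:12]                                  # hmac-sha1-96 sends only 12
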
@@ -360,7 +355,7 @@ class Packetizer (object):
             raise SSHException('Mismatched MAC')
         padding = byte_ord(packet[0])
         payload = packet[1:packet_size - padding]
-
+
         if self.__dump_packets:
             self._log(DEBUG, 'Got payload (%d bytes, %d padding)' % (packet_size, padding))
 

paramiko/pkey.py

@@ -23,8 +23,8 @@ Common API for all public keys.
 import base64
 from binascii import hexlify, unhexlify
 import os
+from hashlib import md5
 
-from Crypto.Hash import MD5
 from Crypto.Cipher import DES3, AES
 
 from paramiko import util

@@ -126,7 +126,7 @@ class PKey (object):
             a 16-byte `string <str>` (binary) of the MD5 fingerprint, in SSH
             format.
         """
-        return MD5.new(self.asbytes()).digest()
+        return md5(self.asbytes()).digest()
 
     def get_base64(self):
         """

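get_fingerprint returns the raw 16 MD5 bytes; the familiar colon-separated fingerprint is just a hex rendering of them. Illustrative only (Python 3 syntax; blob stands in for self.asbytes() and is a hypothetical value):

    from hashlib import md5

    blob = b'\x00\x00\x00\x07ssh-rsa'      # hypothetical public-key blob prefix
    fp = md5(blob).digest()                # 16 raw bytes
    print(':'.join('%02x' % c for c in fp))
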
@@ -299,7 +299,7 @@ class PKey (object):
         keysize = self._CIPHER_TABLE[encryption_type]['keysize']
         mode = self._CIPHER_TABLE[encryption_type]['mode']
         salt = unhexlify(b(saltstr))
-        key = util.generate_key_bytes(MD5, salt, password, keysize)
+        key = util.generate_key_bytes(md5, salt, password, keysize)
         return cipher.new(key, mode, salt).decrypt(data)
 
     def _write_private_key_file(self, tag, filename, data, password=None):

@@ -331,7 +331,7 @@ class PKey (object):
             blocksize = self._CIPHER_TABLE[cipher_name]['blocksize']
             mode = self._CIPHER_TABLE[cipher_name]['mode']
             salt = os.urandom(16)
-            key = util.generate_key_bytes(MD5, salt, password, keysize)
+            key = util.generate_key_bytes(md5, salt, password, keysize)
             if len(data) % blocksize != 0:
                 n = blocksize - len(data) % blocksize
                 #data += os.urandom(n)

paramiko/primes.py

@@ -22,31 +22,11 @@ Utility functions for dealing with primes.
 
 import os
 
-from Crypto.Util import number
-
 from paramiko import util
 from paramiko.py3compat import byte_mask, long
 from paramiko.ssh_exception import SSHException
 
 
-def _generate_prime(bits):
-    """primtive attempt at prime generation"""
-    hbyte_mask = pow(2, bits % 8) - 1
-    while True:
-        # loop catches the case where we increment n into a higher bit-range
-        x = os.urandom((bits + 7) // 8)
-        if hbyte_mask > 0:
-            x = byte_mask(x[0], hbyte_mask) + x[1:]
-        n = util.inflate_long(x, 1)
-        n |= 1
-        n |= (1 << (bits - 1))
-        while not number.isPrime(n):
-            n += 2
-        if util.bit_length(n) == bits:
-            break
-    return n
-
-
 def _roll_random(n):
     """returns a random # from 0 to N-1"""
     bits = util.bit_length(n - 1)

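With _generate_prime gone, _roll_random remains the main consumer of randomness here. Its approach is rejection sampling: draw just enough random bytes for the range, drop the excess high bits, and retry until the value falls below n. A compact Python 3 rendering of that idea (the real function keeps Python 2 compatibility via the py3compat helpers):

    import os

    def roll_random(n):
        """Return a uniformly random integer in [0, n)."""
        bits = (n - 1).bit_length()
        nbytes = (bits + 7) // 8
        while True:
            x = int.from_bytes(os.urandom(nbytes), 'big')
            x >>= (nbytes * 8 - bits)    # mask off excess high bits
            if x < n:
                return x                 # each draw succeeds with probability >= 1/2
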
paramiko/rsakey.py

@@ -21,9 +21,9 @@ RSA keys.
 """
 
 import os
+from hashlib import sha1
 
 from Crypto.PublicKey import RSA
-from Crypto.Hash import SHA
 
 from paramiko import util
 from paramiko.common import max_byte, zero_byte, one_byte

@@ -93,7 +93,7 @@ class RSAKey (PKey):
         return self.d is not None
 
     def sign_ssh_data(self, data):
-        digest = SHA.new(data).digest()
+        digest = sha1(data).digest()
         rsa = RSA.construct((long(self.n), long(self.e), long(self.d)))
         sig = util.deflate_long(rsa.sign(self._pkcs1imify(digest), bytes())[0], 0)
         m = Message()

@@ -108,7 +108,7 @@ class RSAKey (PKey):
         # verify the signature by SHA'ing the data and encrypting it using the
         # public key.  some wackiness ensues where we "pkcs1imify" the 20-byte
         # hash into a string as long as the RSA key.
-        hash_obj = util.inflate_long(self._pkcs1imify(SHA.new(data).digest()), True)
+        hash_obj = util.inflate_long(self._pkcs1imify(sha1(data).digest()), True)
         rsa = RSA.construct((long(self.n), long(self.e)))
         return rsa.verify(hash_obj, (sig,))
 

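The "pkcs1imify" step is EMSA-PKCS1-v1_5 padding: the SHA-1 digest is wrapped in the standard ASN.1 DigestInfo prefix for SHA-1 and padded with 0xFF bytes up to the key size. A sketch of that layout under those assumptions (not paramiko's exact code):

    from hashlib import sha1

    # standard DigestInfo prefix identifying SHA-1
    SHA1_DIGESTINFO = b'\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'

    def pkcs1imify(digest, key_size_bytes):
        # layout: 00 01 FF..FF 00 <DigestInfo> <digest>
        filler = b'\xff' * (key_size_bytes - len(SHA1_DIGESTINFO) - len(digest) - 3)
        return b'\x00\x01' + filler + b'\x00' + SHA1_DIGESTINFO + digest

    em = pkcs1imify(sha1(b'data').digest(), 256)   # e.g. a 2048-bit key
    assert len(em) == 256
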
paramiko/sftp_server.py

@@ -22,9 +22,9 @@ Server-mode SFTP support.
 
 import os
 import errno
-
-from Crypto.Hash import MD5, SHA
+import sys
+from hashlib import md5, sha1
 
 from paramiko import util
 from paramiko.sftp import BaseSFTP, Message, SFTP_FAILURE, \
     SFTP_PERMISSION_DENIED, SFTP_NO_SUCH_FILE

@@ -45,8 +45,8 @@ from paramiko.sftp import CMD_HANDLE, SFTP_DESC, CMD_STATUS, SFTP_EOF, CMD_NAME, \
     CMD_READLINK, CMD_SYMLINK, CMD_REALPATH, CMD_EXTENDED, SFTP_OP_UNSUPPORTED
 
 _hash_class = {
-    'sha1': SHA,
-    'md5': MD5,
+    'sha1': sha1,
+    'md5': md5,
 }
 
 
@@ -82,14 +82,14 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
         self.file_table = {}
         self.folder_table = {}
         self.server = sftp_si(server, *largs, **kwargs)
-
+
     def _log(self, level, msg):
         if issubclass(type(msg), list):
             for m in msg:
                 super(SFTPServer, self)._log(level, "[chan " + self.sock.get_name() + "] " + m)
         else:
             super(SFTPServer, self)._log(level, "[chan " + self.sock.get_name() + "] " + msg)
-
+
     def start_subsystem(self, name, transport, channel):
         self.sock = channel
         self._log(DEBUG, 'Started sftp server on channel %s' % repr(channel))

@@ -157,7 +157,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
 
         This is meant to be a handy helper function for translating SFTP file
         requests into local file operations.
-
+
         :param str filename:
             name of the file to alter (should usually be an absolute path).
         :param .SFTPAttributes attr: attributes to change.

@@ -281,7 +281,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
             # don't try to read more than about 64KB at a time
             chunklen = min(blocklen, 65536)
             count = 0
-            hash_obj = alg.new()
+            hash_obj = alg()
             while count < blocklen:
                 data = f.read(offset, chunklen)
                 if not isinstance(data, bytes_types):

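The alg.new() to alg() change is the other half of the hashlib swap: entries in _hash_class are now plain constructors, and the returned objects support incremental update() for hashing a file in chunks. Minimal illustration:

    from hashlib import sha1

    h = sha1()               # hashlib: call the factory, no .new()
    h.update(b'chunk one')
    h.update(b'chunk two')
    digest = h.digest()
    assert digest == sha1(b'chunk onechunk two').digest()
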
@@ -298,7 +298,7 @@ class SFTPServer (BaseSFTP, SubsystemHandler):
             msg.add_string(algname)
             msg.add_bytes(sum_out)
             self._send_packet(CMD_EXTENDED_REPLY, msg)
-
+
     def _convert_pflags(self, pflags):
         """convert SFTP-style open() flags to Python's os.open() flags"""
         if (pflags & SFTP_FLAG_READ) and (pflags & SFTP_FLAG_WRITE):

paramiko/transport.py

@@ -26,6 +26,7 @@ import sys
 import threading
 import time
 import weakref
+from hashlib import md5, sha1
 
 import paramiko
 from paramiko import util

@@ -59,7 +60,6 @@ from paramiko.ssh_exception import (SSHException, BadAuthenticationType,
 from paramiko.util import retry_on_signal
 
 from Crypto.Cipher import Blowfish, AES, DES3, ARC4
-from Crypto.Hash import SHA, MD5
 try:
     from Crypto.Util import Counter
 except ImportError:

@@ -107,10 +107,10 @@ class Transport (threading.Thread):
     }
 
     _mac_info = {
-        'hmac-sha1': {'class': SHA, 'size': 20},
-        'hmac-sha1-96': {'class': SHA, 'size': 12},
-        'hmac-md5': {'class': MD5, 'size': 16},
-        'hmac-md5-96': {'class': MD5, 'size': 12},
+        'hmac-sha1': {'class': sha1, 'size': 20},
+        'hmac-sha1-96': {'class': sha1, 'size': 12},
+        'hmac-md5': {'class': md5, 'size': 16},
+        'hmac-md5-96': {'class': md5, 'size': 12},
     }
 
     _key_info = {

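Because hashlib constructors are plain callables, the _mac_info table works unchanged at lookup time; only attribute access differs, since digest_size lives on hash objects rather than on the constructor. A sketch of how the table is consumed, with hypothetical local names:

    from hashlib import md5, sha1

    _mac_info = {
        'hmac-sha1': {'class': sha1, 'size': 20},
        'hmac-sha1-96': {'class': sha1, 'size': 12},
        'hmac-md5': {'class': md5, 'size': 16},
        'hmac-md5-96': {'class': md5, 'size': 12},
    }

    info = _mac_info['hmac-sha1-96']
    mac_engine, mac_size = info['class'], info['size']
    assert mac_engine().digest_size == 20   # natural size; 12 is the truncated wire size
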
@@ -1336,13 +1336,13 @@ class Transport (threading.Thread):
         m.add_bytes(self.H)
         m.add_byte(b(id))
         m.add_bytes(self.session_id)
-        out = sofar = SHA.new(m.asbytes()).digest()
+        out = sofar = sha1(m.asbytes()).digest()
         while len(out) < nbytes:
             m = Message()
             m.add_mpint(self.K)
             m.add_bytes(self.H)
             m.add_bytes(sofar)
-            digest = SHA.new(m.asbytes()).digest()
+            digest = sha1(m.asbytes()).digest()
             out += digest
             sofar += digest
         return out[:nbytes]

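This is the RFC 4253 section 7.2 key derivation: hash K, H and a tag byte, then keep re-hashing with the accumulated output until nbytes are available. A simplified standalone version (plain byte concatenation stands in for the Message/mpint framing the real method uses):

    from hashlib import sha1

    def compute_key(K, H, session_id, tag, nbytes):
        out = sofar = sha1(K + H + tag + session_id).digest()
        while len(out) < nbytes:
            digest = sha1(K + H + sofar).digest()
            out += digest
            sofar += digest
        return out[:nbytes]

    key = compute_key(b'<K mpint>', b'<H>', b'<session id>', b'A', 32)
    assert len(key) == 32
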
@@ -1713,9 +1713,9 @@ class Transport (threading.Thread):
         # initial mac keys are done in the hash's natural size (not the potentially truncated
         # transmission size)
         if self.server_mode:
-            mac_key = self._compute_key('E', mac_engine.digest_size)
+            mac_key = self._compute_key('E', mac_engine().digest_size)
         else:
-            mac_key = self._compute_key('F', mac_engine.digest_size)
+            mac_key = self._compute_key('F', mac_engine().digest_size)
         self.packetizer.set_inbound_cipher(engine, block_size, mac_engine, mac_size, mac_key)
         compress_in = self._compression_info[self.remote_compression][1]
         if (compress_in is not None) and ((self.remote_compression != 'zlib@openssh.com') or self.authenticated):

@@ -1740,9 +1740,9 @@ class Transport (threading.Thread):
         # initial mac keys are done in the hash's natural size (not the potentially truncated
         # transmission size)
         if self.server_mode:
-            mac_key = self._compute_key('F', mac_engine.digest_size)
+            mac_key = self._compute_key('F', mac_engine().digest_size)
         else:
-            mac_key = self._compute_key('E', mac_engine.digest_size)
+            mac_key = self._compute_key('E', mac_engine().digest_size)
         sdctr = self.local_cipher.endswith('-ctr')
         self.packetizer.set_outbound_cipher(engine, block_size, mac_engine, mac_size, mac_key, sdctr)
         compress_out = self._compression_info[self.local_compression][0]

paramiko/util.py

@@ -143,15 +143,14 @@ def tb_strings():
     return ''.join(traceback.format_exception(*sys.exc_info())).split('\n')
 
 
-def generate_key_bytes(hashclass, salt, key, nbytes):
+def generate_key_bytes(hash_alg, salt, key, nbytes):
     """
     Given a password, passphrase, or other human-source key, scramble it
     through a secure hash into some keyworthy bytes.  This specific algorithm
     is used for encrypting/decrypting private key files.
 
-    :param class hashclass:
-        class from `Crypto.Hash` that can be used as a secure hashing function
-        (like ``MD5`` or ``SHA``).
+    :param function hash_alg: A function which creates a new hash object, such
+        as ``hashlib.sha256``.
     :param salt: data to salt the hash with.
     :type salt: byte string
     :param str key: human-entered password or passphrase.

@@ -163,7 +162,7 @@ def generate_key_bytes(hashclass, salt, key, nbytes):
     if len(salt) > 8:
         salt = salt[:8]
     while nbytes > 0:
-        hash_obj = hashclass.new()
+        hash_obj = hash_alg()
         if len(digest) > 0:
             hash_obj.update(digest)
         hash_obj.update(b(key))

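Pieced together from the hunks above, the whole derivation loop looks roughly like this — a sketch consistent with the docstring, not the verbatim function (the real code routes key through py3compat's b() for Python 2/3 compatibility):

    from hashlib import md5

    def generate_key_bytes(hash_alg, salt, key, nbytes):
        digest = b''
        out = b''
        if len(salt) > 8:
            salt = salt[:8]
        while nbytes > 0:
            hash_obj = hash_alg()
            if len(digest) > 0:
                hash_obj.update(digest)   # chain the previous round's digest
            hash_obj.update(key.encode())
            hash_obj.update(salt)
            digest = hash_obj.digest()
            size = min(nbytes, len(digest))
            out += digest[:size]
            nbytes -= size
        return out

    key = generate_key_bytes(md5, b'\x12\x34\x56\x78', 'happy birthday', 30)
    assert len(key) == 30
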
setup.py

@@ -40,9 +40,10 @@ import sys
 try:
     from setuptools import setup
     kw = {
-        'install_requires': ['pycrypto >= 2.1, != 2.4',
-                             'ecdsa',
-                             ],
+        'install_requires': [
+            'pycrypto >= 2.1, != 2.4',
+            'ecdsa',
+        ],
     }
 except ImportError:
     from distutils.core import setup

(binary image file deleted, not shown: 6.3 KiB)

sites/docs/conf.py

@@ -5,12 +5,12 @@ sys.path.append(os.path.abspath('../..'))
 from shared_conf import *
 
 # Enable autodoc, intersphinx
-extensions.extend(['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'])
+extensions.extend(['sphinx.ext.autodoc'])
 
 # Autodoc settings
 autodoc_default_flags = ['members', 'special-members']
 
-# Intersphinx connection to stdlib
-intersphinx_mapping = {
-    'python': ('http://docs.python.org/2.6', None),
+# Sister-site links to WWW
+html_theme_options['extra_nav_links'] = {
+    "Main website": 'http://www.paramiko.org',
 }

sites/shared_conf.py

@@ -1,15 +1,12 @@
 from datetime import datetime
-import os
-import sys
 
 import alabaster
 
-
 # Alabaster theme + mini-extension
 html_theme_path = [alabaster.get_path()]
-extensions = ['alabaster']
+extensions = ['alabaster', 'sphinx.ext.intersphinx']
 # Paths relative to invoking conf.py - not this shared file
 html_static_path = ['../_shared_static']
 html_theme = 'alabaster'
 html_theme_options = {
     'description': "A Python implementation of SSHv2.",

@@ -17,9 +14,6 @@ html_theme_options = {
     'github_repo': 'paramiko',
     'gittip_user': 'bitprophet',
     'analytics_id': 'UA-18486793-2',
-
     'link': '#3782BE',
     'link_hover': '#3782BE',
 }
 html_sidebars = {
     '**': [

@@ -30,6 +24,11 @@ html_sidebars = {
     ]
 }
 
+# Everything intersphinx's to Python
+intersphinx_mapping = {
+    'python': ('http://docs.python.org/2.6', None),
+}
+
 # Regular settings
 project = 'Paramiko'
 year = datetime.now().year

sites/www/blog.py (deleted)

@@ -1,140 +0,0 @@
-from collections import namedtuple
-from datetime import datetime
-import time
-import email.utils
-
-from sphinx.util.compat import Directive
-from docutils import nodes
-
-
-class BlogDateDirective(Directive):
-    """
-    Used to parse/attach date info to blog post documents.
-
-    No nodes generated, since none are needed.
-    """
-    has_content = True
-
-    def run(self):
-        # Tag parent document with parsed date value.
-        self.state.document.blog_date = datetime.strptime(
-            self.content[0], "%Y-%m-%d"
-        )
-        # Don't actually insert any nodes, we're already done.
-        return []
-
-class blog_post_list(nodes.General, nodes.Element):
-    pass
-
-class BlogPostListDirective(Directive):
-    """
-    Simply spits out a 'blog_post_list' temporary node for replacement.
-
-    Gets replaced at doctree-resolved time - only then will all blog post
-    documents be written out (& their date directives executed).
-    """
-    def run(self):
-        return [blog_post_list('')]
-
-
-Post = namedtuple('Post', 'name doc title date opener')
-
-def get_posts(app):
-    # Obtain blog posts
-    post_names = filter(lambda x: x.startswith('blog/'), app.env.found_docs)
-    posts = map(lambda x: (x, app.env.get_doctree(x)), post_names)
-    # Obtain common data used for list page & RSS
-    data = []
-    for post, doc in sorted(posts, key=lambda x: x[1].blog_date, reverse=True):
-        # Welp. No "nice" way to get post title. Thanks Sphinx.
-        title = doc[0][0][0]
-        # Date. This may or may not end up reflecting the required
-        # *input* format, but doing it here gives us flexibility.
-        date = doc.blog_date
-        # 1st paragraph as opener. TODO: allow a role or something marking
-        # where to actually pull from?
-        opener = doc.traverse(nodes.paragraph)[0]
-        data.append(Post(post, doc, title, date, opener))
-    return data
-
-def replace_blog_post_lists(app, doctree, fromdocname):
-    """
-    Replace blog_post_list nodes with ordered list-o-links to posts.
-    """
-    # Obtain blog posts
-    post_names = filter(lambda x: x.startswith('blog/'), app.env.found_docs)
-    posts = map(lambda x: (x, app.env.get_doctree(x)), post_names)
-    # Build "list" of links/etc
-    post_links = []
-    for post, doc, title, date, opener in get_posts(app):
-        # Link itself
-        uri = app.builder.get_relative_uri(fromdocname, post)
-        link = nodes.reference('', '', refdocname=post, refuri=uri)
-        # Title, bolded. TODO: use 'topic' or something maybe?
-        link.append(nodes.strong('', title))
-        date = date.strftime("%Y-%m-%d")
-        # Meh @ not having great docutils nodes which map to this.
-        html = '<div class="timestamp"><span>%s</span></div>' % date
-        timestamp = nodes.raw(text=html, format='html')
-        # NOTE: may group these within another element later if styling
-        # necessitates it
-        group = [timestamp, nodes.paragraph('', '', link), opener]
-        post_links.extend(group)
-
-    # Replace temp node(s) w/ expanded list-o-links
-    for node in doctree.traverse(blog_post_list):
-        node.replace_self(post_links)
-
-def rss_timestamp(timestamp):
-    # Use horribly inappropriate module for its magical daylight-savings-aware
-    # timezone madness. Props to Tinkerer for the idea.
-    return email.utils.formatdate(
-        time.mktime(timestamp.timetuple()),
-        localtime=True
-    )
-
-def generate_rss(app):
-    # Meh at having to run this subroutine like 3x per build. Not worth trying
-    # to be clever for now tho.
-    posts_ = get_posts(app)
-    # LOL URLs
-    root = app.config.rss_link
-    if not root.endswith('/'):
-        root += '/'
-    # Oh boy
-    posts = [
-        (
-            root + app.builder.get_target_uri(x.name),
-            x.title,
-            str(x.opener[0]),  # Grab inner text element from paragraph
-            rss_timestamp(x.date),
-        )
-        for x in posts_
-    ]
-    location = 'blog/rss.xml'
-    context = {
-        'title': app.config.project,
-        'link': root,
-        'atom': root + location,
-        'description': app.config.rss_description,
-        # 'posts' is sorted by date already
-        'date': rss_timestamp(posts_[0].date),
-        'posts': posts,
-    }
-    yield (location, context, 'rss.xml')
-
-def setup(app):
-    # Link in RSS feed back to main website, e.g. 'http://paramiko.org'
-    app.add_config_value('rss_link', None, '')
-    # Ditto for RSS description field
-    app.add_config_value('rss_description', None, '')
-    # Interprets date metadata in blog post documents
-    app.add_directive('date', BlogDateDirective)
-    # Inserts blog post list node (in e.g. a listing page) for replacement
-    # below
-    app.add_node(blog_post_list)
-    app.add_directive('blog-posts', BlogPostListDirective)
-    # Performs abovementioned replacement
-    app.connect('doctree-resolved', replace_blog_post_lists)
-    # Generates RSS page from whole cloth at page generation step
-    app.connect('html-collect-pages', generate_rss)

sites/www/blog.rst (deleted)

@@ -1,16 +0,0 @@
-====
-Blog
-====
-
-.. blog-posts directive gets replaced with an ordered list of blog posts.
-
-.. blog-posts::
-
-
-.. The following toctree ensures blog posts get processed.
-
-.. toctree::
-    :hidden:
-    :glob:
-
-    blog/*

sites/www/blog/first-post.rst (deleted)

@@ -1,7 +0,0 @@
-===========
-First post!
-===========
-
-A blog post.
-
-.. date:: 2013-12-04

sites/www/blog/second-post.rst (deleted)

@@ -1,7 +0,0 @@
-===========
-Another one
-===========
-
-.. date:: 2013-12-05
-
-Indeed!

sites/www/changelog.rst

@@ -2,6 +2,8 @@
 Changelog
 =========
 
+* :support:`295` Swap out a bunch of PyCrypto hash functions with use of
+  `hashlib`. Thanks to Alex Gaynor.
 * :support:`290` (also :issue:`292`) Add support for building universal
   (Python 2+3 compatible) wheel files during the release process. Courtesy of
   Alex Gaynor.

sites/www/conf.py

@@ -6,15 +6,10 @@ from os.path import abspath, join, dirname
 sys.path.append(abspath(join(dirname(__file__), '..')))
 from shared_conf import *
 
-# Local blog extension
-sys.path.append(abspath('.'))
-extensions.append('blog')
-rss_link = 'http://paramiko.org'
-rss_description = 'Paramiko project news'
-
 # Releases changelog extension
 extensions.append('releases')
-releases_release_uri = "https://github.com/paramiko/paramiko/tree/%s"
+# Paramiko 1.x tags start with 'v'. Meh.
+releases_release_uri = "https://github.com/paramiko/paramiko/tree/v%s"
 releases_issue_uri = "https://github.com/paramiko/paramiko/issues/%s"
 
 # Intersphinx for referencing API/usage docs

@@ -25,9 +20,7 @@ target = join(dirname(__file__), '..', 'docs', '_build')
 if os.environ.get('READTHEDOCS') == 'True':
     # TODO: switch to docs.paramiko.org post go-live of sphinx API docs
     target = 'http://docs.paramiko.org/en/latest/'
-intersphinx_mapping = {
-    'docs': (target, None),
-}
+intersphinx_mapping['docs'] = (target, None)
 
 # Sister-site links to API docs
 html_theme_options['extra_nav_links'] = {

sites/www/index.rst

@@ -11,20 +11,17 @@ contribution guidelines, development roadmap, news/blog, and so forth. Detailed
 usage and API documentation can be found at our code documentation site,
 `docs.paramiko.org <http://docs.paramiko.org>`_.
 
 Please see the sidebar to the left to begin.
 
 .. toctree::
     :hidden:
 
     changelog
     FAQs <faq>
     installing
     contributing
     contact
 
-.. Hide blog in hidden toctree for now (to avoid warnings.)
-
-.. toctree::
-    :hidden:
-
-    blog
-
 
 .. rubric:: Footnotes
 

tests/test_packetizer.py

@@ -21,9 +21,12 @@ Some unit tests for the ssh2 protocol in Transport.
 """
 
 import unittest
+from hashlib import sha1
+
+from tests.loop import LoopSocket
+
 from Crypto.Cipher import AES
-from Crypto.Hash import SHA
 
 from paramiko import Message, Packetizer, util
 from paramiko.common import byte_chr, zero_byte
 

@@ -41,7 +44,7 @@ class PacketizerTest (unittest.TestCase):
         p.set_log(util.get_logger('paramiko.transport'))
         p.set_hexdump(True)
         cipher = AES.new(zero_byte * 16, AES.MODE_CBC, x55 * 16)
-        p.set_outbound_cipher(cipher, 16, SHA, 12, x1f * 20)
+        p.set_outbound_cipher(cipher, 16, sha1, 12, x1f * 20)
 
         # message has to be at least 16 bytes long, so we'll have at least one
         # block of data encrypted that contains zero random padding bytes

@@ -64,7 +67,7 @@ class PacketizerTest (unittest.TestCase):
         p.set_log(util.get_logger('paramiko.transport'))
         p.set_hexdump(True)
         cipher = AES.new(zero_byte * 16, AES.MODE_CBC, x55 * 16)
-        p.set_inbound_cipher(cipher, 16, SHA, 12, x1f * 20)
+        p.set_inbound_cipher(cipher, 16, sha1, 12, x1f * 20)
         wsock.send(b'\x43\x91\x97\xbd\x5b\x50\xac\x25\x87\xc2\xc4\x6b\xc7\xe9\x38\xc0\x90\xd2\x16\x56\x0d\x71\x73\x61\x38\x7c\x4c\x3d\xfb\x97\x7d\xe2\x6e\x03\xb1\xa0\xc2\x1c\xd6\x41\x41\x4c\xb4\x59')
         cmd, m = p.read_message()
         self.assertEqual(100, cmd)

tests/test_pkey.py

@@ -20,8 +20,9 @@
 Some unit tests for public/private key objects.
 """
 
-from binascii import hexlify
 import unittest
+from binascii import hexlify
+from hashlib import md5
 
 from paramiko import RSAKey, DSSKey, ECDSAKey, Message, util
 from paramiko.py3compat import StringIO, byte_chr, b, bytes

@@ -91,8 +92,7 @@ class KeyTest (unittest.TestCase):
         pass
 
     def test_1_generate_key_bytes(self):
-        from Crypto.Hash import MD5
-        key = util.generate_key_bytes(MD5, x1234, 'happy birthday', 30)
+        key = util.generate_key_bytes(md5, x1234, 'happy birthday', 30)
         exp = b'\x61\xE1\xF2\x72\xF4\xC1\xC4\x56\x15\x86\xBD\x32\x24\x98\xC0\xE9\x24\x67\x27\x80\xF4\x7B\xB3\x7D\xDA\x7D\x54\x01\x9E\x64'
         self.assertEqual(exp, key)
 

tests/test_util.py

@@ -23,7 +23,8 @@ Some unit tests for utility functions.
 from binascii import hexlify
 import errno
 import os
-from Crypto.Hash import SHA
+from hashlib import sha1
+
 import paramiko.util
 from paramiko.util import lookup_ssh_host_config as host_config
 from paramiko.py3compat import StringIO, byte_ord

@@ -136,7 +137,7 @@ class UtilTest(ParamikoTest):
         )
 
     def test_4_generate_key_bytes(self):
-        x = paramiko.util.generate_key_bytes(SHA, b'ABCDEFGH', 'This is my secret passphrase.', 64)
+        x = paramiko.util.generate_key_bytes(sha1, b'ABCDEFGH', 'This is my secret passphrase.', 64)
         hex = ''.join(['%02x' % byte_ord(c) for c in x])
         self.assertEqual(hex, '9110e2f6793b69363e58173e9436b13a5a4b339005741d5c680e505f57d871347b4239f14fb5c46e857d5e100424873ba849ac699cea98d729e57b3e84378e8b')
 