-rw-r--r--  .coveragerc                 |   1
-rw-r--r--  dev-requirements.txt        |   5
-rw-r--r--  paramiko/_winapi.py         |   9
-rw-r--r--  paramiko/agent.py           |  10
-rw-r--r--  paramiko/auth_handler.py    |  12
-rw-r--r--  paramiko/ber.py             |  11
-rw-r--r--  paramiko/buffered_pipe.py   |   2
-rw-r--r--  paramiko/channel.py         |  16
-rw-r--r--  paramiko/client.py          |   6
-rw-r--r--  paramiko/common.py          |  18
-rw-r--r--  paramiko/compress.py        |   4
-rw-r--r--  paramiko/config.py          |  22
-rw-r--r--  paramiko/ecdsakey.py        |   4
-rw-r--r--  paramiko/file.py            |   4
-rw-r--r--  paramiko/hostkeys.py        |   2
-rw-r--r--  paramiko/kex_curve25519.py  |   2
-rw-r--r--  paramiko/kex_gex.py         |   4
-rw-r--r--  paramiko/kex_group1.py      |   6
-rw-r--r--  paramiko/kex_gss.py         |  19
-rw-r--r--  paramiko/message.py         |  67
-rw-r--r--  paramiko/packet.py          |  13
-rw-r--r--  paramiko/pipe.py            |   6
-rw-r--r--  paramiko/pkey.py            |  66
-rw-r--r--  paramiko/primes.py          |   2
-rw-r--r--  paramiko/server.py          |   4
-rw-r--r--  paramiko/sftp.py            |  12
-rw-r--r--  paramiko/sftp_attr.py       |   6
-rw-r--r--  paramiko/sftp_client.py     |   7
-rw-r--r--  paramiko/sftp_file.py       |  12
-rw-r--r--  paramiko/sftp_handle.py     |   4
-rw-r--r--  paramiko/sftp_server.py     |  16
-rw-r--r--  paramiko/sftp_si.py         |   4
-rw-r--r--  paramiko/ssh_exception.py   |  12
-rw-r--r--  paramiko/ssh_gss.py         |   2
-rw-r--r--  paramiko/transport.py       |  27
-rw-r--r--  paramiko/util.py            |  38
-rw-r--r--  paramiko/win_pageant.py     |  11
-rw-r--r--  pytest.ini                  |   4
-rw-r--r--  setup.cfg                   |   8
-rw-r--r--  setup_helper.py             |   9
-rw-r--r--  sites/docs/conf.py          |   5
-rw-r--r--  sites/www/changelog.rst     |  43
-rw-r--r--  tasks.py                    |   2
-rw-r--r--  tests/conftest.py           |   6
-rw-r--r--  tests/loop.py               |   2
-rw-r--r--  tests/test_channelfile.py   |   4
-rw-r--r--  tests/test_client.py        |  33
-rw-r--r--  tests/test_config.py        |  42
-rw-r--r--  tests/test_file.py          |   3
-rw-r--r--  tests/test_gssapi.py        |   2
-rw-r--r--  tests/test_kex.py           |   8
-rw-r--r--  tests/test_kex_gss.py       |   2
-rw-r--r--  tests/test_message.py       |  30
-rw-r--r--  tests/test_packetizer.py    |   2
-rw-r--r--  tests/test_pkey.py          |   4
-rw-r--r--  tests/test_proxy.py         |   2
-rw-r--r--  tests/test_sftp.py          |  38
-rw-r--r--  tests/test_sftp_big.py      | 107
-rw-r--r--  tests/test_ssh_gss.py       |   4
-rw-r--r--  tests/test_transport.py     |  11
-rw-r--r--  tests/test_util.py          |  41
-rw-r--r--  tests/util.py               |   7
62 files changed, 425 insertions, 460 deletions
diff --git a/.coveragerc b/.coveragerc
index 47b6f4c3..90c7ab09 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -3,3 +3,4 @@ branch = True
include =
paramiko/*
tests/*
+omit = paramiko/_winapi.py
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 94578c22..8ed7eabc 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -1,6 +1,5 @@
# Invocations for common project tasks
-#invoke>=2.0
-invoke @ git+https://github.com/pyinvoke/invoke@main
+invoke>=2.0
invocations>=3.0
# Testing!
pytest-relaxed>=2
@@ -14,7 +13,7 @@ black>=22.8,<22.9
coverage>=6.2,<7
codecov==2.1.12
# Documentation tools
-alabaster==0.7.12
+alabaster==0.7.13
releases>=2.0
# Debuggery
icecream>=2.1
diff --git a/paramiko/_winapi.py b/paramiko/_winapi.py
index fcc18552..42954574 100644
--- a/paramiko/_winapi.py
+++ b/paramiko/_winapi.py
@@ -7,7 +7,6 @@ in jaraco.windows and asking the author to port the fixes back here.
"""
import builtins
-import sys
import ctypes.wintypes
from paramiko.util import u
@@ -62,7 +61,7 @@ class WindowsError(builtins.WindowsError):
value = ctypes.windll.kernel32.GetLastError()
strerror = format_system_message(value)
args = 0, strerror, None, value
- super(WindowsError, self).__init__(*args)
+ super().__init__(*args)
@property
def message(self):
@@ -131,7 +130,7 @@ ctypes.windll.kernel32.LocalFree.argtypes = (ctypes.wintypes.HLOCAL,)
# jaraco.windows.mmap
-class MemoryMap(object):
+class MemoryMap:
"""
A memory map object which can have security attributes overridden.
"""
@@ -173,7 +172,7 @@ class MemoryMap(object):
assert isinstance(msg, bytes)
n = len(msg)
if self.pos + n >= self.length: # A little safety.
- raise ValueError("Refusing to write %d bytes" % n)
+ raise ValueError(f"Refusing to write {n} bytes")
dest = self.view + self.pos
length = ctypes.c_size_t(n)
ctypes.windll.kernel32.RtlMoveMemory(dest, msg, length)
@@ -323,7 +322,7 @@ class SECURITY_ATTRIBUTES(ctypes.Structure):
]
def __init__(self, *args, **kwargs):
- super(SECURITY_ATTRIBUTES, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.nLength = ctypes.sizeof(SECURITY_ATTRIBUTES)
@property
diff --git a/paramiko/agent.py b/paramiko/agent.py
index 31a16e2e..c1a07390 100644
--- a/paramiko/agent.py
+++ b/paramiko/agent.py
@@ -34,7 +34,7 @@ from paramiko.common import io_sleep, byte_chr
from paramiko.ssh_exception import SSHException, AuthenticationException
from paramiko.message import Message
from paramiko.pkey import PKey
-from paramiko.util import retry_on_signal, asbytes
+from paramiko.util import asbytes
cSSH2_AGENTC_REQUEST_IDENTITIES = byte_chr(11)
SSH2_AGENT_IDENTITIES_ANSWER = 12
@@ -54,7 +54,7 @@ ALGORITHM_FLAG_MAP = {
}
-class AgentSSH(object):
+class AgentSSH:
def __init__(self):
self._conn = None
self._keys = ()
@@ -213,7 +213,7 @@ def get_agent_connection():
if ("SSH_AUTH_SOCK" in os.environ) and (sys.platform != "win32"):
conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
- retry_on_signal(lambda: conn.connect(os.environ["SSH_AUTH_SOCK"]))
+ conn.connect(os.environ["SSH_AUTH_SOCK"])
return conn
except:
# probably a dangling env var: the ssh agent is gone
@@ -232,7 +232,7 @@ def get_agent_connection():
return
-class AgentClientProxy(object):
+class AgentClientProxy:
"""
Class proxying request as a client:
@@ -325,7 +325,7 @@ class AgentServerProxy(AgentSSH):
return self._file
-class AgentRequestHandler(object):
+class AgentRequestHandler:
"""
Primary/default implementation of SSH agent forwarding functionality.
diff --git a/paramiko/auth_handler.py b/paramiko/auth_handler.py
index f18a3ed6..22f506c1 100644
--- a/paramiko/auth_handler.py
+++ b/paramiko/auth_handler.py
@@ -73,7 +73,7 @@ from paramiko.server import InteractiveQuery
from paramiko.ssh_gss import GSSAuth, GSS_EXCEPTIONS
-class AuthHandler(object):
+class AuthHandler:
"""
Internal class to handle the mechanics of authentication.
"""
@@ -367,7 +367,7 @@ class AuthHandler(object):
def _parse_service_accept(self, m):
service = m.get_text()
if service == "ssh-userauth":
- # TODO 3.0: this message sucks ass. change it to something more
+ # TODO 4.0: this message sucks ass. change it to something more
# obvious. it always appears to mean "we already authed" but no! it
# just means "we are allowed to TRY authing!"
self._log(DEBUG, "userauth is OK")
@@ -613,9 +613,7 @@ Error Message: {}
self._log(INFO, "Auth rejected: public key: {}".format(str(e)))
key = None
except Exception as e:
- msg = (
- "Auth rejected: unsupported or mangled public key ({}: {})"
- ) # noqa
+ msg = "Auth rejected: unsupported or mangled public key ({}: {})" # noqa
self._log(INFO, msg.format(e.__class__.__name__, e))
key = None
if key is None:
@@ -808,7 +806,7 @@ Error Message: {}
return
# TODO: do the same to the other tables, in Transport.
- # TODO 3.0: MAY make sense to make these tables into actual
+ # TODO 4.0: MAY make sense to make these tables into actual
# classes/instances that can be fed a mode bool or whatever. Or,
# alternately (both?) make the message types small classes or enums that
# embed this info within themselves (which could also then tidy up the
@@ -841,7 +839,7 @@ Error Message: {}
return self._client_handler_table
-class GssapiWithMicAuthHandler(object):
+class GssapiWithMicAuthHandler:
"""A specialized Auth handler for gssapi-with-mic
During the GSSAPI token exchange we need a modified dispatch table,
diff --git a/paramiko/ber.py b/paramiko/ber.py
index 4e93e66f..b8287f5d 100644
--- a/paramiko/ber.py
+++ b/paramiko/ber.py
@@ -19,13 +19,14 @@ from paramiko.common import max_byte, zero_byte, byte_ord, byte_chr
import paramiko.util as util
from paramiko.util import b
+from paramiko.sftp import int64
class BERException(Exception):
pass
-class BER(object):
+class BER:
"""
Robey's tiny little attempt at a BER decoder.
"""
@@ -57,7 +58,7 @@ class BER(object):
while self.idx < len(self.content):
t = byte_ord(self.content[self.idx])
self.idx += 1
- ident = (ident << 7) | (t & 0x7f)
+ ident = (ident << 7) | (t & 0x7F)
if not (t & 0x80):
break
if self.idx >= len(self.content):
@@ -68,7 +69,7 @@ class BER(object):
if size & 0x80:
# more complimicated...
# FIXME: theoretically should handle indefinite-length (0x80)
- t = size & 0x7f
+ t = size & 0x7F
if self.idx + t > len(self.content):
return None
size = util.inflate_long(
@@ -106,7 +107,7 @@ class BER(object):
def encode_tlv(self, ident, val):
# no need to support ident > 31 here
self.content += byte_chr(ident)
- if len(val) > 0x7f:
+ if len(val) > 0x7F:
lenstr = util.deflate_long(len(val))
self.content += byte_chr(0x80 + len(lenstr)) + lenstr
else:
@@ -119,7 +120,7 @@ class BER(object):
self.encode_tlv(1, max_byte)
else:
self.encode_tlv(1, zero_byte)
- elif (type(x) is int) or (type(x) is long):
+ elif (type(x) is int) or (type(x) is int64):
self.encode_tlv(2, util.deflate_long(x))
elif type(x) is str:
self.encode_tlv(4, x)
diff --git a/paramiko/buffered_pipe.py b/paramiko/buffered_pipe.py
index 79854ada..c19279c0 100644
--- a/paramiko/buffered_pipe.py
+++ b/paramiko/buffered_pipe.py
@@ -36,7 +36,7 @@ class PipeTimeout(IOError):
pass
-class BufferedPipe(object):
+class BufferedPipe:
"""
A buffer that obeys normal read (with timeout) & close semantics for a
file or socket, but is fed data from another thread. This is used by
diff --git a/paramiko/channel.py b/paramiko/channel.py
index ccd23e9d..0e009eff 100644
--- a/paramiko/channel.py
+++ b/paramiko/channel.py
@@ -688,7 +688,7 @@ class Channel(ClosingContextManager):
length zero is returned, the channel stream has closed.
:param int nbytes: maximum number of bytes to read.
- :return: received data, as a ``str``/``bytes``.
+ :return: received data, as a `bytes`.
:raises socket.timeout:
if no data is ready before the timeout set by `settimeout`.
@@ -734,7 +734,7 @@ class Channel(ClosingContextManager):
channel stream has closed.
:param int nbytes: maximum number of bytes to read.
- :return: received data as a `str`
+ :return: received data as a `bytes`
:raises socket.timeout: if no data is ready before the timeout set by
`settimeout`.
@@ -786,7 +786,7 @@ class Channel(ClosingContextManager):
transmitted, the application needs to attempt delivery of the remaining
data.
- :param str s: data to send
+ :param bytes s: data to send
:return: number of bytes actually sent, as an `int`
:raises socket.timeout: if no data could be sent before the timeout set
@@ -807,7 +807,7 @@ class Channel(ClosingContextManager):
data has been sent: if only some of the data was transmitted, the
application needs to attempt delivery of the remaining data.
- :param str s: data to send.
+ :param bytes s: data to send.
:return: number of bytes actually sent, as an `int`.
:raises socket.timeout:
@@ -849,10 +849,10 @@ class Channel(ClosingContextManager):
"""
Send data to the channel's "stderr" stream, without allowing partial
results. Unlike `send_stderr`, this method continues to send data
- from the given string until all data has been sent or an error occurs.
- Nothing is returned.
+ from the given bytestring until all data has been sent or an error
+ occurs. Nothing is returned.
- :param str s: data to send to the client as "stderr" output.
+ :param bytes s: data to send to the client as "stderr" output.
:raises socket.timeout:
if sending stalled for longer than the timeout set by `settimeout`.
@@ -1386,5 +1386,5 @@ class ChannelStdinFile(ChannelFile):
"""
def close(self):
- super(ChannelStdinFile, self).close()
+ super().close()
self.channel.shutdown_write()
diff --git a/paramiko/client.py b/paramiko/client.py
index 73909219..5667d7e7 100644
--- a/paramiko/client.py
+++ b/paramiko/client.py
@@ -42,7 +42,7 @@ from paramiko.ssh_exception import (
NoValidConnectionsError,
)
from paramiko.transport import Transport
-from paramiko.util import retry_on_signal, ClosingContextManager
+from paramiko.util import ClosingContextManager
class SSHClient(ClosingContextManager):
@@ -354,7 +354,7 @@ class SSHClient(ClosingContextManager):
sock.settimeout(timeout)
except:
pass
- retry_on_signal(lambda: sock.connect(addr))
+ sock.connect(addr)
# Break out of the loop on success
break
except socket.error as e:
@@ -784,7 +784,7 @@ class SSHClient(ClosingContextManager):
self._transport._log(level, msg)
-class MissingHostKeyPolicy(object):
+class MissingHostKeyPolicy:
"""
Interface for defining the policy that `.SSHClient` should use when the
SSH server's hostname is not in either the system host keys or the
diff --git a/paramiko/common.py b/paramiko/common.py
index 3721efe4..b57149b7 100644
--- a/paramiko/common.py
+++ b/paramiko/common.py
@@ -26,20 +26,24 @@ import struct
# Formerly of py3compat.py. May be fully delete'able with a deeper look?
#
+
def byte_chr(c):
assert isinstance(c, int)
return struct.pack("B", c)
+
def byte_mask(c, mask):
assert isinstance(c, int)
return struct.pack("B", c & mask)
+
def byte_ord(c):
# In case we're handed a string instead of an int.
if not isinstance(c, int):
c = ord(c)
return c
+
(
MSG_DISCONNECT,
MSG_IGNORE,
@@ -198,7 +202,7 @@ CONNECTION_FAILED_CODE = {
zero_byte = byte_chr(0)
one_byte = byte_chr(1)
four_byte = byte_chr(4)
-max_byte = byte_chr(0xff)
+max_byte = byte_chr(0xFF)
cr_byte = byte_chr(13)
linefeed_byte = byte_chr(10)
crlf = cr_byte + linefeed_byte
@@ -206,7 +210,7 @@ cr_byte_value = 13
linefeed_byte_value = 10
-xffffffff = 0xffffffff
+xffffffff = 0xFFFFFFFF
x80000000 = 0x80000000
o666 = 438
o660 = 432
@@ -225,17 +229,17 @@ CRITICAL = logging.CRITICAL
# Common IO/select/etc sleep period, in seconds
io_sleep = 0.01
-DEFAULT_WINDOW_SIZE = 64 * 2 ** 15
-DEFAULT_MAX_PACKET_SIZE = 2 ** 15
+DEFAULT_WINDOW_SIZE = 64 * 2**15
+DEFAULT_MAX_PACKET_SIZE = 2**15
# lower bound on the max packet size we'll accept from the remote host
# Minimum packet size is 32768 bytes according to
# http://www.ietf.org/rfc/rfc4254.txt
-MIN_WINDOW_SIZE = 2 ** 15
+MIN_WINDOW_SIZE = 2**15
# However, according to http://www.ietf.org/rfc/rfc4253.txt it is perfectly
# legal to accept a size much smaller, as OpenSSH client does as size 16384.
-MIN_PACKET_SIZE = 2 ** 12
+MIN_PACKET_SIZE = 2**12
# Max windows size according to http://www.ietf.org/rfc/rfc4254.txt
-MAX_WINDOW_SIZE = 2 ** 32 - 1
+MAX_WINDOW_SIZE = 2**32 - 1
diff --git a/paramiko/compress.py b/paramiko/compress.py
index 7fe26db1..18ff4843 100644
--- a/paramiko/compress.py
+++ b/paramiko/compress.py
@@ -23,7 +23,7 @@ Compression implementations for a Transport.
import zlib
-class ZlibCompressor(object):
+class ZlibCompressor:
def __init__(self):
# Use the default level of zlib compression
self.z = zlib.compressobj()
@@ -32,7 +32,7 @@ class ZlibCompressor(object):
return self.z.compress(data) + self.z.flush(zlib.Z_FULL_FLUSH)
-class ZlibDecompressor(object):
+class ZlibDecompressor:
def __init__(self):
self.z = zlib.decompressobj()
diff --git a/paramiko/config.py b/paramiko/config.py
index 21e72b20..48bcb101 100644
--- a/paramiko/config.py
+++ b/paramiko/config.py
@@ -43,7 +43,7 @@ from .ssh_exception import CouldNotCanonicalize, ConfigParseError
SSH_PORT = 22
-class SSHConfig(object):
+class SSHConfig:
"""
Representation of config information as stored in the format used by
OpenSSH. Queries can be made via `lookup`. The format is described in
@@ -149,7 +149,7 @@ class SSHConfig(object):
self._config.append(context)
context = {"config": {}}
if key == "host":
- # TODO 3.0: make these real objects or at least name this
+ # TODO 4.0: make these real objects or at least name this
# "hosts" to acknowledge it's an iterable. (Doing so prior
# to 3.0, despite it being a private API, feels bad -
# surely such an old codebase has folks actually relying on
@@ -159,9 +159,8 @@ class SSHConfig(object):
context["matches"] = self._get_matches(value)
# Special-case for noop ProxyCommands
elif key == "proxycommand" and value.lower() == "none":
- # Store 'none' as None; prior to 3.x, it will get stripped out
- # at the end (for compatibility with issue #415). After 3.x, it
- # will simply not get stripped, leaving a nice explicit marker.
+ # Store 'none' as None - not as a string implying that the
+ # proxycommand is the literal shell command "none"!
context["config"][key] = None
# All other keywords get stored, directly or via append
else:
@@ -267,9 +266,6 @@ class SSHConfig(object):
# Expand variables in resulting values (besides 'Match exec' which was
# already handled above)
options = self._expand_variables(options, hostname)
- # TODO: remove in 3.x re #670
- if "proxycommand" in options and options["proxycommand"] is None:
- del options["proxycommand"]
return options
def canonicalize(self, hostname, options, domains):
@@ -340,10 +336,6 @@ class SSHConfig(object):
match = True
return match
- # TODO 3.0: remove entirely (is now unused internally)
- def _allowed(self, hosts, hostname):
- return self._pattern_matches(hosts, hostname)
-
def _does_match(self, match_list, target_hostname, canonical, options):
matched = []
candidates = match_list[:]
@@ -584,7 +576,7 @@ def _addressfamily_host_lookup(hostname, options):
pass
-class LazyFqdn(object):
+class LazyFqdn:
"""
Returns the host's fqdn on request as string.
"""
@@ -656,10 +648,6 @@ class SSHConfigDict(dict):
.. versionadded:: 2.5
"""
- def __init__(self, *args, **kwargs):
- # Hey, guess what? Python 2's userdict is an old-style class!
- super(SSHConfigDict, self).__init__(*args, **kwargs)
-
def as_bool(self, key):
"""
Express given key's value as a boolean type.
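Illustrative sketch (not part of the patch) of how the new "ProxyCommand none" handling above surfaces to callers; the host name and config text here are invented for the example:

    from io import StringIO
    from paramiko.config import SSHConfig

    text = """
    Host gateway.example.com
        ProxyCommand none
    """
    config = SSHConfig()
    config.parse(StringIO(text))

    result = config.lookup("gateway.example.com")
    # The key used to be deleted from the lookup result; now it is kept,
    # with the explicit Python value None.
    assert "proxycommand" in result and result["proxycommand"] is None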
diff --git a/paramiko/ecdsakey.py b/paramiko/ecdsakey.py
index ad84fe31..e2279754 100644
--- a/paramiko/ecdsakey.py
+++ b/paramiko/ecdsakey.py
@@ -36,7 +36,7 @@ from paramiko.ssh_exception import SSHException
from paramiko.util import deflate_long
-class _ECDSACurve(object):
+class _ECDSACurve:
"""
Represents a specific ECDSA Curve (nistp256, nistp384, etc).
@@ -63,7 +63,7 @@ class _ECDSACurve(object):
self.curve_class = curve_class
-class _ECDSACurveSet(object):
+class _ECDSACurveSet:
"""
A collection to hold the ECDSA curves. Allows querying by oid and by key
format identifier. The two ways in which ECDSAKey needs to be able to look
diff --git a/paramiko/file.py b/paramiko/file.py
index a30e5137..b1450ac9 100644
--- a/paramiko/file.py
+++ b/paramiko/file.py
@@ -520,9 +520,7 @@ class BufferedFile(ClosingContextManager):
return
if self.newlines is None:
self.newlines = newline
- elif self.newlines != newline and isinstance(
- self.newlines, bytes
- ):
+ elif self.newlines != newline and isinstance(self.newlines, bytes):
self.newlines = (self.newlines, newline)
elif newline not in self.newlines:
self.newlines += (newline,)
diff --git a/paramiko/hostkeys.py b/paramiko/hostkeys.py
index dc8fb8d0..b189aac6 100644
--- a/paramiko/hostkeys.py
+++ b/paramiko/hostkeys.py
@@ -20,7 +20,6 @@
from base64 import encodebytes, decodebytes
import binascii
import os
-import sys
from collections.abc import MutableMapping
from hashlib import sha1
@@ -271,7 +270,6 @@ class HostKeys(MutableMapping):
self._entries.append(HostKeyEntry([hostname], entry[key_type]))
def keys(self):
- # Python 2.4 sets would be nice here.
ret = []
for e in self._entries:
for h in e.hostnames:
diff --git a/paramiko/kex_curve25519.py b/paramiko/kex_curve25519.py
index bb1b5423..20c23e42 100644
--- a/paramiko/kex_curve25519.py
+++ b/paramiko/kex_curve25519.py
@@ -17,7 +17,7 @@ _MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32)
c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)]
-class KexCurve25519(object):
+class KexCurve25519:
hash_algo = hashlib.sha256
def __init__(self, transport):
diff --git a/paramiko/kex_gex.py b/paramiko/kex_gex.py
index 7bf24ddd..baa0803d 100644
--- a/paramiko/kex_gex.py
+++ b/paramiko/kex_gex.py
@@ -48,7 +48,7 @@ from paramiko.ssh_exception import SSHException
) = [byte_chr(c) for c in range(30, 35)]
-class KexGex(object):
+class KexGex:
name = "diffie-hellman-group-exchange-sha1"
min_bits = 1024
@@ -111,7 +111,7 @@ class KexGex(object):
qnorm = util.deflate_long(q, 0)
qhbyte = byte_ord(qnorm[0])
byte_count = len(qnorm)
- qmask = 0xff
+ qmask = 0xFF
while not (qhbyte & 0x80):
qhbyte <<= 1
qmask >>= 1
diff --git a/paramiko/kex_group1.py b/paramiko/kex_group1.py
index 1e3d55f6..f0742566 100644
--- a/paramiko/kex_group1.py
+++ b/paramiko/kex_group1.py
@@ -33,11 +33,11 @@ from paramiko.ssh_exception import SSHException
_MSG_KEXDH_INIT, _MSG_KEXDH_REPLY = range(30, 32)
c_MSG_KEXDH_INIT, c_MSG_KEXDH_REPLY = [byte_chr(c) for c in range(30, 32)]
-b7fffffffffffffff = byte_chr(0x7f) + max_byte * 7
+b7fffffffffffffff = byte_chr(0x7F) + max_byte * 7
b0000000000000000 = zero_byte * 8
-class KexGroup1(object):
+class KexGroup1:
# draft-ietf-secsh-transport-09.txt, page 17
P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa
@@ -86,7 +86,7 @@ class KexGroup1(object):
# potential x).
while 1:
x_bytes = os.urandom(128)
- x_bytes = byte_mask(x_bytes[0], 0x7f) + x_bytes[1:]
+ x_bytes = byte_mask(x_bytes[0], 0x7F) + x_bytes[1:]
if (
x_bytes[:8] != b7fffffffffffffff
and x_bytes[:8] != b0000000000000000
diff --git a/paramiko/kex_gss.py b/paramiko/kex_gss.py
index 55f3f5e7..e3fbb36e 100644
--- a/paramiko/kex_gss.py
+++ b/paramiko/kex_gss.py
@@ -41,7 +41,12 @@ import os
from hashlib import sha1
from paramiko.common import (
- DEBUG, max_byte, zero_byte, byte_chr, byte_mask, byte_ord,
+ DEBUG,
+ max_byte,
+ zero_byte,
+ byte_chr,
+ byte_mask,
+ byte_ord,
)
from paramiko import util
from paramiko.message import Message
@@ -68,7 +73,7 @@ from paramiko.ssh_exception import SSHException
]
-class KexGSSGroup1(object):
+class KexGSSGroup1:
"""
GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange as defined in `RFC
4462 Section 2 <https://tools.ietf.org/html/rfc4462.html#section-2>`_
@@ -77,7 +82,7 @@ class KexGSSGroup1(object):
# draft-ietf-secsh-transport-09.txt, page 17
P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa
G = 2
- b7fffffffffffffff = byte_chr(0x7f) + max_byte * 7 # noqa
+ b7fffffffffffffff = byte_chr(0x7F) + max_byte * 7 # noqa
b0000000000000000 = zero_byte * 8 # noqa
NAME = "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g=="
@@ -147,7 +152,7 @@ class KexGSSGroup1(object):
"""
while 1:
x_bytes = os.urandom(128)
- x_bytes = byte_mask(x_bytes[0], 0x7f) + x_bytes[1:]
+ x_bytes = byte_mask(x_bytes[0], 0x7F) + x_bytes[1:]
first = x_bytes[:8]
if first not in (self.b7fffffffffffffff, self.b0000000000000000):
break
@@ -328,7 +333,7 @@ class KexGSSGroup14(KexGSSGroup1):
NAME = "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g=="
-class KexGSSGex(object):
+class KexGSSGex:
"""
GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange as defined in
`RFC 4462 Section 2 <https://tools.ietf.org/html/rfc4462.html#section-2>`_
@@ -402,7 +407,7 @@ class KexGSSGex(object):
qnorm = util.deflate_long(q, 0)
qhbyte = byte_ord(qnorm[0])
byte_count = len(qnorm)
- qmask = 0xff
+ qmask = 0xFF
while not (qhbyte & 0x80):
qhbyte <<= 1
qmask >>= 1
@@ -664,7 +669,7 @@ Error Message: {}
)
-class NullHostKey(object):
+class NullHostKey:
"""
This class represents the Null Host Key for GSS-API Key Exchange as defined
in `RFC 4462 Section 5
diff --git a/paramiko/message.py b/paramiko/message.py
index fc832732..8c2b3bd0 100644
--- a/paramiko/message.py
+++ b/paramiko/message.py
@@ -28,7 +28,7 @@ from paramiko.common import zero_byte, max_byte, one_byte
from paramiko.util import u
-class Message(object):
+class Message:
"""
An SSH2 message is a stream of bytes that encodes some combination of
strings, integers, bools, and infinite-precision integers. This class
@@ -39,13 +39,13 @@ class Message(object):
paramiko doesn't support yet.
"""
- big_int = 0xff000000
+ big_int = 0xFF000000
def __init__(self, content=None):
"""
Create a new SSH2 message.
- :param str content:
+ :param bytes content:
the byte stream to use as the message content (passed in only when
decomposing a message).
"""
@@ -54,10 +54,7 @@ class Message(object):
else:
self.packet = BytesIO()
- def __str__(self):
- """
- Return the byte stream content of this message, as a string/bytes obj.
- """
+ def __bytes__(self):
return self.asbytes()
def __repr__(self):
@@ -66,9 +63,10 @@ class Message(object):
"""
return "paramiko.Message(" + repr(self.packet.getvalue()) + ")"
+ # TODO 4.0: just merge into __bytes__ (everywhere)
def asbytes(self):
"""
- Return the byte stream content of this Message, as bytes.
+ Return the byte stream content of this Message, as a `bytes`.
"""
return self.packet.getvalue()
@@ -81,8 +79,8 @@ class Message(object):
def get_remainder(self):
"""
- Return the bytes (as a `str`) of this message that haven't already been
- parsed and returned.
+ Return the `bytes` of this message that haven't already been parsed and
+ returned.
"""
position = self.packet.tell()
remainder = self.packet.read()
@@ -91,7 +89,7 @@ class Message(object):
def get_so_far(self):
"""
- Returns the `str` bytes of this message that have been parsed and
+ Returns the `bytes` of this message that have been parsed and
returned. The string passed into a message's constructor can be
regenerated by concatenating ``get_so_far`` and `get_remainder`.
"""
@@ -101,10 +99,10 @@ class Message(object):
def get_bytes(self, n):
"""
- Return the next ``n`` bytes of the message (as a `str`), without
- decomposing into an int, decoded string, etc. Just the raw bytes are
- returned. Returns a string of ``n`` zero bytes if there weren't ``n``
- bytes remaining in the message.
+ Return the next ``n`` bytes of the message, without decomposing into an
+ int, decoded string, etc. Just the raw bytes are returned. Returns a
+ string of ``n`` zero bytes if there weren't ``n`` bytes remaining in
+ the message.
"""
b = self.packet.read(n)
max_pad_size = 1 << 20 # Limit padding to 1 MB
@@ -118,8 +116,8 @@ class Message(object):
is equivalent to `get_bytes(1) <get_bytes>`.
:return:
- the next (`str`) byte of the message, or ``'\000'`` if there aren't
- any bytes remaining.
+ the next (`bytes`) byte of the message, or ``b'\000'`` if there
+ aren't any bytes remaining.
"""
return self.get_bytes(1)
@@ -164,25 +162,30 @@ class Message(object):
"""
return util.inflate_long(self.get_binary())
+ # TODO 4.0: depending on where this is used internally or downstream, force
+ # users to specify get_binary instead and delete this.
def get_string(self):
"""
- Fetch a `str` from the stream. This could be a byte string and may
- contain unprintable characters. (It's not unheard of for a string to
- contain another byte-stream message.)
+ Fetch a "string" from the stream. This will actually be a `bytes`
+ object, and may contain unprintable characters. (It's not unheard of
+ for a string to contain another byte-stream message.)
"""
return self.get_bytes(self.get_int())
+ # TODO 4.0: also consider having this take over the get_string name, and
+ # remove this name instead.
def get_text(self):
"""
Fetch a Unicode string from the stream.
+
+ This currently operates by attempting to encode the next "string" as
+ ``utf-8``.
"""
return u(self.get_string())
def get_binary(self):
"""
- Fetch a string from the stream. This could be a byte string and may
- contain unprintable characters. (It's not unheard of for a string to
- contain another byte-stream Message.)
+ Alias for `get_string` (obtains a bytestring).
"""
return self.get_bytes(self.get_int())
@@ -198,7 +201,7 @@ class Message(object):
"""
Write bytes to the stream, without any formatting.
- :param str b: bytes to add
+ :param bytes b: bytes to add
"""
self.packet.write(b)
return self
@@ -207,7 +210,7 @@ class Message(object):
"""
Write a single byte to the stream, without any formatting.
- :param str b: byte to add
+ :param bytes b: byte to add
"""
self.packet.write(b)
return self
@@ -250,7 +253,7 @@ class Message(object):
"""
Add a 64-bit int to the stream.
- :param long n: long int to add
+ :param int n: long int to add
"""
self.packet.write(struct.pack(">Q", n))
return self
@@ -260,16 +263,18 @@ class Message(object):
Add a long int to the stream, encoded as an infinite-precision
integer. This method only works on positive numbers.
- :param long z: long int to add
+ :param int z: long int to add
"""
self.add_string(util.deflate_long(z))
return self
+ # TODO: see the TODO for get_string/get_text/et al, this should change
+ # to match.
def add_string(self, s):
"""
- Add a string to the stream.
+ Add a bytestring to the stream.
- :param str s: string to add
+ :param byte s: bytestring to add
"""
s = util.asbytes(s)
self.add_int(len(s))
@@ -297,10 +302,12 @@ class Message(object):
else:
return self.add_string(i)
+ # TODO: this would never have worked for unicode strings under Python 3,
+ # guessing nobody/nothing ever used it for that purpose?
def add(self, *seq):
"""
Add a sequence of items to the stream. The values are encoded based
- on their type: str, int, bool, list, or long.
+ on their type: bytes, str, int, bool, or list.
.. warning::
Longs are encoded non-deterministically. Don't use this method.
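A short usage sketch (not part of the diff) of the Message API as documented above, round-tripping values and exercising the new __bytes__ hook:

    from paramiko.message import Message

    out = Message()
    out.add_int(42)
    out.add_string(b"payload")        # "strings" here are really bytestrings
    raw = bytes(out)                  # __bytes__ replaces the old __str__

    inp = Message(raw)
    assert inp.get_int() == 42
    assert inp.get_binary() == b"payload"   # get_binary/get_string return bytes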
diff --git a/paramiko/packet.py b/paramiko/packet.py
index b6498c5e..7b482de5 100644
--- a/paramiko/packet.py
+++ b/paramiko/packet.py
@@ -62,7 +62,7 @@ def first_arg(e):
return arg
-class Packetizer(object):
+class Packetizer:
"""
Implementation of the base SSH packet protocol.
"""
@@ -312,9 +312,6 @@ class Packetizer(object):
arg = first_arg(e)
if arg == errno.EAGAIN:
got_timeout = True
- elif arg == errno.EINTR:
- # syscall interrupted; try again
- pass
elif self.__closed:
raise EOFError()
else:
@@ -340,9 +337,6 @@ class Packetizer(object):
arg = first_arg(e)
if arg == errno.EAGAIN:
retry_write = True
- elif arg == errno.EINTR:
- # syscall interrupted; try again
- retry_write = True
else:
n = -1
except ProxyCommandFailure:
@@ -610,11 +604,6 @@ class Packetizer(object):
break
except socket.timeout:
pass
- except EnvironmentError as e:
- if first_arg(e) == errno.EINTR:
- pass
- else:
- raise
if self.__closed:
raise EOFError()
now = time.time()
diff --git a/paramiko/pipe.py b/paramiko/pipe.py
index 3905949d..65944fad 100644
--- a/paramiko/pipe.py
+++ b/paramiko/pipe.py
@@ -38,7 +38,7 @@ def make_pipe():
return p
-class PosixPipe(object):
+class PosixPipe:
def __init__(self):
self._rfd, self._wfd = os.pipe()
self._set = False
@@ -71,7 +71,7 @@ class PosixPipe(object):
self.set()
-class WindowsPipe(object):
+class WindowsPipe:
"""
On Windows, only an OS-level "WinSock" may be used in select(), but reads
and writes must be to the actual socket object.
@@ -118,7 +118,7 @@ class WindowsPipe(object):
self.set()
-class OrPipe(object):
+class OrPipe:
def __init__(self, pipe):
self._set = False
self._partner = None
diff --git a/paramiko/pkey.py b/paramiko/pkey.py
index 5e4a51ca..32d8cad5 100644
--- a/paramiko/pkey.py
+++ b/paramiko/pkey.py
@@ -28,7 +28,6 @@ from hashlib import md5
import re
import struct
-import six
import bcrypt
from cryptography.hazmat.backends import default_backend
@@ -49,18 +48,18 @@ def _unpad_openssh(data):
# At the moment, this is only used for unpadding private keys on disk. This
# really ought to be made constant time (possibly by upstreaming this logic
# into pyca/cryptography).
- padding_length = six.indexbytes(data, -1)
- if 0x20 <= padding_length < 0x7f:
+ padding_length = data[-1]
+ if 0x20 <= padding_length < 0x7F:
return data # no padding, last byte part comment (printable ascii)
if padding_length > 15:
raise SSHException("Invalid key")
for i in range(padding_length):
- if six.indexbytes(data, i - padding_length) != i + 1:
+ if data[i - padding_length] != i + 1:
raise SSHException("Invalid key")
return data[:-padding_length]
-class PKey(object):
+class PKey:
"""
Base class for public keys.
"""
@@ -111,6 +110,7 @@ class PKey(object):
"""
pass
+ # TODO 4.0: just merge into __bytes__ (everywhere)
def asbytes(self):
"""
Return a string of an SSH `.Message` made up of the public part(s) of
@@ -119,27 +119,9 @@ class PKey(object):
"""
return bytes()
- def __str__(self):
+ def __bytes__(self):
return self.asbytes()
- # noinspection PyUnresolvedReferences
- # TODO: The comparison functions should be removed as per:
- # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons
- def __cmp__(self, other):
- """
- Compare this key to another. Returns 0 if this key is equivalent to
- the given key, or non-0 if they are different. Only the public parts
- of the key are compared, so a public key will compare equal to its
- corresponding private key.
-
- :param .PKey other: key to compare to.
- """
- hs = hash(self)
- ho = hash(other)
- if hs != ho:
- return cmp(hs, ho) # noqa
- return cmp(self.asbytes(), other.asbytes()) # noqa
-
def __eq__(self, other):
return isinstance(other, PKey) and self._fields == other._fields
@@ -202,7 +184,7 @@ class PKey(object):
Sign a blob of data with this private key, and return a `.Message`
representing an SSH signature message.
- :param str data:
+ :param bytes data:
the data to sign.
:param str algorithm:
the signature algorithm to use, if different from the key's
@@ -219,7 +201,7 @@ class PKey(object):
Given a blob of data, and an SSH message representing a signature of
that data, verify that it was signed with this key.
- :param str data: the data that was signed.
+ :param bytes data: the data that was signed.
:param .Message msg: an SSH signature message
:return:
``True`` if the signature verifies correctly; ``False`` otherwise.
@@ -312,7 +294,7 @@ class PKey(object):
:param str password:
an optional password to use to decrypt the key file, if it's
encrypted.
- :return: data blob (`str`) that makes up the private key.
+ :return: the `bytes` that make up the private key.
:raises: ``IOError`` -- if there was an error reading the file.
:raises: `.PasswordRequiredException` -- if the private key file is
@@ -556,7 +538,7 @@ class PKey(object):
:param str tag:
``"RSA"`` or ``"DSA"``, the tag used to mark the data block.
:param filename: name of the file to write.
- :param str data: data blob that makes up the private key.
+ :param bytes data: data blob that makes up the private key.
:param str password: an optional password to use to encrypt the file.
:raises: ``IOError`` -- if there was an error writing the file.
@@ -564,16 +546,20 @@ class PKey(object):
# Ensure that we create new key files directly with a user-only mode,
# instead of opening, writing, then chmodding, which leaves us open to
# CVE-2022-24302.
- # NOTE: O_TRUNC is a noop on new files, and O_CREAT is a noop on
- # existing files, so using all 3 in both cases is fine. Ditto the use
- # of the 'mode' argument; it should be safe to give even for existing
- # files (though it will not act like a chmod in that case).
- # TODO 3.0: turn into kwargs again
- args = [os.O_WRONLY | os.O_TRUNC | os.O_CREAT, o600]
- # NOTE: yea, you still gotta inform the FLO that it is in "write" mode
- with os.fdopen(os.open(filename, *args), "w") as f:
- # TODO 3.0: remove the now redundant chmod
- os.chmod(filename, o600)
+ with os.fdopen(
+ os.open(
+ filename,
+ # NOTE: O_TRUNC is a noop on new files, and O_CREAT is a noop
+ # on existing files, so using all 3 in both cases is fine.
+ flags=os.O_WRONLY | os.O_TRUNC | os.O_CREAT,
+ # Ditto the use of the 'mode' argument; it should be safe to
+ # give even for existing files (though it will not act like a
+ # chmod in that case).
+ mode=o600,
+ ),
+ # Yea, you still gotta inform the FLO that it is in "write" mode.
+ "w",
+ ) as f:
self._write_private_key(f, key, format, password=password)
def _write_private_key(self, f, key, format, password=None):
@@ -673,7 +659,7 @@ class PKey(object):
# Of little value in the case of standard public keys
# {ssh-rsa, ssh-dss, ssh-ecdsa, ssh-ed25519}, but should
# provide rudimentary support for {*-cert.v01}
-class PublicBlob(object):
+class PublicBlob:
"""
OpenSSH plain public key or OpenSSH signed public key (certificate).
@@ -692,7 +678,7 @@ class PublicBlob(object):
Create a new public blob of given type and contents.
:param str type_: Type indicator, eg ``ssh-rsa``.
- :param blob: The blob bytes themselves.
+ :param bytes blob: The blob bytes themselves.
:param str comment: A comment, if one was given (e.g. file-based.)
"""
self.key_type = type_
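Standalone sketch of the file-creation pattern the pkey hunk above switches to: the 0600 mode is applied by os.open at creation time, so the key never exists with looser permissions and no follow-up chmod is needed (cf. CVE-2022-24302). The helper name and path below are illustrative only:

    import os

    def open_for_private_key(path):
        flags = os.O_WRONLY | os.O_TRUNC | os.O_CREAT
        # 'mode' only takes effect when the file is created; it is not a chmod.
        return os.fdopen(os.open(path, flags, 0o600), "w")

    with open_for_private_key("/tmp/example_key") as f:
        f.write("-----BEGIN OPENSSH PRIVATE KEY-----\n")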
diff --git a/paramiko/primes.py b/paramiko/primes.py
index 388029c9..663c58ed 100644
--- a/paramiko/primes.py
+++ b/paramiko/primes.py
@@ -49,7 +49,7 @@ def _roll_random(n):
return num
-class ModulusPack(object):
+class ModulusPack:
"""
convenience object for holding the contents of the /etc/ssh/moduli file,
on systems that have such a file.
diff --git a/paramiko/server.py b/paramiko/server.py
index 68f5ae92..b68607e1 100644
--- a/paramiko/server.py
+++ b/paramiko/server.py
@@ -31,7 +31,7 @@ from paramiko.common import (
)
-class ServerInterface(object):
+class ServerInterface:
"""
This class defines an interface for controlling the behavior of Paramiko
in server mode.
@@ -593,7 +593,7 @@ class ServerInterface(object):
return (None, None)
-class InteractiveQuery(object):
+class InteractiveQuery:
"""
A query (set of prompts) for a user during interactive authentication.
"""
diff --git a/paramiko/sftp.py b/paramiko/sftp.py
index 144edd4a..b3528d4e 100644
--- a/paramiko/sftp.py
+++ b/paramiko/sftp.py
@@ -116,11 +116,21 @@ CMD_NAMES = {
}
+# TODO: rewrite SFTP file/server modules' overly-flexible "make a request with
+# xyz components" so we don't need this very silly method of signaling whether
+# a given Python integer should be 32- or 64-bit.
+# NOTE: this only became an issue when dropping Python 2 support; prior to
+# doing so, we had to support actual-longs, which served as that signal. This
+# is simply recreating that structure in a more tightly scoped fashion.
+class int64(int):
+ pass
+
+
class SFTPError(Exception):
pass
-class BaseSFTP(object):
+class BaseSFTP:
def __init__(self):
self.logger = util.get_logger("paramiko.sftp")
self.sock = None
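A minimal sketch of what the int64 marker above is for: request packers check for the subclass first, so callers can force a 64-bit wire field (file offsets, lengths) while plain ints stay 32-bit. The pack_args helper is hypothetical; the real dispatch lives in sftp_client.py and sftp_server.py:

    from paramiko.message import Message
    from paramiko.sftp import int64

    def pack_args(msg, *args):
        for item in args:
            if isinstance(item, int64):   # must check the subclass before int
                msg.add_int64(item)
            elif isinstance(item, int):
                msg.add_int(item)
            else:
                msg.add_string(item)

    msg = Message()
    pack_args(msg, int64(2**40), 4096, b"handle")   # offset, length, handle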
diff --git a/paramiko/sftp_attr.py b/paramiko/sftp_attr.py
index 51c26e88..eb4dd900 100644
--- a/paramiko/sftp_attr.py
+++ b/paramiko/sftp_attr.py
@@ -21,7 +21,7 @@ import time
from paramiko.common import x80000000, o700, o70, xffffffff
-class SFTPAttributes(object):
+class SFTPAttributes:
"""
Representation of the attributes of a file (or proxied file) for SFTP in
client or server mode. It attemps to mirror the object returned by
@@ -205,8 +205,8 @@ class SFTPAttributes(object):
datestr = "(unknown date)"
else:
time_tuple = time.localtime(self.st_mtime)
- if abs(time.time() - self.st_mtime) > 15552000:
- # (15552000 = 6 months)
+ if abs(time.time() - self.st_mtime) > 15_552_000:
+ # (15,552,000s = 6 months)
datestr = time.strftime("%d %b %Y", time_tuple)
else:
datestr = time.strftime("%d %b %H:%M", time_tuple)
diff --git a/paramiko/sftp_client.py b/paramiko/sftp_client.py
index eaaf0dad..d91a3951 100644
--- a/paramiko/sftp_client.py
+++ b/paramiko/sftp_client.py
@@ -60,6 +60,7 @@ from paramiko.sftp import (
SFTP_EOF,
SFTP_NO_SUCH_FILE,
SFTP_PERMISSION_DENIED,
+ int64,
)
from paramiko.sftp_attr import SFTPAttributes
@@ -178,7 +179,7 @@ class SFTPClient(BaseSFTP, ClosingContextManager):
# escape '%' in msg (they could come from file or directory names)
# before logging
msg = msg.replace("%", "%%")
- super(SFTPClient, self)._log(
+ super()._log(
level,
"[chan %s] " + msg,
*([self.sock.get_name()] + list(args))
@@ -827,8 +828,10 @@ class SFTPClient(BaseSFTP, ClosingContextManager):
msg = Message()
msg.add_int(self.request_number)
for item in arg:
- if isinstance(item, int):
+ if isinstance(item, int64):
msg.add_int64(item)
+ elif isinstance(item, int):
+ msg.add_int(item)
elif isinstance(item, SFTPAttributes):
item._pack(msg)
else:
diff --git a/paramiko/sftp_file.py b/paramiko/sftp_file.py
index 0975cba3..9a0a6b34 100644
--- a/paramiko/sftp_file.py
+++ b/paramiko/sftp_file.py
@@ -20,7 +20,6 @@
SFTP file object
"""
-from __future__ import with_statement
from binascii import hexlify
from collections import deque
@@ -42,6 +41,7 @@ from paramiko.sftp import (
CMD_ATTRS,
CMD_FSETSTAT,
CMD_EXTENDED,
+ int64,
)
from paramiko.sftp_attr import SFTPAttributes
@@ -183,7 +183,7 @@ class SFTPFile(BufferedFile):
if data is not None:
return data
t, msg = self.sftp._request(
- CMD_READ, self.handle, int(self._realpos), int(size)
+ CMD_READ, self.handle, int64(self._realpos), int(size)
)
if t != CMD_DATA:
raise SFTPError("Expected data")
@@ -196,7 +196,7 @@ class SFTPFile(BufferedFile):
type(None),
CMD_WRITE,
self.handle,
- int(self._realpos),
+ int64(self._realpos),
data[:chunk],
)
self._reqs.append(sftp_async_request)
@@ -406,8 +406,8 @@ class SFTPFile(BufferedFile):
"check-file",
self.handle,
hash_algorithm,
- int(offset),
- int(length),
+ int64(offset),
+ int64(length),
block_size,
)
msg.get_text() # ext
@@ -535,7 +535,7 @@ class SFTPFile(BufferedFile):
# a lot of them, so it may block.
for offset, length in chunks:
num = self.sftp._async_request(
- self, CMD_READ, self.handle, int(offset), int(length)
+ self, CMD_READ, self.handle, int64(offset), int(length)
)
with self._prefetch_lock:
self._prefetch_extents[num] = (offset, length)
diff --git a/paramiko/sftp_handle.py b/paramiko/sftp_handle.py
index 1b4e1363..b2046526 100644
--- a/paramiko/sftp_handle.py
+++ b/paramiko/sftp_handle.py
@@ -87,7 +87,7 @@ class SFTPHandle(ClosingContextManager):
:param offset: position in the file to start reading from.
:param int length: number of bytes to attempt to read.
- :return: data read from the file, or an SFTP error code, as a `str`.
+ :return: the `bytes` read, or an error code `int`.
"""
readfile = getattr(self, "readfile", None)
if readfile is None:
@@ -120,7 +120,7 @@ class SFTPHandle(ClosingContextManager):
refer to the same file.
:param offset: position in the file to start reading from.
- :param str data: data to write into the file.
+ :param bytes data: data to write into the file.
:return: an SFTP error code like ``SFTP_OK``.
"""
writefile = getattr(self, "writefile", None)
diff --git a/paramiko/sftp_server.py b/paramiko/sftp_server.py
index 94c451f7..6cb7ec62 100644
--- a/paramiko/sftp_server.py
+++ b/paramiko/sftp_server.py
@@ -32,6 +32,7 @@ from paramiko.sftp import (
SFTP_FAILURE,
SFTP_PERMISSION_DENIED,
SFTP_NO_SUCH_FILE,
+ int64,
)
from paramiko.sftp_si import SFTPServerInterface
from paramiko.sftp_attr import SFTPAttributes
@@ -128,13 +129,9 @@ class SFTPServer(BaseSFTP, SubsystemHandler):
def _log(self, level, msg):
if issubclass(type(msg), list):
for m in msg:
- super(SFTPServer, self)._log(
- level, "[chan " + self.sock.get_name() + "] " + m
- )
+ super()._log(level, "[chan " + self.sock.get_name() + "] " + m)
else:
- super(SFTPServer, self)._log(
- level, "[chan " + self.sock.get_name() + "] " + msg
- )
+ super()._log(level, "[chan " + self.sock.get_name() + "] " + msg)
def start_subsystem(self, name, transport, channel):
self.sock = channel
@@ -166,7 +163,7 @@ class SFTPServer(BaseSFTP, SubsystemHandler):
def finish_subsystem(self):
self.server.session_ended()
- super(SFTPServer, self).finish_subsystem()
+ super().finish_subsystem()
# close any file handles that were left open
# (so we can return them to the OS quickly)
for f in self.file_table.values():
@@ -228,8 +225,11 @@ class SFTPServer(BaseSFTP, SubsystemHandler):
msg = Message()
msg.add_int(request_number)
for item in arg:
- if isinstance(item, int):
+ # NOTE: this is a very silly tiny class used for SFTPFile mostly
+ if isinstance(item, int64):
msg.add_int64(item)
+ elif isinstance(item, int):
+ msg.add_int(item)
elif isinstance(item, (str, bytes)):
msg.add_string(item)
elif type(item) is SFTPAttributes:
diff --git a/paramiko/sftp_si.py b/paramiko/sftp_si.py
index 3199310a..26b0ac9b 100644
--- a/paramiko/sftp_si.py
+++ b/paramiko/sftp_si.py
@@ -25,7 +25,7 @@ import sys
from paramiko.sftp import SFTP_OP_UNSUPPORTED
-class SFTPServerInterface(object):
+class SFTPServerInterface:
"""
This class defines an interface for controlling the behavior of paramiko
when using the `.SFTPServer` subsystem to provide an SFTP server.
@@ -48,7 +48,7 @@ class SFTPServerInterface(object):
:param .ServerInterface server:
the server object associated with this channel and SFTP subsystem
"""
- super(SFTPServerInterface, self).__init__(*largs, **kwargs)
+ super().__init__(*largs, **kwargs)
def session_started(self):
"""
diff --git a/paramiko/ssh_exception.py b/paramiko/ssh_exception.py
index 620ab259..9b1b44c3 100644
--- a/paramiko/ssh_exception.py
+++ b/paramiko/ssh_exception.py
@@ -58,9 +58,9 @@ class BadAuthenticationType(AuthenticationException):
allowed_types = []
- # TODO 3.0: remove explanation kwarg
+ # TODO 4.0: remove explanation kwarg
def __init__(self, explanation, types):
- # TODO 3.0: remove this supercall unless it's actually required for
+ # TODO 4.0: remove this supercall unless it's actually required for
# pickling (after fixing pickling)
AuthenticationException.__init__(self, explanation, types)
self.explanation = explanation
@@ -125,9 +125,7 @@ class BadHostKeyException(SSHException):
self.expected_key = expected_key
def __str__(self):
- msg = (
- "Host key for server '{}' does not match: got '{}', expected '{}'"
- ) # noqa
+ msg = "Host key for server '{}' does not match: got '{}', expected '{}'" # noqa
return msg.format(
self.hostname,
self.key.get_base64(),
@@ -142,7 +140,7 @@ class IncompatiblePeer(SSHException):
.. versionadded:: 2.9
"""
- # TODO 3.0: consider making this annotate w/ 1..N 'missing' algorithms,
+ # TODO 4.0: consider making this annotate w/ 1..N 'missing' algorithms,
# either just the first one that would halt kex, or even updating the
# Transport logic so we record /all/ that /could/ halt kex.
# TODO: update docstrings where this may end up raised so they are more
@@ -204,7 +202,7 @@ class NoValidConnectionsError(socket.error):
msg = "Unable to connect to port {0} on {1} or {2}"
else:
msg = "Unable to connect to port {0} on {2}"
- super(NoValidConnectionsError, self).__init__(
+ super().__init__(
None, msg.format(addrs[0][1], body, tail) # stand-in for errno
)
self.errors = errors
diff --git a/paramiko/ssh_gss.py b/paramiko/ssh_gss.py
index 4f1581c3..ee49c34d 100644
--- a/paramiko/ssh_gss.py
+++ b/paramiko/ssh_gss.py
@@ -107,7 +107,7 @@ def GSSAuth(auth_method, gss_deleg_creds=True):
raise ImportError("Unable to import a GSS-API / SSPI module!")
-class _SSH_GSSAuth(object):
+class _SSH_GSSAuth:
"""
Contains the shared variables and methods of `._SSH_GSSAPI_OLD`,
`._SSH_GSSAPI_NEW` and `._SSH_SSPI`.
diff --git a/paramiko/transport.py b/paramiko/transport.py
index b2e8d432..2b6acd6e 100644
--- a/paramiko/transport.py
+++ b/paramiko/transport.py
@@ -21,7 +21,6 @@
Core protocol implementation
"""
-from __future__ import print_function
import os
import socket
import sys
@@ -112,7 +111,11 @@ from paramiko.ssh_exception import (
IncompatiblePeer,
ProxyCommandFailure,
)
-from paramiko.util import retry_on_signal, ClosingContextManager, clamp_value, b
+from paramiko.util import (
+ ClosingContextManager,
+ clamp_value,
+ b,
+)
# for thread cleanup
@@ -339,8 +342,8 @@ class Transport(threading.Thread, ClosingContextManager):
If the object is not actually a socket, it must have the following
methods:
- - ``send(str)``: Writes from 1 to ``len(str)`` bytes, and returns an
- int representing the number of bytes written. Returns
+ - ``send(bytes)``: Writes from 1 to ``len(bytes)`` bytes, and returns
+ an int representing the number of bytes written. Returns
0 or raises ``EOFError`` if the stream has been closed.
- ``recv(int)``: Reads from 1 to ``int`` bytes and returns them as a
string. Returns 0 or raises ``EOFError`` if the stream has been
@@ -432,7 +435,7 @@ class Transport(threading.Thread, ClosingContextManager):
# addr = sockaddr
sock = socket.socket(af, socket.SOCK_STREAM)
try:
- retry_on_signal(lambda: sock.connect((hostname, port)))
+ sock.connect((hostname, port))
except socket.error as e:
reason = str(e)
else:
@@ -1875,9 +1878,9 @@ class Transport(threading.Thread, ClosingContextManager):
"""you are holding the lock"""
chanid = self._channel_counter
while self._channels.get(chanid) is not None:
- self._channel_counter = (self._channel_counter + 1) & 0xffffff
+ self._channel_counter = (self._channel_counter + 1) & 0xFFFFFF
chanid = self._channel_counter
- self._channel_counter = (self._channel_counter + 1) & 0xffffff
+ self._channel_counter = (self._channel_counter + 1) & 0xFFFFFF
return chanid
def _unlink_channel(self, chanid):
@@ -2592,7 +2595,7 @@ class Transport(threading.Thread, ClosingContextManager):
def _activate_inbound(self):
"""switch on newly negotiated encryption parameters for
- inbound traffic"""
+ inbound traffic"""
block_size = self._cipher_info[self.remote_cipher]["block-size"]
if self.server_mode:
IV_in = self._compute_key("A", block_size)
@@ -2628,7 +2631,7 @@ class Transport(threading.Thread, ClosingContextManager):
def _activate_outbound(self):
"""switch on newly negotiated encryption parameters for
- outbound traffic"""
+ outbound traffic"""
m = Message()
m.add_byte(cMSG_NEWKEYS)
self._send_message(m)
@@ -3000,10 +3003,10 @@ class Transport(threading.Thread, ClosingContextManager):
}
-# TODO 3.0: drop this, we barely use it ourselves, it badly replicates the
+# TODO 4.0: drop this, we barely use it ourselves, it badly replicates the
# Transport-internal algorithm management, AND does so in a way which doesn't
# honor newer things like disabled_algorithms!
-class SecurityOptions(object):
+class SecurityOptions:
"""
Simple object containing the security preferences of an ssh transport.
These are tuples of acceptable ciphers, digests, key types, and key
@@ -3084,7 +3087,7 @@ class SecurityOptions(object):
self._set("_preferred_compression", "_compression_info", x)
-class ChannelMap(object):
+class ChannelMap:
def __init__(self):
# (id -> Channel)
self._map = weakref.WeakValueDictionary()
diff --git a/paramiko/util.py b/paramiko/util.py
index 5e2f6392..76941ff4 100644
--- a/paramiko/util.py
+++ b/paramiko/util.py
@@ -20,16 +20,21 @@
Useful functions used by the rest of paramiko.
"""
-from __future__ import generators
-import errno
import sys
import struct
import traceback
import threading
import logging
-from paramiko.common import DEBUG, zero_byte, xffffffff, max_byte, byte_ord, byte_chr
+from paramiko.common import (
+ DEBUG,
+ zero_byte,
+ xffffffff,
+ max_byte,
+ byte_ord,
+ byte_chr,
+)
from paramiko.config import SSHConfig
@@ -67,7 +72,7 @@ def deflate_long(n, add_sign_padding=True):
for i in enumerate(s):
if (n == 0) and (i[1] != 0):
break
- if (n == -1) and (i[1] != 0xff):
+ if (n == -1) and (i[1] != 0xFF):
break
else:
# degenerate case, n was either 0 or -1
@@ -143,10 +148,10 @@ def generate_key_bytes(hash_alg, salt, key, nbytes):
:param function hash_alg: A function which creates a new hash object, such
as ``hashlib.sha256``.
:param salt: data to salt the hash with.
- :type salt: byte string
+ :type bytes salt: Hash salt bytes.
:param str key: human-entered password or passphrase.
:param int nbytes: number of bytes to generate.
- :return: Key data `str`
+ :return: Key data, as `bytes`.
"""
keydata = bytes()
digest = bytes()
@@ -252,7 +257,7 @@ def log_to_file(filename, level=DEBUG):
# make only one filter object, so it doesn't get applied more than once
-class PFilter(object):
+class PFilter:
def filter(self, record):
record._threadid = get_thread_id()
return True
@@ -267,16 +272,6 @@ def get_logger(name):
return logger
-def retry_on_signal(function):
- """Retries function until it doesn't raise an EINTR error"""
- while True:
- try:
- return function()
- except EnvironmentError as e:
- if e.errno != errno.EINTR:
- raise
-
-
def constant_time_bytes_eq(a, b):
if len(a) != len(b):
return False
@@ -287,7 +282,7 @@ def constant_time_bytes_eq(a, b):
return res == 0
-class ClosingContextManager(object):
+class ClosingContextManager:
def __enter__(self):
return self
@@ -305,12 +300,12 @@ def asbytes(s):
"""
try:
# Attempt to run through our version of b(), which does the Right Thing
- # for string/unicode/buffer (Py2) or bytes/str (Py3), and raises
- # TypeError if it's not one of those types.
+ # for unicode strings vs bytestrings, and raises TypeError if it's not
+ # one of those types.
return b(s)
except TypeError:
try:
- # If it wasn't a string/byte/buffer type object, try calling an
+ # If it wasn't a string/byte/buffer-ish object, try calling an
# asbytes() method, which many of our internal classes implement.
return s.asbytes()
except AttributeError:
@@ -330,6 +325,7 @@ def b(s, encoding="utf8"):
else:
raise TypeError("Expected unicode or bytes, got {!r}".format(s))
+
# TODO: clean this up / force callers to assume bytes OR unicode
def u(s, encoding="utf8"):
"""cast bytes or unicode to unicode"""
diff --git a/paramiko/win_pageant.py b/paramiko/win_pageant.py
index 93f74621..c927de65 100644
--- a/paramiko/win_pageant.py
+++ b/paramiko/win_pageant.py
@@ -28,15 +28,12 @@ import struct
from paramiko.common import zero_byte
from paramiko.util import b
-try:
- import _thread as thread # Python 3.x
-except ImportError:
- import thread # Python 2.5-2.7
+import _thread as thread
from . import _winapi
-_AGENT_COPYDATA_ID = 0x804e50ba
+_AGENT_COPYDATA_ID = 0x804E50BA
_AGENT_MAX_MSGLEN = 8192
# Note: The WM_COPYDATA value is pulled from win32con, as a workaround
# so we do not have to import this huge library just for this one variable.
@@ -87,7 +84,7 @@ def _query_pageant(msg):
return None
# create a name for the mmap
- map_name = "PageantRequest%08x" % thread.get_ident()
+ map_name = f"PageantRequest{thread.get_ident():08x}"
pymap = _winapi.MemoryMap(
map_name, _AGENT_MAX_MSGLEN, _winapi.get_security_attributes_for_user()
@@ -114,7 +111,7 @@ def _query_pageant(msg):
return None
-class PageantConnection(object):
+class PageantConnection:
"""
Mock "connection" to an agent which roughly approximates the behavior of
a unix local-domain socket (as used by Agent). Requests are sent to the
diff --git a/pytest.ini b/pytest.ini
index be207cd8..62fef863 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -4,7 +4,3 @@
addopts = -p no:relaxed
# Loop on failure
looponfailroots = tests paramiko
-# Ignore some warnings we cannot easily handle.
-filterwarnings =
- ignore::DeprecationWarning:pkg_resources
- ignore::cryptography.utils.CryptographyDeprecationWarning
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 59b47d60..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,8 +0,0 @@
-[wheel]
-universal = 1
-
-[metadata]
-license_file = LICENSE
-
-[coverage:run]
-omit = paramiko/_winapi.py
diff --git a/setup_helper.py b/setup_helper.py
index fc4e755f..f290ea3f 100644
--- a/setup_helper.py
+++ b/setup_helper.py
@@ -116,7 +116,7 @@ def make_tarball(
mode = "w:" + tarfile_compress_flag.get(compress, "")
mkpath(os.path.dirname(archive_name), dry_run=dry_run)
- log.info("Creating tar file %s with mode %s" % (archive_name, mode))
+ log.info(f"Creating tar file {archive_name} with mode {mode}")
uid = _get_uid(owner)
gid = _get_gid(group)
@@ -134,12 +134,7 @@ def make_tarball(
tar = tarfile.open(archive_name, mode=mode)
# This recursively adds everything underneath base_dir
try:
- try:
- # Support for the `filter' parameter was added in Python 2.7,
- # earlier versions will raise TypeError.
- tar.add(base_dir, filter=_set_uid_gid)
- except TypeError:
- tar.add(base_dir)
+ tar.add(base_dir, filter=_set_uid_gid)
finally:
tar.close()
diff --git a/sites/docs/conf.py b/sites/docs/conf.py
index 4805a03c..79958e6d 100644
--- a/sites/docs/conf.py
+++ b/sites/docs/conf.py
@@ -10,7 +10,10 @@ from shared_conf import *
extensions.extend(["sphinx.ext.autodoc"])
# Autodoc settings
-autodoc_default_flags = ["members", "special-members"]
+autodoc_default_options = {
+ "members": True,
+ "special-members": True,
+}
# Default is 'local' building, but reference the public www site when building
# under RTD.
diff --git a/sites/www/changelog.rst b/sites/www/changelog.rst
index 595e2fde..162d01bf 100644
--- a/sites/www/changelog.rst
+++ b/sites/www/changelog.rst
@@ -2,6 +2,49 @@
Changelog
=========
+- :support:`-` ``paramiko.util.retry_on_signal`` (and any internal uses of
+ same, and also any internal retries of ``EINTR`` on eg socket operations) has
+ been removed. As of Python 3.5, per `PEP 475
+ <https://peps.python.org/pep-0475/>`_, this functionality (and retrying
+ ``EINTR`` generally) is now part of the standard library.
+
+ .. warning::
+ This change is backwards incompatible if you were explicitly
+ importing/using this particular function. The observable behavior otherwise
+ should not be changing.
+
+- :support:`732` (also re: :issue:`630`) `~paramiko.config.SSHConfig` used to
+ straight-up delete the ``proxycommand`` key from config lookup results when
+ the source config said ``ProxyCommand none``. This has been altered to
+ preserve the key and give it the Python value ``None``, thus making the
+ Python representation more in line with the source config file.
+
+ .. warning::
+ This change is backwards incompatible if you were relying on the old (1.x,
+ 2.x) behavior for some reason (eg assuming all ``proxycommand`` values were
+ valid).
+
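A small sketch of the new lookup result, mirroring the test changes later in this diff:

    from paramiko.config import SSHConfig

    config = SSHConfig.from_text(
        """
    Host example
        ProxyCommand none
    """
    )
    # Pre-3.0 the key was deleted from the result; now it is kept as Python None.
    assert config.lookup("example")["proxycommand"] is None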
+- :support:`-` Private key classes' (ie anything inheriting from
+  `~paramiko.pkey.PKey`) private key writing methods used to perform a manual,
+  extra ``chmod`` call after writing. This hasn't been strictly necessary since
+  the mid 2.x release line (when key writing started giving the ``mode``
+  argument to `os.open`), and has now been removed entirely.
+
+ This should only be observable if you were mocking Paramiko's system calls
+ during your own testing, or similar.
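A rough sketch of the write path this refers to, with a hypothetical helper name and simplified signature for illustration only: the restrictive mode is applied at creation time via `os.open`, so the follow-up ``chmod`` is redundant:

    import os

    def write_key_file(path: str, data: str) -> None:
        # 0o600 at open() time replaces the old post-write os.chmod() call.
        fd = os.open(path, flags=os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode=0o600)
        with os.fdopen(fd, "w") as f:
            f.write(data)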
+- :support:`-` ``PKey.__cmp__`` has been removed. Ordering-oriented comparison
+ of key files is unlikely to have ever made sense (the old implementation
+ attempted to order by the hashes of the key material) and so we have not
+ bothered setting up ``__lt__`` and friends at this time. The class continues
+ to have its original ``__eq__`` untouched.
+- :bug:`- major` A handful of lower-level classes (notably
+ `paramiko.message.Message` and `paramiko.pkey.PKey`) previously returned
+ `bytes` objects from their implementation of ``__str__``, even under Python
+ 3; and there was never any ``__bytes__`` method.
+
+ These issues have been fixed by renaming ``__str__`` to ``__bytes__`` and
+ relying on Python's default "stringification returns the output of
+ ``__repr__``" behavior re: any real attempts to ``str()`` such objects.
- :support:`-` ``paramiko.common.asbytes`` has been moved to
`paramiko.util.asbytes`.
diff --git a/tasks.py b/tasks.py
index ae408059..9d2903c8 100644
--- a/tasks.py
+++ b/tasks.py
@@ -97,7 +97,7 @@ def guard(ctx, opts=""):
# Until we stop bundling docs w/ releases. Need to discover use cases first.
# TODO: would be nice to tie this into our own version of build() too, but
# still have publish() use that build()...really need to try out classes!
-# TODO 3.0: I'd like to just axe the 'built docs in sdist', none of my other
+# TODO 4.0: I'd like to just axe the 'built docs in sdist', none of my other
# projects do it.
@task
def publish_(
diff --git a/tests/conftest.py b/tests/conftest.py
index 2b509c5c..b28d2a17 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,9 +10,11 @@ from .loop import LoopSocket
from .stub_sftp import StubServer, StubSFTPServer
from .util import _support
+from icecream import ic, install as install_ic
-# TODO: not a huge fan of conftest.py files, see if we can move these somewhere
-# 'nicer'.
+
+install_ic()
+ic.configureOutput(includeContext=True)
# Perform logging by default; pytest will capture and thus hide it normally,
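Presumably the intent here is debugging convenience: icecream's ``install()`` places ``ic`` into builtins, so (under that assumption) any test can call it without an import, e.g.:

    def test_example():
        value = 42
        ic(value)  # with includeContext=True, output includes file/line/function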
diff --git a/tests/loop.py b/tests/loop.py
index 51dc6308..a3740013 100644
--- a/tests/loop.py
+++ b/tests/loop.py
@@ -22,7 +22,7 @@ import threading
from paramiko.util import asbytes
-class LoopSocket(object):
+class LoopSocket:
"""
A LoopSocket looks like a normal socket, but all data written to it is
delivered on the read-end of another LoopSocket, and vice versa. It's
diff --git a/tests/test_channelfile.py b/tests/test_channelfile.py
index 65929416..e2b6306c 100644
--- a/tests/test_channelfile.py
+++ b/tests/test_channelfile.py
@@ -3,7 +3,7 @@ from unittest.mock import patch, MagicMock
from paramiko import Channel, ChannelFile, ChannelStderrFile, ChannelStdinFile
-class ChannelFileBase(object):
+class ChannelFileBase:
@patch("paramiko.channel.ChannelFile._set_mode")
def test_defaults_to_unbuffered_reading(self, setmode):
self.klass(Channel(None))
@@ -31,7 +31,7 @@ class TestChannelFile(ChannelFileBase):
klass = ChannelFile
-class TestChannelStderrFile(object):
+class TestChannelStderrFile:
def test_read_calls_channel_recv_stderr(self):
chan = MagicMock()
cf = ChannelStderrFile(chan)
diff --git a/tests/test_client.py b/tests/test_client.py
index a0dcab1a..dae5b13a 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -20,7 +20,6 @@
Some unit tests for SSHClient.
"""
-from __future__ import with_statement, print_function
import gc
import os
@@ -63,7 +62,7 @@ class NullServer(paramiko.ServerInterface):
self.__allowed_keys = kwargs.pop("allowed_keys", [])
# And allow them to set a (single...meh) expected public blob (cert)
self.__expected_public_blob = kwargs.pop("public_blob", None)
- super(NullServer, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def get_allowed_auths(self, username):
if username == "slowdive":
@@ -203,7 +202,7 @@ class ClientTest(unittest.TestCase):
# Client setup
self.tc = SSHClient()
self.tc.get_host_keys().add(
- "[%s]:%d" % (self.addr, self.port), "ssh-rsa", public_host_key
+ f"[{self.addr}]:{self.port}", "ssh-rsa", public_host_key
)
# Actual connection
@@ -391,7 +390,7 @@ class SSHClientTest(ClientTest):
verify that SSHClient's AutoAddPolicy works.
"""
threading.Thread(target=self._run).start()
- hostname = "[%s]:%d" % (self.addr, self.port)
+ hostname = f"[{self.addr}]:{self.port}"
key_file = _support("test_ecdsa_256.key")
public_host_key = paramiko.ECDSAKey.from_private_key_file(key_file)
@@ -425,7 +424,7 @@ class SSHClientTest(ClientTest):
client = SSHClient()
assert len(client.get_host_keys()) == 0
- host_id = "[%s]:%d" % (self.addr, self.port)
+ host_id = f"[{self.addr}]:{self.port}"
client.get_host_keys().add(host_id, "ssh-rsa", public_host_key)
assert len(client.get_host_keys()) == 1
@@ -443,7 +442,7 @@ class SSHClientTest(ClientTest):
verify that when an SSHClient is collected, its transport (and the
transport's packetizer) is closed.
"""
- # Skipped on PyPy because it fails on travis for unknown reasons
+ # Skipped on PyPy because it fails on CI for unknown reasons
if platform.python_implementation() == "PyPy":
return
@@ -465,9 +464,9 @@ class SSHClientTest(ClientTest):
# force a collection to see whether the SSHClient object is deallocated
# 2 GCs are needed on PyPy, time is needed for Python 3
- # TODO: this still fails randomly under CircleCI under Python 3.7, 3.8
- # at the very least. bumped sleep 0.3->1.0s but the underlying
- # functionality should get reevaluated once we drop Python 2.
+ # TODO 4.0: this still fails randomly under CircleCI under Python 3.7,
+ # 3.8 at the very least. bumped sleep 0.3->1.0s but the underlying
+ # functionality should get reevaluated now we've dropped Python 2.
time.sleep(1)
gc.collect()
gc.collect()
@@ -524,7 +523,7 @@ class SSHClientTest(ClientTest):
self.tc = SSHClient()
self.tc.get_host_keys().add(
- "[%s]:%d" % (self.addr, self.port), "ssh-rsa", public_host_key
+ f"[{self.addr}]:{self.port}", "ssh-rsa", public_host_key
)
# Connect with a half second banner timeout.
kwargs = dict(self.connect_kwargs, banner_timeout=0.5)
@@ -594,7 +593,7 @@ class SSHClientTest(ClientTest):
paramiko.SSHException,
self.tc.connect,
password="pygmalion",
- **self.connect_kwargs
+ **self.connect_kwargs,
)
@requires_gss_auth
@@ -615,12 +614,12 @@ class SSHClientTest(ClientTest):
self.tc.connect,
password="pygmalion",
gss_kex=True,
- **self.connect_kwargs
+ **self.connect_kwargs,
)
def _client_host_key_bad(self, host_key):
threading.Thread(target=self._run).start()
- hostname = "[%s]:%d" % (self.addr, self.port)
+ hostname = f"[{self.addr}]:{self.port}"
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.WarningPolicy())
@@ -631,12 +630,12 @@ class SSHClientTest(ClientTest):
paramiko.BadHostKeyException,
self.tc.connect,
password="pygmalion",
- **self.connect_kwargs
+ **self.connect_kwargs,
)
def _client_host_key_good(self, ktype, kfile):
threading.Thread(target=self._run).start()
- hostname = "[%s]:%d" % (self.addr, self.port)
+ hostname = f"[{self.addr}]:{self.port}"
self.tc = SSHClient()
self.tc.set_missing_host_key_policy(paramiko.RejectPolicy())
@@ -803,7 +802,7 @@ class PasswordPassphraseTests(ClientTest):
@requires_sha1_signing
def test_password_kwarg_used_for_passphrase_when_no_passphrase_kwarg_given(
- self
+ self,
): # noqa
# Backwards compatibility: passphrase in the password field.
self._test_connection(
@@ -814,7 +813,7 @@ class PasswordPassphraseTests(ClientTest):
@raises(AuthenticationException) # TODO: more granular
@requires_sha1_signing
def test_password_kwarg_not_used_for_passphrase_when_passphrase_kwarg_given( # noqa
- self
+ self,
):
# Sanity: if we're given both fields, the password field is NOT used as
# a passphrase.
diff --git a/tests/test_config.py b/tests/test_config.py
index 67a03e63..a2c60a32 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -52,7 +52,7 @@ def load_config(name):
return SSHConfig.from_path(_config(name))
-class TestSSHConfig(object):
+class TestSSHConfig:
def setup(self):
self.config = load_config("robey")
@@ -460,7 +460,7 @@ Host param3 parara
with raises(ConfigParseError):
load_config("invalid")
- def test_proxycommand_none_issue_418(self):
+ def test_proxycommand_none_issue_415(self):
config = SSHConfig.from_text(
"""
Host proxycommand-standard-none
@@ -472,10 +472,12 @@ Host proxycommand-with-equals-none
)
for host, values in {
"proxycommand-standard-none": {
- "hostname": "proxycommand-standard-none"
+ "hostname": "proxycommand-standard-none",
+ "proxycommand": None,
},
"proxycommand-with-equals-none": {
- "hostname": "proxycommand-with-equals-none"
+ "hostname": "proxycommand-with-equals-none",
+ "proxycommand": None,
},
}.items():
@@ -495,13 +497,11 @@ Host *
ProxyCommand default-proxy
"""
)
- # When bug is present, the full stripping-out of specific-host's
- # ProxyCommand means it actually appears to pick up the default
- # ProxyCommand value instead, due to cascading. It should (for
- # backwards compatibility reasons in 1.x/2.x) appear completely blank,
- # as if the host had no ProxyCommand whatsoever.
- # Threw another unrelated host in there just for sanity reasons.
- assert "proxycommand" not in config.lookup("specific-host")
+ # In versions <3.0, 'None' ProxyCommands got deleted, and this itself
+ # caused bugs. In 3.0, we more cleanly map "none" to None. This test
+ # has been altered accordingly but left around to ensure no
+ # regressions.
+ assert config.lookup("specific-host")["proxycommand"] is None
assert config.lookup("other-host")["proxycommand"] == "other-proxy"
cmd = config.lookup("some-random-host")["proxycommand"]
assert cmd == "default-proxy"
@@ -511,7 +511,7 @@ Host *
assert result["hostname"] == "prefix.whatever"
-class TestSSHConfigDict(object):
+class TestSSHConfigDict:
def test_SSHConfigDict_construct_empty(self):
assert not SSHConfigDict()
@@ -570,7 +570,7 @@ Host *
assert config.lookup("anything-else").as_int("port") == 3333
-class TestHostnameCanonicalization(object):
+class TestHostnameCanonicalization:
# NOTE: this class uses on-disk configs, and ones with real (at time of
# writing) DNS names, so that one can easily test OpenSSH's behavior using
# "ssh -F path/to/file.config -G <target>".
@@ -669,7 +669,7 @@ class TestHostnameCanonicalization(object):
@mark.skip
-class TestCanonicalizationOfCNAMEs(object):
+class TestCanonicalizationOfCNAMEs:
def test_permitted_cnames_may_be_one_to_one_mapping(self):
# CanonicalizePermittedCNAMEs *.foo.com:*.bar.com
pass
@@ -695,7 +695,7 @@ class TestCanonicalizationOfCNAMEs(object):
pass
-class TestMatchAll(object):
+class TestMatchAll:
def test_always_matches(self):
result = load_config("match-all").lookup("general")
assert result["user"] == "awesome"
@@ -745,7 +745,7 @@ def _expect(success_on):
@mark.skipif(Result is None, reason="requires invoke package")
-class TestMatchExec(object):
+class TestMatchExec:
@patch("paramiko.config.invoke", new=None)
@patch("paramiko.config.invoke_import_error", new=ImportError("meh"))
def test_raises_invoke_ImportErrors_at_runtime(self):
@@ -825,7 +825,7 @@ class TestMatchExec(object):
assert result["hostname"] == "pingable.target"
-class TestMatchHost(object):
+class TestMatchHost:
def test_matches_target_name_when_no_hostname(self):
result = load_config("match-host").lookup("target")
assert result["user"] == "rand"
@@ -875,7 +875,7 @@ class TestMatchHost(object):
load_config("match-host-no-arg")
-class TestMatchOriginalHost(object):
+class TestMatchOriginalHost:
def test_matches_target_host_not_hostname(self):
result = load_config("match-orighost").lookup("target")
assert result["hostname"] == "bogus"
@@ -908,7 +908,7 @@ class TestMatchOriginalHost(object):
load_config("match-orighost-no-arg")
-class TestMatchUser(object):
+class TestMatchUser:
def test_matches_configured_username(self):
result = load_config("match-user-explicit").lookup("anything")
assert result["hostname"] == "dumb"
@@ -955,7 +955,7 @@ class TestMatchUser(object):
# NOTE: highly derivative of previous suite due to the former's use of
# localuser fallback. Doesn't seem worth conflating/refactoring right now.
-class TestMatchLocalUser(object):
+class TestMatchLocalUser:
@patch("paramiko.config.getpass.getuser")
def test_matches_local_username(self, getuser):
getuser.return_value = "gandalf"
@@ -996,7 +996,7 @@ class TestMatchLocalUser(object):
load_config("match-localuser-no-arg")
-class TestComplexMatching(object):
+class TestComplexMatching:
# NOTE: this is still a cherry-pick of a few levels of complexity, there's
# no point testing literally all possible combinations.
diff --git a/tests/test_file.py b/tests/test_file.py
index b0147450..364bbce2 100644
--- a/tests/test_file.py
+++ b/tests/test_file.py
@@ -21,7 +21,6 @@ Some unit tests for the BufferedFile abstraction.
"""
import unittest
-import sys
from io import BytesIO
from paramiko.common import linefeed_byte, crlf, cr_byte
@@ -189,7 +188,7 @@ class BufferedFileTest(unittest.TestCase):
self.assertRaises(TypeError, f.write, object())
def test_write_unicode_as_binary(self):
- text = u"\xa7 why is writing text to a binary file allowed?\n"
+ text = "\xa7 why is writing text to a binary file allowed?\n"
with LoopbackFile("rb+") as f:
f.write(text)
self.assertEqual(f.read(), text.encode("utf-8"))
diff --git a/tests/test_gssapi.py b/tests/test_gssapi.py
index 23c3ef42..671f1ba0 100644
--- a/tests/test_gssapi.py
+++ b/tests/test_gssapi.py
@@ -37,7 +37,7 @@ from .util import needs_gssapi, KerberosTestCase, update_env
@needs_gssapi
class GSSAPITest(KerberosTestCase):
def setUp(self):
- super(GSSAPITest, self).setUp()
+ super().setUp()
# TODO: these vars should all come from os.environ or whatever the
# approved pytest method is for runtime-configuring test data.
self.krb5_mech = "1.2.840.113554.1.2.2"
diff --git a/tests/test_kex.py b/tests/test_kex.py
index 24fb8b81..c3bf2b0f 100644
--- a/tests/test_kex.py
+++ b/tests/test_kex.py
@@ -47,7 +47,7 @@ from paramiko.kex_curve25519 import KexCurve25519
def dummy_urandom(n):
- return byte_chr(0xcc) * n
+ return byte_chr(0xCC) * n
def dummy_generate_key_pair(obj):
@@ -69,7 +69,7 @@ def dummy_generate_key_pair(obj):
)
-class FakeKey(object):
+class FakeKey:
def __str__(self):
return "fake-key"
@@ -80,7 +80,7 @@ class FakeKey(object):
return b"fake-sig"
-class FakeModulusPack(object):
+class FakeModulusPack:
P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa
G = 2
@@ -88,7 +88,7 @@ class FakeModulusPack(object):
return self.G, self.P
-class FakeTransport(object):
+class FakeTransport:
local_version = "SSH-2.0-paramiko_1.0"
remote_version = "SSH-2.0-lame"
local_kex_init = "local-kex-init"
diff --git a/tests/test_kex_gss.py b/tests/test_kex_gss.py
index 26659ae3..d4868f4a 100644
--- a/tests/test_kex_gss.py
+++ b/tests/test_kex_gss.py
@@ -101,7 +101,7 @@ class GSSKexTest(KerberosTestCase):
self.tc = paramiko.SSHClient()
self.tc.get_host_keys().add(
- "[%s]:%d" % (self.hostname, self.port), "ssh-rsa", public_host_key
+ f"[{self.hostname}]:{self.port}", "ssh-rsa", public_host_key
)
self.tc.connect(
self.hostname,
diff --git a/tests/test_message.py b/tests/test_message.py
index 23b06858..3c5f961b 100644
--- a/tests/test_message.py
+++ b/tests/test_message.py
@@ -48,18 +48,18 @@ class MessageTest(unittest.TestCase):
msg = Message()
msg.add_boolean(True)
msg.add_boolean(False)
- msg.add_byte(byte_chr(0xf3))
+ msg.add_byte(byte_chr(0xF3))
- msg.add_bytes(zero_byte + byte_chr(0x3f))
+ msg.add_bytes(zero_byte + byte_chr(0x3F))
msg.add_list(["huey", "dewey", "louie"])
self.assertEqual(msg.asbytes(), self.__b)
msg = Message()
msg.add_int64(5)
- msg.add_int64(0xf5e4d3c2b109)
+ msg.add_int64(0xF5E4D3C2B109)
msg.add_mpint(17)
- msg.add_mpint(0xf5e4d3c2b109)
- msg.add_mpint(-0x65e4d3c2b109)
+ msg.add_mpint(0xF5E4D3C2B109)
+ msg.add_mpint(-0x65E4D3C2B109)
self.assertEqual(msg.asbytes(), self.__c)
def test_decode(self):
@@ -73,22 +73,22 @@ class MessageTest(unittest.TestCase):
msg = Message(self.__b)
self.assertEqual(msg.get_boolean(), True)
self.assertEqual(msg.get_boolean(), False)
- self.assertEqual(msg.get_byte(), byte_chr(0xf3))
- self.assertEqual(msg.get_bytes(2), zero_byte + byte_chr(0x3f))
+ self.assertEqual(msg.get_byte(), byte_chr(0xF3))
+ self.assertEqual(msg.get_bytes(2), zero_byte + byte_chr(0x3F))
self.assertEqual(msg.get_list(), ["huey", "dewey", "louie"])
msg = Message(self.__c)
self.assertEqual(msg.get_int64(), 5)
- self.assertEqual(msg.get_int64(), 0xf5e4d3c2b109)
+ self.assertEqual(msg.get_int64(), 0xF5E4D3C2B109)
self.assertEqual(msg.get_mpint(), 17)
- self.assertEqual(msg.get_mpint(), 0xf5e4d3c2b109)
- self.assertEqual(msg.get_mpint(), -0x65e4d3c2b109)
+ self.assertEqual(msg.get_mpint(), 0xF5E4D3C2B109)
+ self.assertEqual(msg.get_mpint(), -0x65E4D3C2B109)
def test_add(self):
msg = Message()
msg.add(5)
msg.add(0x1122334455)
- msg.add(0xf00000000000000000)
+ msg.add(0xF00000000000000000)
msg.add(True)
msg.add("cat")
msg.add(["a", "b"])
@@ -98,10 +98,16 @@ class MessageTest(unittest.TestCase):
msg = Message(self.__d)
self.assertEqual(msg.get_adaptive_int(), 5)
self.assertEqual(msg.get_adaptive_int(), 0x1122334455)
- self.assertEqual(msg.get_adaptive_int(), 0xf00000000000000000)
+ self.assertEqual(msg.get_adaptive_int(), 0xF00000000000000000)
self.assertEqual(msg.get_so_far(), self.__d[:29])
self.assertEqual(msg.get_remainder(), self.__d[29:])
msg.rewind()
self.assertEqual(msg.get_adaptive_int(), 5)
self.assertEqual(msg.get_so_far(), self.__d[:4])
self.assertEqual(msg.get_remainder(), self.__d[4:])
+
+ def test_bytes_str_and_repr(self):
+ msg = Message(self.__d)
+ assert str(msg) == f"paramiko.Message({self.__d!r})"
+ assert repr(msg) == str(msg)
+ assert bytes(msg) == msg.asbytes() == self.__d
diff --git a/tests/test_packetizer.py b/tests/test_packetizer.py
index 27dee358..d4dd58ad 100644
--- a/tests/test_packetizer.py
+++ b/tests/test_packetizer.py
@@ -34,7 +34,7 @@ from .loop import LoopSocket
x55 = byte_chr(0x55)
-x1f = byte_chr(0x1f)
+x1f = byte_chr(0x1F)
class PacketizerTest(unittest.TestCase):
diff --git a/tests/test_pkey.py b/tests/test_pkey.py
index e2d0a1af..4d74d8aa 100644
--- a/tests/test_pkey.py
+++ b/tests/test_pkey.py
@@ -731,11 +731,9 @@ class KeyTest(unittest.TestCase):
key.write_private_key_file(new, password=newpassword)
# Expected open via os module
os_.open.assert_called_once_with(
- new, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, o600
+ new, flags=os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode=o600
)
os_.fdopen.assert_called_once_with(os_.open.return_value, "w")
- # Old chmod still around for backwards compat
- os_.chmod.assert_called_once_with(new, o600)
assert (
key._write_private_key.call_args[0][0]
== os_.fdopen.return_value.__enter__.return_value
diff --git a/tests/test_proxy.py b/tests/test_proxy.py
index d50cc562..83bdc040 100644
--- a/tests/test_proxy.py
+++ b/tests/test_proxy.py
@@ -7,7 +7,7 @@ from pytest import raises
from paramiko import ProxyCommand, ProxyCommandFailure
-class TestProxyCommand(object):
+class TestProxyCommand:
@patch("paramiko.proxy.subprocess")
def test_init_takes_command_string(self, subprocess):
ProxyCommand(command_line="do a thing")
diff --git a/tests/test_sftp.py b/tests/test_sftp.py
index c273feaa..2cd68d94 100644
--- a/tests/test_sftp.py
+++ b/tests/test_sftp.py
@@ -34,8 +34,9 @@ from tempfile import mkstemp
import pytest
from paramiko.common import o777, o600, o666, o644
-from tests import requireNonAsciiLocale
from paramiko.sftp_attr import SFTPAttributes
+from paramiko.util import b, u
+from tests import requireNonAsciiLocale
from .util import needs_builtin
from .util import slow
@@ -92,7 +93,7 @@ utf8_folder = b"/\xc3\xbcnic\xc3\xb8\x64\x65"
@slow
-class TestSFTP(object):
+class TestSFTP:
def test_file(self, sftp):
"""
verify that we can create a file.
@@ -627,11 +628,8 @@ class TestSFTP(object):
sftp.open(sftp.FOLDER + "/unusual.txt", "wx").close()
try:
- try:
+ with pytest.raises(IOError):
sftp.open(sftp.FOLDER + "/unusual.txt", "wx")
- self.fail("expected exception")
- except IOError:
- pass
finally:
sftp.unlink(sftp.FOLDER + "/unusual.txt")
@@ -641,15 +639,13 @@ class TestSFTP(object):
"""
with sftp.open(sftp.FOLDER + "/something", "w") as f:
f.write("okay")
-
try:
sftp.rename(
sftp.FOLDER + "/something", sftp.FOLDER + "/" + unicode_folder
)
sftp.open(b(sftp.FOLDER) + utf8_folder, "r")
- except Exception as e:
- self.fail("exception " + str(e))
- sftp.unlink(b(sftp.FOLDER) + utf8_folder)
+ finally:
+ sftp.unlink(b(sftp.FOLDER) + utf8_folder)
def test_utf8_chdir(self, sftp):
sftp.mkdir(sftp.FOLDER + "/" + unicode_folder)
@@ -779,18 +775,18 @@ class TestSFTP(object):
def test_non_utf8_data(self, sftp):
"""Test write() and read() of non utf8 data"""
try:
- with sftp.open("%s/nonutf8data" % sftp.FOLDER, "w") as f:
+ with sftp.open(f"{sftp.FOLDER}/nonutf8data", "w") as f:
f.write(NON_UTF8_DATA)
- with sftp.open("%s/nonutf8data" % sftp.FOLDER, "r") as f:
+ with sftp.open(f"{sftp.FOLDER}/nonutf8data", "r") as f:
data = f.read()
assert data == NON_UTF8_DATA
- with sftp.open("%s/nonutf8data" % sftp.FOLDER, "wb") as f:
+ with sftp.open(f"{sftp.FOLDER}/nonutf8data", "wb") as f:
f.write(NON_UTF8_DATA)
- with sftp.open("%s/nonutf8data" % sftp.FOLDER, "rb") as f:
+ with sftp.open(f"{sftp.FOLDER}/nonutf8data", "rb") as f:
data = f.read()
assert data == NON_UTF8_DATA
finally:
- sftp.remove("%s/nonutf8data" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/nonutf8data")
@requireNonAsciiLocale("LC_TIME")
def test_sftp_attributes_locale_time(self, sftp):
@@ -811,26 +807,26 @@ class TestSFTP(object):
"""Test write() using a buffer instance."""
data = 3 * b"A potentially large block of data to chunk up.\n"
try:
- with sftp.open("%s/write_buffer" % sftp.FOLDER, "wb") as f:
+ with sftp.open(f"{sftp.FOLDER}/write_buffer", "wb") as f:
for offset in range(0, len(data), 8):
f.write(buffer(data, offset, 8)) # noqa
- with sftp.open("%s/write_buffer" % sftp.FOLDER, "rb") as f:
+ with sftp.open(f"{sftp.FOLDER}/write_buffer", "rb") as f:
assert f.read() == data
finally:
- sftp.remove("%s/write_buffer" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/write_buffer")
@needs_builtin("memoryview")
def test_write_memoryview(self, sftp):
"""Test write() using a memoryview instance."""
data = 3 * b"A potentially large block of data to chunk up.\n"
try:
- with sftp.open("%s/write_memoryview" % sftp.FOLDER, "wb") as f:
+ with sftp.open(f"{sftp.FOLDER}/write_memoryview", "wb") as f:
view = memoryview(data)
for offset in range(0, len(data), 8):
f.write(view[offset : offset + 8])
- with sftp.open("%s/write_memoryview" % sftp.FOLDER, "rb") as f:
+ with sftp.open(f"{sftp.FOLDER}/write_memoryview", "rb") as f:
assert f.read() == data
finally:
- sftp.remove("%s/write_memoryview" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/write_memoryview")
diff --git a/tests/test_sftp_big.py b/tests/test_sftp_big.py
index 4643bcaa..5192f657 100644
--- a/tests/test_sftp_big.py
+++ b/tests/test_sftp_big.py
@@ -34,7 +34,7 @@ from .util import slow
@slow
-class TestBigSFTP(object):
+class TestBigSFTP:
def test_lots_of_files(self, sftp):
"""
create a bunch of files over the same session.
@@ -42,24 +42,23 @@ class TestBigSFTP(object):
numfiles = 100
try:
for i in range(numfiles):
- with sftp.open(
- "%s/file%d.txt" % (sftp.FOLDER, i), "w", 1
- ) as f:
- f.write("this is file #%d.\n" % i)
- sftp.chmod("%s/file%d.txt" % (sftp.FOLDER, i), o660)
+ target = f"{sftp.FOLDER}/file{i}.txt"
+ with sftp.open(target, "w", 1) as f:
+ f.write(f"this is file #{i}.\n")
+ sftp.chmod(target, o660)
        # now make sure every file is there, by creating a list of filenames
# and reading them in random order.
numlist = list(range(numfiles))
while len(numlist) > 0:
r = numlist[random.randint(0, len(numlist) - 1)]
- with sftp.open("%s/file%d.txt" % (sftp.FOLDER, r)) as f:
- assert f.readline() == "this is file #%d.\n" % r
+ with sftp.open(f"{sftp.FOLDER}/file{r}.txt") as f:
+ assert f.readline() == f"this is file #{r}.\n"
numlist.remove(r)
finally:
for i in range(numfiles):
try:
- sftp.remove("%s/file%d.txt" % (sftp.FOLDER, i))
+ sftp.remove(f"{sftp.FOLDER}/file{i}.txt")
except:
pass
@@ -70,7 +69,7 @@ class TestBigSFTP(object):
kblob = 1024 * b"x"
start = time.time()
try:
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "w") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w") as f:
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
@@ -78,21 +77,21 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
end = time.time()
- sys.stderr.write("%ds " % round(end - start))
+ sys.stderr.write(f"{round(end - start)}s")
start = time.time()
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f:
for n in range(1024):
data = f.read(1024)
assert data == kblob
end = time.time()
- sys.stderr.write("%ds " % round(end - start))
+ sys.stderr.write(f"{round(end - start)}s")
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_big_file_pipelined(self, sftp):
"""
@@ -101,7 +100,7 @@ class TestBigSFTP(object):
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
start = time.time()
try:
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -110,13 +109,13 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
end = time.time()
- sys.stderr.write("%ds " % round(end - start))
+ sys.stderr.write(f"{round(end - start)}s")
start = time.time()
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
@@ -135,14 +134,14 @@ class TestBigSFTP(object):
n += chunk
end = time.time()
- sys.stderr.write("%ds " % round(end - start))
+ sys.stderr.write(f"{round(end - start)}s")
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_prefetch_seek(self, sftp):
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -151,14 +150,14 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
start = time.time()
k2blob = kblob + kblob
chunk = 793
for i in range(10):
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
base_offset = (512 * 1024) + 17 * random.randint(
@@ -175,14 +174,14 @@ class TestBigSFTP(object):
assert data == k2blob[n_offset : n_offset + chunk]
offset += chunk
end = time.time()
- sys.stderr.write("%ds " % round(end - start))
+ sys.stderr.write(f"{round(end - start)}s")
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_readv_seek(self, sftp):
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -191,14 +190,14 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
start = time.time()
k2blob = kblob + kblob
chunk = 793
for i in range(10):
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
base_offset = (512 * 1024) + 17 * random.randint(
1000, 2000
)
@@ -215,9 +214,9 @@ class TestBigSFTP(object):
n_offset = offset % 1024
assert next(ret) == k2blob[n_offset : n_offset + chunk]
end = time.time()
- sys.stderr.write("%ds " % round(end - start))
+ sys.stderr.write(f"{round(end - start)}s")
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_lots_of_prefetching(self, sftp):
"""
@@ -226,7 +225,7 @@ class TestBigSFTP(object):
"""
kblob = 1024 * b"x"
try:
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "w") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -235,14 +234,14 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
for i in range(10):
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
for n in range(1024):
@@ -252,7 +251,7 @@ class TestBigSFTP(object):
sys.stderr.write(".")
sys.stderr.write(" ")
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_prefetch_readv(self, sftp):
"""
@@ -260,7 +259,7 @@ class TestBigSFTP(object):
"""
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -269,10 +268,10 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
data = f.read(1024)
@@ -293,7 +292,7 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_large_readv(self, sftp):
"""
@@ -302,7 +301,7 @@ class TestBigSFTP(object):
"""
kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
@@ -311,10 +310,10 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
- with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f:
data = list(f.readv([(23 * 1024, 128 * 1024)]))
assert len(data) == 1
data = data[0]
@@ -322,7 +321,7 @@ class TestBigSFTP(object):
sys.stderr.write(" ")
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_big_file_big_buffer(self, sftp):
"""
@@ -330,16 +329,14 @@ class TestBigSFTP(object):
"""
mblob = 1024 * 1024 * "x"
try:
- with sftp.open(
- "%s/hongry.txt" % sftp.FOLDER, "w", 128 * 1024
- ) as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w", 128 * 1024) as f:
f.write(mblob)
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
def test_big_file_renegotiate(self, sftp):
"""
@@ -349,26 +346,22 @@ class TestBigSFTP(object):
t.packetizer.REKEY_BYTES = 512 * 1024
k32blob = 32 * 1024 * "x"
try:
- with sftp.open(
- "%s/hongry.txt" % sftp.FOLDER, "w", 128 * 1024
- ) as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w", 128 * 1024) as f:
for i in range(32):
f.write(k32blob)
assert (
- sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024
)
assert t.H != t.session_id
# try to read it too.
- with sftp.open(
- "%s/hongry.txt" % sftp.FOLDER, "r", 128 * 1024
- ) as f:
+ with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r", 128 * 1024) as f:
file_size = f.stat().st_size
f.prefetch(file_size)
total = 0
while total < 1024 * 1024:
total += len(f.read(32 * 1024))
finally:
- sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+ sftp.remove(f"{sftp.FOLDER}/hongry.txt")
t.packetizer.REKEY_BYTES = pow(2, 30)
diff --git a/tests/test_ssh_gss.py b/tests/test_ssh_gss.py
index 4d171854..a8175ccb 100644
--- a/tests/test_ssh_gss.py
+++ b/tests/test_ssh_gss.py
@@ -106,7 +106,7 @@ class GSSAuthTest(KerberosTestCase):
self.tc = paramiko.SSHClient()
self.tc.set_missing_host_key_policy(paramiko.WarningPolicy())
self.tc.get_host_keys().add(
- "[%s]:%d" % (self.addr, self.port), "ssh-rsa", public_host_key
+ f"[{self.addr}]:{self.port}", "ssh-rsa", public_host_key
)
self.tc.connect(
hostname=self.addr,
@@ -114,7 +114,7 @@ class GSSAuthTest(KerberosTestCase):
username=self.username,
gss_host=self.hostname,
gss_auth=True,
- **kwargs
+ **kwargs,
)
self.event.wait(1.0)
diff --git a/tests/test_transport.py b/tests/test_transport.py
index 177e83da..4d28199a 100644
--- a/tests/test_transport.py
+++ b/tests/test_transport.py
@@ -20,7 +20,6 @@
Some unit tests for the ssh2 protocol in Transport.
"""
-from __future__ import with_statement
from binascii import hexlify
from contextlib import contextmanager
@@ -686,7 +685,7 @@ class TransportTest(unittest.TestCase):
self.assertEqual(chan.send_ready(), True)
total = 0
K = "*" * 1024
- limit = 1 + (64 * 2 ** 15)
+ limit = 1 + (64 * 2**15)
while total < limit:
chan.send(K)
total += len(K)
@@ -874,7 +873,7 @@ class TransportTest(unittest.TestCase):
for val, correct in [
(4095, MIN_PACKET_SIZE),
(None, DEFAULT_MAX_PACKET_SIZE),
- (2 ** 32, MAX_WINDOW_SIZE),
+ (2**32, MAX_WINDOW_SIZE),
]:
self.assertEqual(self.tc._sanitize_packet_size(val), correct)
@@ -885,7 +884,7 @@ class TransportTest(unittest.TestCase):
for val, correct in [
(32767, MIN_WINDOW_SIZE),
(None, DEFAULT_WINDOW_SIZE),
- (2 ** 32, MAX_WINDOW_SIZE),
+ (2**32, MAX_WINDOW_SIZE),
]:
self.assertEqual(self.tc._sanitize_window_size(val), correct)
@@ -904,7 +903,7 @@ class TransportTest(unittest.TestCase):
class SlowPacketizer(Packetizer):
def read_message(self):
time.sleep(1)
- return super(SlowPacketizer, self).read_message()
+ return super().read_message()
# NOTE: prettttty sure since the replaced .packetizer Packetizer is now
# no longer doing anything with its copy of the socket...everything'll
@@ -950,7 +949,7 @@ class TransportTest(unittest.TestCase):
verify behaviours sending various instances to a channel
"""
self.setup_test_server()
- text = u"\xa7 slice me nicely"
+ text = "\xa7 slice me nicely"
with self.tc.open_session() as chan:
schan = self.ts.accept(1.0)
if schan is None:
diff --git a/tests/test_util.py b/tests/test_util.py
index 1869a7bb..ec03846b 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -21,7 +21,6 @@ Some unit tests for utility functions.
"""
from binascii import hexlify
-import errno
import os
from hashlib import sha1
import unittest
@@ -29,7 +28,6 @@ import unittest
import paramiko
import paramiko.util
from paramiko.util import safe_string
-from paramiko.common import byte_ord
test_hosts_file = """\
@@ -87,12 +85,12 @@ class UtilTest(unittest.TestCase):
assert name in paramiko.__all__
def test_generate_key_bytes(self):
- x = paramiko.util.generate_key_bytes(
+ key_bytes = paramiko.util.generate_key_bytes(
sha1, b"ABCDEFGH", "This is my secret passphrase.", 64
)
- hex = "".join(["%02x" % byte_ord(c) for c in x])
+ hexy = "".join([f"{byte:02x}" for byte in key_bytes])
hexpected = "9110e2f6793b69363e58173e9436b13a5a4b339005741d5c680e505f57d871347b4239f14fb5c46e857d5e100424873ba849ac699cea98d729e57b3e84378e8b" # noqa
- assert hex == hexpected
+ assert hexy == hexpected
def test_host_keys(self):
with open("hostfile.temp", "w") as f:
@@ -109,39 +107,6 @@ class UtilTest(unittest.TestCase):
finally:
os.unlink("hostfile.temp")
- def test_eintr_retry(self):
- assert "foo" == paramiko.util.retry_on_signal(lambda: "foo")
-
- # Variables that are set by raises_intr
- intr_errors_remaining = [3]
- call_count = [0]
-
- def raises_intr():
- call_count[0] += 1
- if intr_errors_remaining[0] > 0:
- intr_errors_remaining[0] -= 1
- raise IOError(errno.EINTR, "file", "interrupted system call")
-
- self.assertTrue(paramiko.util.retry_on_signal(raises_intr) is None)
- assert 0 == intr_errors_remaining[0]
- assert 4 == call_count[0]
-
- def raises_ioerror_not_eintr():
- raise IOError(errno.ENOENT, "file", "file not found")
-
- self.assertRaises(
- IOError,
- lambda: paramiko.util.retry_on_signal(raises_ioerror_not_eintr),
- )
-
- def raises_other_exception():
- raise AssertionError("foo")
-
- self.assertRaises(
- AssertionError,
- lambda: paramiko.util.retry_on_signal(raises_other_exception),
- )
-
def test_clamp_value(self):
assert 32768 == paramiko.util.clamp_value(32767, 32768, 32769)
assert 32767 == paramiko.util.clamp_value(32767, 32765, 32769)
diff --git a/tests/util.py b/tests/util.py
index f3efb6b0..0639f8ae 100644
--- a/tests/util.py
+++ b/tests/util.py
@@ -61,7 +61,7 @@ if (
): # add other vars as needed
# The environment provides the required information
- class DummyK5Realm(object):
+ class DummyK5Realm:
def __init__(self):
for k in os.environ:
if not k.startswith("K5TEST_"):
@@ -78,7 +78,6 @@ if (
def tearDownClass(cls):
del cls.realm
-
else:
try:
# Try to setup a kerberos environment
@@ -138,8 +137,8 @@ def is_low_entropy():
"""
Attempts to detect whether running interpreter is low-entropy.
- Classified as being in 32-bit mode under Python 2, or 32-bit mode and with
- the hash seed set to zero under Python 3.
+ "low-entropy" is defined as being in 32-bit mode and with the hash seed set
+ to zero.
"""
is_32bit = struct.calcsize("P") == 32 / 8
# I don't see a way to tell internally if the hash seed was set this