summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJon Dufresne <jon.dufresne@gmail.com>2020-08-06 15:15:02 -0700
committerGitHub <noreply@github.com>2020-08-06 15:15:02 -0700
commit8c5a41baf0bd2a1388d601e5b49d06b91997ccb8 (patch)
tree89cad14d95927f8ea696706c2b1399dde17fa244
parentc6f13c3b69d32257ab75ba9d824e5b555f91572c (diff)
downloadredis-py-8c5a41baf0bd2a1388d601e5b49d06b91997ccb8.tar.gz
Remove support for end-of-life Python 2.7 (#1318)
Remove support for end-of-life Python 2.7 Python 2.7 is end of life. It is no longer receiving bug fixes, including for security issues. Python 2.7 went EOL on 2020-01-01. For additional details on supported Python versions, see: Supported: https://devguide.python.org/#status-of-python-branches EOL: https://devguide.python.org/devcycle/#end-of-life-branches Removing support for EOL Pythons will reduce testing and maintenance resources while allowing the library to move towards a modern Python 3 style. Python 2.7 users can continue to use the previous version of redis-py. Was able to simplify the code: - Removed redis._compat module - Removed __future__ imports - Removed object from class definition (all classes are new style) - Removed long (Python 3 unified numeric types) - Removed deprecated __nonzero__ method - Use simpler Python 3 super() syntax - Use unified OSError exception - Use yield from syntax Co-authored-by: Andy McCurdy <andy@andymccurdy.com>
-rw-r--r--.dockerignore2
-rw-r--r--CHANGES1
-rw-r--r--README.rst2
-rw-r--r--benchmarks/base.py5
-rw-r--r--benchmarks/basic_operations.py12
-rw-r--r--benchmarks/command_packer_benchmark.py5
-rw-r--r--docs/conf.py26
-rw-r--r--redis/_compat.py188
-rwxr-xr-xredis/client.py420
-rwxr-xr-xredis/connection.py129
-rw-r--r--redis/lock.py10
-rw-r--r--redis/sentinel.py20
-rw-r--r--redis/utils.py15
-rw-r--r--setup.cfg8
-rw-r--r--tests/conftest.py8
-rw-r--r--tests/test_commands.py38
-rw-r--r--tests/test_connection.py2
-rw-r--r--tests/test_connection_pool.py20
-rw-r--r--tests/test_encoding.py31
-rw-r--r--tests/test_lock.py6
-rw-r--r--tests/test_monitor.py8
-rw-r--r--tests/test_multiprocessing.py2
-rw-r--r--tests/test_pipeline.py27
-rw-r--r--tests/test_pubsub.py36
-rw-r--r--tests/test_scripting.py3
-rw-r--r--tests/test_sentinel.py5
-rw-r--r--tox.ini3
27 files changed, 378 insertions, 654 deletions
diff --git a/.dockerignore b/.dockerignore
index 08ea4a6..7b9bc9b 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,3 +1,5 @@
**/__pycache__
**/*.pyc
.tox
+.coverage
+.coverage.*
diff --git a/CHANGES b/CHANGES
index 2988fde..b164991 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,4 +1,5 @@
* (in development)
+ * Removed support for end of life Python 2.7.
* Provide a development and testing environment via docker. Thanks
@abrookins. #1365
* Added support for the LPOS command available in Redis 6.0.6. Thanks
diff --git a/README.rst b/README.rst
index 4f2cec8..438e33e 100644
--- a/README.rst
+++ b/README.rst
@@ -87,7 +87,7 @@ provide an upgrade path for users migrating from 2.X to 3.0.
Python Version Support
^^^^^^^^^^^^^^^^^^^^^^
-redis-py 3.0 supports Python 2.7 and Python 3.5+.
+redis-py supports Python 3.5+.
Client Classes: Redis and StrictRedis
diff --git a/benchmarks/base.py b/benchmarks/base.py
index 44e9341..8c13afe 100644
--- a/benchmarks/base.py
+++ b/benchmarks/base.py
@@ -3,10 +3,9 @@ import itertools
import redis
import sys
import timeit
-from redis._compat import izip
-class Benchmark(object):
+class Benchmark:
ARGUMENTS = ()
def __init__(self):
@@ -34,7 +33,7 @@ class Benchmark(object):
group_names = [group['name'] for group in self.ARGUMENTS]
group_values = [group['values'] for group in self.ARGUMENTS]
for value_set in itertools.product(*group_values):
- pairs = list(izip(group_names, value_set))
+ pairs = list(zip(group_names, value_set))
arg_string = ', '.join(['%s=%s' % (p[0], p[1]) for p in pairs])
sys.stdout.write('Benchmark: %s... ' % arg_string)
sys.stdout.flush()
diff --git a/benchmarks/basic_operations.py b/benchmarks/basic_operations.py
index a4b675d..9446343 100644
--- a/benchmarks/basic_operations.py
+++ b/benchmarks/basic_operations.py
@@ -1,13 +1,8 @@
-from __future__ import print_function
import redis
import time
-import sys
from functools import wraps
from argparse import ArgumentParser
-if sys.version_info[0] == 3:
- long = int
-
def parse_args():
parser = ArgumentParser()
@@ -47,9 +42,9 @@ def run():
def timer(func):
@wraps(func)
def wrapper(*args, **kwargs):
- start = time.clock()
+ start = time.monotonic()
ret = func(*args, **kwargs)
- duration = time.clock() - start
+ duration = time.monotonic() - start
if 'num' in kwargs:
count = kwargs['num']
else:
@@ -57,7 +52,7 @@ def timer(func):
print('{} - {} Requests'.format(func.__name__, count))
print('Duration = {}'.format(duration))
print('Rate = {}'.format(count/duration))
- print('')
+ print()
return ret
return wrapper
@@ -185,7 +180,6 @@ def hmset(conn, num, pipeline_size, data_size):
set_data = {'str_value': 'string',
'int_value': 123456,
- 'long_value': long(123456),
'float_value': 123456.0}
for i in range(num):
conn.hmset('hmset_key', set_data)
diff --git a/benchmarks/command_packer_benchmark.py b/benchmarks/command_packer_benchmark.py
index 1216df6..823a8c8 100644
--- a/benchmarks/command_packer_benchmark.py
+++ b/benchmarks/command_packer_benchmark.py
@@ -1,7 +1,6 @@
import socket
from redis.connection import (Connection, SYM_STAR, SYM_DOLLAR, SYM_EMPTY,
SYM_CRLF)
-from redis._compat import imap
from base import Benchmark
@@ -29,7 +28,7 @@ class StringJoiningConnection(Connection):
args_output = SYM_EMPTY.join([
SYM_EMPTY.join(
(SYM_DOLLAR, str(len(k)).encode(), SYM_CRLF, k, SYM_CRLF))
- for k in imap(self.encoder.encode, args)])
+ for k in map(self.encoder.encode, args)])
output = SYM_EMPTY.join(
(SYM_STAR, str(len(args)).encode(), SYM_CRLF, args_output))
return output
@@ -61,7 +60,7 @@ class ListJoiningConnection(Connection):
buff = SYM_EMPTY.join(
(SYM_STAR, str(len(args)).encode(), SYM_CRLF))
- for k in imap(self.encoder.encode, args):
+ for k in map(self.encoder.encode, args):
if len(buff) > 6000 or len(k) > 6000:
buff = SYM_EMPTY.join(
(buff, SYM_DOLLAR, str(len(k)).encode(), SYM_CRLF))
diff --git a/docs/conf.py b/docs/conf.py
index 690be03..3eb3f33 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-#
# redis-py documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 8 00:47:08 2013.
#
@@ -43,8 +41,8 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
-project = u'redis-py'
-copyright = u'2016, Andy McCurdy'
+project = 'redis-py'
+copyright = '2016, Andy McCurdy'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -188,8 +186,8 @@ latex_elements = {
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
- ('index', 'redis-py.tex', u'redis-py Documentation',
- u'Andy McCurdy', 'manual'),
+ ('index', 'redis-py.tex', 'redis-py Documentation',
+ 'Andy McCurdy', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -218,8 +216,8 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- ('index', 'redis-py', u'redis-py Documentation',
- [u'Andy McCurdy'], 1)
+ ('index', 'redis-py', 'redis-py Documentation',
+ ['Andy McCurdy'], 1)
]
# If true, show URL addresses after external links.
@@ -232,8 +230,8 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- ('index', 'redis-py', u'redis-py Documentation',
- u'Andy McCurdy', 'redis-py',
+ ('index', 'redis-py', 'redis-py Documentation',
+ 'Andy McCurdy', 'redis-py',
'One line description of project.', 'Miscellaneous'),
]
@@ -246,7 +244,7 @@ texinfo_documents = [
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
-epub_title = u'redis-py'
-epub_author = u'Andy McCurdy'
-epub_publisher = u'Andy McCurdy'
-epub_copyright = u'2011, Andy McCurdy'
+epub_title = 'redis-py'
+epub_author = 'Andy McCurdy'
+epub_publisher = 'Andy McCurdy'
+epub_copyright = '2011, Andy McCurdy'
diff --git a/redis/_compat.py b/redis/_compat.py
deleted file mode 100644
index a0036de..0000000
--- a/redis/_compat.py
+++ /dev/null
@@ -1,188 +0,0 @@
-"""Internal module for Python 2 backwards compatibility."""
-# flake8: noqa
-import errno
-import socket
-import sys
-
-
-def sendall(sock, *args, **kwargs):
- return sock.sendall(*args, **kwargs)
-
-
-def shutdown(sock, *args, **kwargs):
- return sock.shutdown(*args, **kwargs)
-
-
-def ssl_wrap_socket(context, sock, *args, **kwargs):
- return context.wrap_socket(sock, *args, **kwargs)
-
-
-# For Python older than 3.5, retry EINTR.
-if sys.version_info[0] < 3 or (sys.version_info[0] == 3 and
- sys.version_info[1] < 5):
- # Adapted from https://bugs.python.org/review/23863/patch/14532/54418
- import time
-
- # Wrapper for handling interruptable system calls.
- def _retryable_call(s, func, *args, **kwargs):
- # Some modules (SSL) use the _fileobject wrapper directly and
- # implement a smaller portion of the socket interface, thus we
- # need to let them continue to do so.
- timeout, deadline = None, 0.0
- attempted = False
- try:
- timeout = s.gettimeout()
- except AttributeError:
- pass
-
- if timeout:
- deadline = time.time() + timeout
-
- try:
- while True:
- if attempted and timeout:
- now = time.time()
- if now >= deadline:
- raise socket.error(errno.EWOULDBLOCK, "timed out")
- else:
- # Overwrite the timeout on the socket object
- # to take into account elapsed time.
- s.settimeout(deadline - now)
- try:
- attempted = True
- return func(*args, **kwargs)
- except socket.error as e:
- if e.args[0] == errno.EINTR:
- continue
- raise
- finally:
- # Set the existing timeout back for future
- # calls.
- if timeout:
- s.settimeout(timeout)
-
- def recv(sock, *args, **kwargs):
- return _retryable_call(sock, sock.recv, *args, **kwargs)
-
- def recv_into(sock, *args, **kwargs):
- return _retryable_call(sock, sock.recv_into, *args, **kwargs)
-
-else: # Python 3.5 and above automatically retry EINTR
- def recv(sock, *args, **kwargs):
- return sock.recv(*args, **kwargs)
-
- def recv_into(sock, *args, **kwargs):
- return sock.recv_into(*args, **kwargs)
-
-if sys.version_info[0] < 3:
- # In Python 3, the ssl module raises socket.timeout whereas it raises
- # SSLError in Python 2. For compatibility between versions, ensure
- # socket.timeout is raised for both.
- import functools
-
- try:
- from ssl import SSLError as _SSLError
- except ImportError:
- class _SSLError(Exception):
- """A replacement in case ssl.SSLError is not available."""
- pass
-
- _EXPECTED_SSL_TIMEOUT_MESSAGES = (
- "The handshake operation timed out",
- "The read operation timed out",
- "The write operation timed out",
- )
-
- def _handle_ssl_timeout(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except _SSLError as e:
- message = len(e.args) == 1 and unicode(e.args[0]) or ''
- if any(x in message for x in _EXPECTED_SSL_TIMEOUT_MESSAGES):
- # Raise socket.timeout for compatibility with Python 3.
- raise socket.timeout(*e.args)
- raise
- return wrapper
-
- recv = _handle_ssl_timeout(recv)
- recv_into = _handle_ssl_timeout(recv_into)
- sendall = _handle_ssl_timeout(sendall)
- shutdown = _handle_ssl_timeout(shutdown)
- ssl_wrap_socket = _handle_ssl_timeout(ssl_wrap_socket)
-
-if sys.version_info[0] < 3:
- from urllib import unquote
- from urlparse import parse_qs, urlparse
- from itertools import imap, izip
- from string import letters as ascii_letters
- from Queue import Queue
-
- # special unicode handling for python2 to avoid UnicodeDecodeError
- def safe_unicode(obj, *args):
- """ return the unicode representation of obj """
- try:
- return unicode(obj, *args)
- except UnicodeDecodeError:
- # obj is byte string
- ascii_text = str(obj).encode('string_escape')
- return unicode(ascii_text)
-
- def iteritems(x):
- return x.iteritems()
-
- def iterkeys(x):
- return x.iterkeys()
-
- def itervalues(x):
- return x.itervalues()
-
- def nativestr(x):
- return x if isinstance(x, str) else x.encode('utf-8', 'replace')
-
- def next(x):
- return x.next()
-
- unichr = unichr
- xrange = xrange
- basestring = basestring
- unicode = unicode
- long = long
- BlockingIOError = socket.error
-else:
- from urllib.parse import parse_qs, unquote, urlparse
- from string import ascii_letters
- from queue import Queue
-
- def iteritems(x):
- return iter(x.items())
-
- def iterkeys(x):
- return iter(x.keys())
-
- def itervalues(x):
- return iter(x.values())
-
- def nativestr(x):
- return x if isinstance(x, str) else x.decode('utf-8', 'replace')
-
- def safe_unicode(value):
- if isinstance(value, bytes):
- value = value.decode('utf-8', 'replace')
- return str(value)
-
- next = next
- unichr = chr
- imap = map
- izip = zip
- xrange = range
- basestring = str
- unicode = str
- long = int
- BlockingIOError = BlockingIOError
-
-try: # Python 3
- from queue import LifoQueue, Empty, Full
-except ImportError: # Python 2
- from Queue import LifoQueue, Empty, Full
diff --git a/redis/client.py b/redis/client.py
index 5c95e9f..881fd65 100755
--- a/redis/client.py
+++ b/redis/client.py
@@ -1,4 +1,3 @@
-from __future__ import unicode_literals
from itertools import chain
import datetime
import warnings
@@ -7,8 +6,6 @@ import threading
import time as mod_time
import re
import hashlib
-from redis._compat import (basestring, imap, iteritems, iterkeys,
- itervalues, izip, long, nativestr, safe_unicode)
from redis.connection import (ConnectionPool, UnixDomainSocketConnection,
SSLConnection)
from redis.lock import Lock
@@ -24,6 +21,7 @@ from redis.exceptions import (
WatchError,
ModuleError,
)
+from redis.utils import safe_str, str_if_bytes
SYM_EMPTY = b''
EMPTY_RESPONSE = 'EMPTY_RESPONSE'
@@ -35,7 +33,7 @@ def list_or_args(keys, args):
iter(keys)
# a string or bytes instance can be iterated, but indicates
# keys wasn't passed as a list
- if isinstance(keys, (basestring, bytes)):
+ if isinstance(keys, (bytes, str)):
keys = [keys]
else:
keys = list(keys)
@@ -61,45 +59,38 @@ def string_keys_to_dict(key_string, callback):
return dict.fromkeys(key_string.split(), callback)
-def dict_merge(*dicts):
- merged = {}
- for d in dicts:
- merged.update(d)
- return merged
-
-
class CaseInsensitiveDict(dict):
"Case insensitive dict implementation. Assumes string keys only."
def __init__(self, data):
- for k, v in iteritems(data):
+ for k, v in data.items():
self[k.upper()] = v
def __contains__(self, k):
- return super(CaseInsensitiveDict, self).__contains__(k.upper())
+ return super().__contains__(k.upper())
def __delitem__(self, k):
- super(CaseInsensitiveDict, self).__delitem__(k.upper())
+ super().__delitem__(k.upper())
def __getitem__(self, k):
- return super(CaseInsensitiveDict, self).__getitem__(k.upper())
+ return super().__getitem__(k.upper())
def get(self, k, default=None):
- return super(CaseInsensitiveDict, self).get(k.upper(), default)
+ return super().get(k.upper(), default)
def __setitem__(self, k, v):
- super(CaseInsensitiveDict, self).__setitem__(k.upper(), v)
+ super().__setitem__(k.upper(), v)
def update(self, data):
data = CaseInsensitiveDict(data)
- super(CaseInsensitiveDict, self).update(data)
+ super().update(data)
def parse_debug_object(response):
"Parse the results of Redis's DEBUG OBJECT command into a Python dict"
# The 'type' of the object is the first item in the response, but isn't
# prefixed with a name
- response = nativestr(response)
+ response = str_if_bytes(response)
response = 'type:' + response
response = dict(kv.split(':') for kv in response.split())
@@ -123,7 +114,7 @@ def parse_object(response, infotype):
def parse_info(response):
"Parse the result of Redis's INFO command into a Python dict"
info = {}
- response = nativestr(response)
+ response = str_if_bytes(response)
def get_value(value):
if ',' not in value or '=' not in value:
@@ -163,7 +154,7 @@ def parse_memory_stats(response, **kwargs):
stats = pairs_to_dict(response,
decode_keys=True,
decode_string_values=True)
- for key, value in iteritems(stats):
+ for key, value in stats.items():
if key.startswith('db.'):
stats[key] = pairs_to_dict(value,
decode_keys=True,
@@ -211,52 +202,48 @@ def parse_sentinel_state(item):
def parse_sentinel_master(response):
- return parse_sentinel_state(imap(nativestr, response))
+ return parse_sentinel_state(map(str_if_bytes, response))
def parse_sentinel_masters(response):
result = {}
for item in response:
- state = parse_sentinel_state(imap(nativestr, item))
+ state = parse_sentinel_state(map(str_if_bytes, item))
result[state['name']] = state
return result
def parse_sentinel_slaves_and_sentinels(response):
- return [parse_sentinel_state(imap(nativestr, item)) for item in response]
+ return [parse_sentinel_state(map(str_if_bytes, item)) for item in response]
def parse_sentinel_get_master(response):
return response and (response[0], int(response[1])) or None
-def nativestr_if_bytes(value):
- return nativestr(value) if isinstance(value, bytes) else value
-
-
def pairs_to_dict(response, decode_keys=False, decode_string_values=False):
"Create a dict given a list of key/value pairs"
if response is None:
return {}
if decode_keys or decode_string_values:
# the iter form is faster, but I don't know how to make that work
- # with a nativestr() map
+ # with a str_if_bytes() map
keys = response[::2]
if decode_keys:
- keys = imap(nativestr, keys)
+ keys = map(str_if_bytes, keys)
values = response[1::2]
if decode_string_values:
- values = imap(nativestr_if_bytes, values)
- return dict(izip(keys, values))
+ values = map(str_if_bytes, values)
+ return dict(zip(keys, values))
else:
it = iter(response)
- return dict(izip(it, it))
+ return dict(zip(it, it))
def pairs_to_dict_typed(response, type_info):
it = iter(response)
result = {}
- for key, value in izip(it, it):
+ for key, value in zip(it, it):
if key in type_info:
try:
value = type_info[key](value)
@@ -277,7 +264,7 @@ def zset_score_pairs(response, **options):
return response
score_cast_func = options.get('score_cast_func', float)
it = iter(response)
- return list(izip(it, imap(score_cast_func, it)))
+ return list(zip(it, map(score_cast_func, it)))
def sort_return_tuples(response, **options):
@@ -288,7 +275,7 @@ def sort_return_tuples(response, **options):
if not response or not options.get('groups'):
return response
n = options['groups']
- return list(izip(*[response[i::n] for i in range(n)]))
+ return list(zip(*[response[i::n] for i in range(n)]))
def int_or_none(response):
@@ -297,12 +284,6 @@ def int_or_none(response):
return int(response)
-def nativestr_or_none(response):
- if response is None:
- return None
- return nativestr(response)
-
-
def parse_stream_list(response):
if response is None:
return None
@@ -315,12 +296,12 @@ def parse_stream_list(response):
return data
-def pairs_to_dict_with_nativestr_keys(response):
+def pairs_to_dict_with_str_keys(response):
return pairs_to_dict(response, decode_keys=True)
def parse_list_of_dicts(response):
- return list(imap(pairs_to_dict_with_nativestr_keys, response))
+ return list(map(pairs_to_dict_with_str_keys, response))
def parse_xclaim(response, **options):
@@ -349,7 +330,7 @@ def parse_xread(response):
def parse_xpending(response, **options):
if options.get('parse_detail', False):
return parse_xpending_range(response)
- consumers = [{'name': n, 'pending': long(p)} for n, p in response[3] or []]
+ consumers = [{'name': n, 'pending': int(p)} for n, p in response[3] or []]
return {
'pending': response[0],
'min': response[1],
@@ -360,7 +341,7 @@ def parse_xpending(response, **options):
def parse_xpending_range(response):
k = ('message_id', 'consumer', 'time_since_delivered', 'times_delivered')
- return [dict(izip(k, r)) for r in response]
+ return [dict(zip(k, r)) for r in response]
def float_or_none(response):
@@ -370,7 +351,7 @@ def float_or_none(response):
def bool_ok(response):
- return nativestr(response) == 'OK'
+ return str_if_bytes(response) == 'OK'
def parse_zadd(response, **options):
@@ -383,32 +364,32 @@ def parse_zadd(response, **options):
def parse_client_list(response, **options):
clients = []
- for c in nativestr(response).splitlines():
+ for c in str_if_bytes(response).splitlines():
# Values might contain '='
clients.append(dict(pair.split('=', 1) for pair in c.split(' ')))
return clients
def parse_config_get(response, **options):
- response = [nativestr(i) if i is not None else None for i in response]
+ response = [str_if_bytes(i) if i is not None else None for i in response]
return response and pairs_to_dict(response) or {}
def parse_scan(response, **options):
cursor, r = response
- return long(cursor), r
+ return int(cursor), r
def parse_hscan(response, **options):
cursor, r = response
- return long(cursor), r and pairs_to_dict(r) or {}
+ return int(cursor), r and pairs_to_dict(r) or {}
def parse_zscan(response, **options):
score_cast_func = options.get('score_cast_func', float)
cursor, r = response
it = iter(r)
- return long(cursor), list(izip(it, imap(score_cast_func, it)))
+ return int(cursor), list(zip(it, map(score_cast_func, it)))
def parse_slowlog_get(response, **options):
@@ -422,7 +403,7 @@ def parse_slowlog_get(response, **options):
def parse_cluster_info(response, **options):
- response = nativestr(response)
+ response = str_if_bytes(response)
return dict(line.split(':') for line in response.splitlines() if line)
@@ -445,10 +426,7 @@ def _parse_node_line(line):
def parse_cluster_nodes(response, **options):
- response = nativestr(response)
- raw_lines = response
- if isinstance(response, basestring):
- raw_lines = response.splitlines()
+ raw_lines = str_if_bytes(response).splitlines()
return dict(_parse_node_line(line) for line in raw_lines)
@@ -488,9 +466,9 @@ def parse_pubsub_numsub(response, **options):
def parse_client_kill(response, **options):
- if isinstance(response, (long, int)):
- return int(response)
- return nativestr(response) == 'OK'
+ if isinstance(response, int):
+ return response
+ return str_if_bytes(response) == 'OK'
def parse_acl_getuser(response, **options):
@@ -499,9 +477,9 @@ def parse_acl_getuser(response, **options):
data = pairs_to_dict(response, decode_keys=True)
# convert everything but user-defined data in 'keys' to native strings
- data['flags'] = list(map(nativestr, data['flags']))
- data['passwords'] = list(map(nativestr, data['passwords']))
- data['commands'] = nativestr(data['commands'])
+ data['flags'] = list(map(str_if_bytes, data['flags']))
+ data['passwords'] = list(map(str_if_bytes, data['passwords']))
+ data['commands'] = str_if_bytes(data['commands'])
# split 'commands' into separate 'categories' and 'commands' lists
commands, categories = [], []
@@ -523,7 +501,7 @@ def parse_module_result(response):
return True
-class Redis(object):
+class Redis:
"""
Implementation of the Redis protocol.
@@ -533,13 +511,13 @@ class Redis(object):
Connection and Pipeline derive from this, implementing how
the commands are sent and received to the Redis server
"""
- RESPONSE_CALLBACKS = dict_merge(
- string_keys_to_dict(
+ RESPONSE_CALLBACKS = {
+ **string_keys_to_dict(
'AUTH EXPIRE EXPIREAT HEXISTS HMSET MOVE MSETNX PERSIST '
'PSETEX RENAMENX SISMEMBER SMOVE SETEX SETNX',
bool
),
- string_keys_to_dict(
+ **string_keys_to_dict(
'BITCOUNT BITPOS DECRBY DEL EXISTS GEOADD GETBIT HDEL HLEN '
'HSTRLEN INCRBY LINSERT LLEN LPUSHX PFADD PFCOUNT RPUSHX SADD '
'SCARD SDIFFSTORE SETBIT SETRANGE SINTERSTORE SREM STRLEN '
@@ -547,128 +525,126 @@ class Redis(object):
'ZREMRANGEBYLEX ZREMRANGEBYRANK ZREMRANGEBYSCORE',
int
),
- string_keys_to_dict(
+ **string_keys_to_dict(
'INCRBYFLOAT HINCRBYFLOAT',
float
),
- string_keys_to_dict(
+ **string_keys_to_dict(
# these return OK, or int if redis-server is >=1.3.4
'LPUSH RPUSH',
- lambda r: isinstance(r, (long, int)) and r or nativestr(r) == 'OK'
+ lambda r: isinstance(r, int) and r or str_if_bytes(r) == 'OK'
),
- string_keys_to_dict('SORT', sort_return_tuples),
- string_keys_to_dict('ZSCORE ZINCRBY GEODIST', float_or_none),
- string_keys_to_dict(
+ **string_keys_to_dict('SORT', sort_return_tuples),
+ **string_keys_to_dict('ZSCORE ZINCRBY GEODIST', float_or_none),
+ **string_keys_to_dict(
'FLUSHALL FLUSHDB LSET LTRIM MSET PFMERGE READONLY READWRITE '
'RENAME SAVE SELECT SHUTDOWN SLAVEOF SWAPDB WATCH UNWATCH ',
bool_ok
),
- string_keys_to_dict('BLPOP BRPOP', lambda r: r and tuple(r) or None),
- string_keys_to_dict(
+ **string_keys_to_dict('BLPOP BRPOP', lambda r: r and tuple(r) or None),
+ **string_keys_to_dict(
'SDIFF SINTER SMEMBERS SUNION',
lambda r: r and set(r) or set()
),
- string_keys_to_dict(
+ **string_keys_to_dict(
'ZPOPMAX ZPOPMIN ZRANGE ZRANGEBYSCORE ZREVRANGE ZREVRANGEBYSCORE',
zset_score_pairs
),
- string_keys_to_dict('BZPOPMIN BZPOPMAX', \
- lambda r: r and (r[0], r[1], float(r[2])) or None),
- string_keys_to_dict('ZRANK ZREVRANK', int_or_none),
- string_keys_to_dict('XREVRANGE XRANGE', parse_stream_list),
- string_keys_to_dict('XREAD XREADGROUP', parse_xread),
- string_keys_to_dict('BGREWRITEAOF BGSAVE', lambda r: True),
- {
- 'ACL CAT': lambda r: list(map(nativestr, r)),
- 'ACL DELUSER': int,
- 'ACL GENPASS': nativestr,
- 'ACL GETUSER': parse_acl_getuser,
- 'ACL LIST': lambda r: list(map(nativestr, r)),
- 'ACL LOAD': bool_ok,
- 'ACL SAVE': bool_ok,
- 'ACL SETUSER': bool_ok,
- 'ACL USERS': lambda r: list(map(nativestr, r)),
- 'ACL WHOAMI': nativestr,
- 'CLIENT GETNAME': lambda r: r and nativestr(r),
- 'CLIENT ID': int,
- 'CLIENT KILL': parse_client_kill,
- 'CLIENT LIST': parse_client_list,
- 'CLIENT SETNAME': bool_ok,
- 'CLIENT UNBLOCK': lambda r: r and int(r) == 1 or False,
- 'CLIENT PAUSE': bool_ok,
- 'CLUSTER ADDSLOTS': bool_ok,
- 'CLUSTER COUNT-FAILURE-REPORTS': lambda x: int(x),
- 'CLUSTER COUNTKEYSINSLOT': lambda x: int(x),
- 'CLUSTER DELSLOTS': bool_ok,
- 'CLUSTER FAILOVER': bool_ok,
- 'CLUSTER FORGET': bool_ok,
- 'CLUSTER INFO': parse_cluster_info,
- 'CLUSTER KEYSLOT': lambda x: int(x),
- 'CLUSTER MEET': bool_ok,
- 'CLUSTER NODES': parse_cluster_nodes,
- 'CLUSTER REPLICATE': bool_ok,
- 'CLUSTER RESET': bool_ok,
- 'CLUSTER SAVECONFIG': bool_ok,
- 'CLUSTER SET-CONFIG-EPOCH': bool_ok,
- 'CLUSTER SETSLOT': bool_ok,
- 'CLUSTER SLAVES': parse_cluster_nodes,
- 'CONFIG GET': parse_config_get,
- 'CONFIG RESETSTAT': bool_ok,
- 'CONFIG SET': bool_ok,
- 'DEBUG OBJECT': parse_debug_object,
- 'GEOHASH': lambda r: list(map(nativestr_or_none, r)),
- 'GEOPOS': lambda r: list(map(lambda ll: (float(ll[0]),
- float(ll[1]))
- if ll is not None else None, r)),
- 'GEORADIUS': parse_georadius_generic,
- 'GEORADIUSBYMEMBER': parse_georadius_generic,
- 'HGETALL': lambda r: r and pairs_to_dict(r) or {},
- 'HSCAN': parse_hscan,
- 'INFO': parse_info,
- 'LASTSAVE': timestamp_to_datetime,
- 'MEMORY PURGE': bool_ok,
- 'MEMORY STATS': parse_memory_stats,
- 'MEMORY USAGE': int_or_none,
- 'OBJECT': parse_object,
- 'PING': lambda r: nativestr(r) == 'PONG',
- 'PUBSUB NUMSUB': parse_pubsub_numsub,
- 'RANDOMKEY': lambda r: r and r or None,
- 'SCAN': parse_scan,
- 'SCRIPT EXISTS': lambda r: list(imap(bool, r)),
- 'SCRIPT FLUSH': bool_ok,
- 'SCRIPT KILL': bool_ok,
- 'SCRIPT LOAD': nativestr,
- 'SENTINEL GET-MASTER-ADDR-BY-NAME': parse_sentinel_get_master,
- 'SENTINEL MASTER': parse_sentinel_master,
- 'SENTINEL MASTERS': parse_sentinel_masters,
- 'SENTINEL MONITOR': bool_ok,
- 'SENTINEL REMOVE': bool_ok,
- 'SENTINEL SENTINELS': parse_sentinel_slaves_and_sentinels,
- 'SENTINEL SET': bool_ok,
- 'SENTINEL SLAVES': parse_sentinel_slaves_and_sentinels,
- 'SET': lambda r: r and nativestr(r) == 'OK',
- 'SLOWLOG GET': parse_slowlog_get,
- 'SLOWLOG LEN': int,
- 'SLOWLOG RESET': bool_ok,
- 'SSCAN': parse_scan,
- 'TIME': lambda x: (int(x[0]), int(x[1])),
- 'XCLAIM': parse_xclaim,
- 'XGROUP CREATE': bool_ok,
- 'XGROUP DELCONSUMER': int,
- 'XGROUP DESTROY': bool,
- 'XGROUP SETID': bool_ok,
- 'XINFO CONSUMERS': parse_list_of_dicts,
- 'XINFO GROUPS': parse_list_of_dicts,
- 'XINFO STREAM': parse_xinfo_stream,
- 'XPENDING': parse_xpending,
- 'ZADD': parse_zadd,
- 'ZSCAN': parse_zscan,
- 'MODULE LOAD': parse_module_result,
- 'MODULE UNLOAD': parse_module_result,
- 'MODULE LIST': lambda response: [pairs_to_dict(module)
- for module in response],
- }
- )
+ **string_keys_to_dict('BZPOPMIN BZPOPMAX', \
+ lambda r:
+ r and (r[0], r[1], float(r[2])) or None),
+ **string_keys_to_dict('ZRANK ZREVRANK', int_or_none),
+ **string_keys_to_dict('XREVRANGE XRANGE', parse_stream_list),
+ **string_keys_to_dict('XREAD XREADGROUP', parse_xread),
+ **string_keys_to_dict('BGREWRITEAOF BGSAVE', lambda r: True),
+ 'ACL CAT': lambda r: list(map(str_if_bytes, r)),
+ 'ACL DELUSER': int,
+ 'ACL GENPASS': str_if_bytes,
+ 'ACL GETUSER': parse_acl_getuser,
+ 'ACL LIST': lambda r: list(map(str_if_bytes, r)),
+ 'ACL LOAD': bool_ok,
+ 'ACL SAVE': bool_ok,
+ 'ACL SETUSER': bool_ok,
+ 'ACL USERS': lambda r: list(map(str_if_bytes, r)),
+ 'ACL WHOAMI': str_if_bytes,
+ 'CLIENT GETNAME': str_if_bytes,
+ 'CLIENT ID': int,
+ 'CLIENT KILL': parse_client_kill,
+ 'CLIENT LIST': parse_client_list,
+ 'CLIENT SETNAME': bool_ok,
+ 'CLIENT UNBLOCK': lambda r: r and int(r) == 1 or False,
+ 'CLIENT PAUSE': bool_ok,
+ 'CLUSTER ADDSLOTS': bool_ok,
+ 'CLUSTER COUNT-FAILURE-REPORTS': lambda x: int(x),
+ 'CLUSTER COUNTKEYSINSLOT': lambda x: int(x),
+ 'CLUSTER DELSLOTS': bool_ok,
+ 'CLUSTER FAILOVER': bool_ok,
+ 'CLUSTER FORGET': bool_ok,
+ 'CLUSTER INFO': parse_cluster_info,
+ 'CLUSTER KEYSLOT': lambda x: int(x),
+ 'CLUSTER MEET': bool_ok,
+ 'CLUSTER NODES': parse_cluster_nodes,
+ 'CLUSTER REPLICATE': bool_ok,
+ 'CLUSTER RESET': bool_ok,
+ 'CLUSTER SAVECONFIG': bool_ok,
+ 'CLUSTER SET-CONFIG-EPOCH': bool_ok,
+ 'CLUSTER SETSLOT': bool_ok,
+ 'CLUSTER SLAVES': parse_cluster_nodes,
+ 'CONFIG GET': parse_config_get,
+ 'CONFIG RESETSTAT': bool_ok,
+ 'CONFIG SET': bool_ok,
+ 'DEBUG OBJECT': parse_debug_object,
+ 'GEOHASH': lambda r: list(map(str_if_bytes, r)),
+ 'GEOPOS': lambda r: list(map(lambda ll: (float(ll[0]),
+ float(ll[1]))
+ if ll is not None else None, r)),
+ 'GEORADIUS': parse_georadius_generic,
+ 'GEORADIUSBYMEMBER': parse_georadius_generic,
+ 'HGETALL': lambda r: r and pairs_to_dict(r) or {},
+ 'HSCAN': parse_hscan,
+ 'INFO': parse_info,
+ 'LASTSAVE': timestamp_to_datetime,
+ 'MEMORY PURGE': bool_ok,
+ 'MEMORY STATS': parse_memory_stats,
+ 'MEMORY USAGE': int_or_none,
+ 'MODULE LOAD': parse_module_result,
+ 'MODULE UNLOAD': parse_module_result,
+ 'MODULE LIST': lambda r: [pairs_to_dict(m) for m in r],
+ 'OBJECT': parse_object,
+ 'PING': lambda r: str_if_bytes(r) == 'PONG',
+ 'PUBSUB NUMSUB': parse_pubsub_numsub,
+ 'RANDOMKEY': lambda r: r and r or None,
+ 'SCAN': parse_scan,
+ 'SCRIPT EXISTS': lambda r: list(map(bool, r)),
+ 'SCRIPT FLUSH': bool_ok,
+ 'SCRIPT KILL': bool_ok,
+ 'SCRIPT LOAD': str_if_bytes,
+ 'SENTINEL GET-MASTER-ADDR-BY-NAME': parse_sentinel_get_master,
+ 'SENTINEL MASTER': parse_sentinel_master,
+ 'SENTINEL MASTERS': parse_sentinel_masters,
+ 'SENTINEL MONITOR': bool_ok,
+ 'SENTINEL REMOVE': bool_ok,
+ 'SENTINEL SENTINELS': parse_sentinel_slaves_and_sentinels,
+ 'SENTINEL SET': bool_ok,
+ 'SENTINEL SLAVES': parse_sentinel_slaves_and_sentinels,
+ 'SET': lambda r: r and str_if_bytes(r) == 'OK',
+ 'SLOWLOG GET': parse_slowlog_get,
+ 'SLOWLOG LEN': int,
+ 'SLOWLOG RESET': bool_ok,
+ 'SSCAN': parse_scan,
+ 'TIME': lambda x: (int(x[0]), int(x[1])),
+ 'XCLAIM': parse_xclaim,
+ 'XGROUP CREATE': bool_ok,
+ 'XGROUP DELCONSUMER': int,
+ 'XGROUP DESTROY': bool,
+ 'XGROUP SETID': bool_ok,
+ 'XINFO CONSUMERS': parse_list_of_dicts,
+ 'XINFO GROUPS': parse_list_of_dicts,
+ 'XINFO STREAM': parse_xinfo_stream,
+ 'XPENDING': parse_xpending,
+ 'ZADD': parse_zadd,
+ 'ZSCAN': parse_zscan,
+ }
@classmethod
def from_url(cls, url, db=None, **kwargs):
@@ -1233,7 +1209,7 @@ class Redis(object):
Suspend all the Redis clients for the specified amount of time
:param timeout: milliseconds to pause clients
"""
- if not isinstance(timeout, (int, long)):
+ if not isinstance(timeout, int):
raise DataError("CLIENT PAUSE timeout must be an integer")
return self.execute_command('CLIENT PAUSE', str(timeout))
@@ -1688,7 +1664,7 @@ class Redis(object):
can be cast to a string via str().
"""
items = []
- for pair in iteritems(mapping):
+ for pair in mapping.items():
items.extend(pair)
return self.execute_command('MSET', *items)
@@ -1700,7 +1676,7 @@ class Redis(object):
Returns a boolean indicating if the operation was successful.
"""
items = []
- for pair in iteritems(mapping):
+ for pair in mapping.items():
items.extend(pair)
return self.execute_command('MSETNX', *items)
@@ -2109,7 +2085,7 @@ class Redis(object):
        # Otherwise assume it's an iterable and we want to get multiple
# values. We can't just iterate blindly because strings are
# iterable.
- if isinstance(get, (bytes, basestring)):
+ if isinstance(get, (bytes, str)):
pieces.append(b'GET')
pieces.append(get)
else:
@@ -2125,7 +2101,7 @@ class Redis(object):
pieces.append(store)
if groups:
- if not get or isinstance(get, (bytes, basestring)) or len(get) < 2:
+ if not get or isinstance(get, (bytes, str)) or len(get) < 2:
raise DataError('when using "groups" the "get" argument '
'must be specified and contain at least '
'two keys')
@@ -2177,8 +2153,7 @@ class Redis(object):
while cursor != 0:
cursor, data = self.scan(cursor=cursor, match=match,
count=count, _type=_type)
- for item in data:
- yield item
+ yield from data
def sscan(self, name, cursor=0, match=None, count=None):
"""
@@ -2209,8 +2184,7 @@ class Redis(object):
while cursor != 0:
cursor, data = self.sscan(name, cursor=cursor,
match=match, count=count)
- for item in data:
- yield item
+ yield from data
def hscan(self, name, cursor=0, match=None, count=None):
"""
@@ -2241,8 +2215,7 @@ class Redis(object):
while cursor != 0:
cursor, data = self.hscan(name, cursor=cursor,
match=match, count=count)
- for item in data.items():
- yield item
+ yield from data.items()
def zscan(self, name, cursor=0, match=None, count=None,
score_cast_func=float):
@@ -2281,8 +2254,7 @@ class Redis(object):
cursor, data = self.zscan(name, cursor=cursor, match=match,
count=count,
score_cast_func=score_cast_func)
- for item in data:
- yield item
+ yield from data
# SET COMMANDS
def sadd(self, name, *values):
@@ -2386,7 +2358,7 @@ class Redis(object):
"""
pieces = []
if maxlen is not None:
- if not isinstance(maxlen, (int, long)) or maxlen < 1:
+ if not isinstance(maxlen, int) or maxlen < 1:
raise DataError('XADD maxlen must be a positive integer')
pieces.append(b'MAXLEN')
if approximate:
@@ -2395,7 +2367,7 @@ class Redis(object):
pieces.append(id)
if not isinstance(fields, dict) or len(fields) == 0:
raise DataError('XADD fields must be a non-empty dict')
- for pair in iteritems(fields):
+ for pair in fields.items():
pieces.extend(pair)
return self.execute_command('XADD', name, *pieces)
@@ -2424,7 +2396,7 @@ class Redis(object):
justid: optional boolean, false by default. Return just an array of IDs
of messages successfully claimed, without returning the actual message
"""
- if not isinstance(min_idle_time, (int, long)) or min_idle_time < 0:
+ if not isinstance(min_idle_time, int) or min_idle_time < 0:
raise DataError("XCLAIM min_idle_time must be a non negative "
"integer")
if not isinstance(message_ids, (list, tuple)) or not message_ids:
@@ -2436,15 +2408,15 @@ class Redis(object):
pieces.extend(list(message_ids))
if idle is not None:
- if not isinstance(idle, (int, long)):
+ if not isinstance(idle, int):
raise DataError("XCLAIM idle must be an integer")
pieces.extend((b'IDLE', str(idle)))
if time is not None:
- if not isinstance(time, (int, long)):
+ if not isinstance(time, int):
raise DataError("XCLAIM time must be an integer")
pieces.extend((b'TIME', str(time)))
if retrycount is not None:
- if not isinstance(retrycount, (int, long)):
+ if not isinstance(retrycount, int):
raise DataError("XCLAIM retrycount must be an integer")
pieces.extend((b'RETRYCOUNT', str(retrycount)))
@@ -2560,7 +2532,7 @@ class Redis(object):
if min is None or max is None or count is None:
raise DataError("XPENDING must be provided with min, max "
"and count parameters, or none of them. ")
- if not isinstance(count, (int, long)) or count < -1:
+ if not isinstance(count, int) or count < -1:
raise DataError("XPENDING count must be a integer >= -1")
pieces.extend((min, max, str(count)))
if consumername is not None:
@@ -2584,7 +2556,7 @@ class Redis(object):
"""
pieces = [min, max]
if count is not None:
- if not isinstance(count, (int, long)) or count < 1:
+ if not isinstance(count, int) or count < 1:
raise DataError('XRANGE count must be a positive integer')
pieces.append(b'COUNT')
pieces.append(str(count))
@@ -2602,19 +2574,19 @@ class Redis(object):
"""
pieces = []
if block is not None:
- if not isinstance(block, (int, long)) or block < 0:
+ if not isinstance(block, int) or block < 0:
raise DataError('XREAD block must be a non-negative integer')
pieces.append(b'BLOCK')
pieces.append(str(block))
if count is not None:
- if not isinstance(count, (int, long)) or count < 1:
+ if not isinstance(count, int) or count < 1:
raise DataError('XREAD count must be a positive integer')
pieces.append(b'COUNT')
pieces.append(str(count))
if not isinstance(streams, dict) or len(streams) == 0:
raise DataError('XREAD streams must be a non empty dict')
pieces.append(b'STREAMS')
- keys, values = izip(*iteritems(streams))
+ keys, values = zip(*streams.items())
pieces.extend(keys)
pieces.extend(values)
return self.execute_command('XREAD', *pieces)
@@ -2634,12 +2606,12 @@ class Redis(object):
"""
pieces = [b'GROUP', groupname, consumername]
if count is not None:
- if not isinstance(count, (int, long)) or count < 1:
+ if not isinstance(count, int) or count < 1:
raise DataError("XREADGROUP count must be a positive integer")
pieces.append(b'COUNT')
pieces.append(str(count))
if block is not None:
- if not isinstance(block, (int, long)) or block < 0:
+ if not isinstance(block, int) or block < 0:
raise DataError("XREADGROUP block must be a non-negative "
"integer")
pieces.append(b'BLOCK')
@@ -2666,7 +2638,7 @@ class Redis(object):
"""
pieces = [max, min]
if count is not None:
- if not isinstance(count, (int, long)) or count < 1:
+ if not isinstance(count, int) or count < 1:
raise DataError('XREVRANGE count must be a positive integer')
pieces.append(b'COUNT')
pieces.append(str(count))
@@ -2729,7 +2701,7 @@ class Redis(object):
if incr:
pieces.append(b'INCR')
options['as_score'] = True
- for pair in iteritems(mapping):
+ for pair in mapping.items():
pieces.append(pair[1])
pieces.append(pair[0])
return self.execute_command('ZADD', name, *pieces, **options)
@@ -3015,7 +2987,7 @@ class Redis(object):
def _zaggregate(self, command, dest, keys, aggregate=None):
pieces = [command, dest, len(keys)]
if isinstance(keys, dict):
- keys, weights = iterkeys(keys), itervalues(keys)
+ keys, weights = keys.keys(), keys.values()
else:
weights = None
pieces.extend(keys)
@@ -3117,7 +3089,7 @@ class Redis(object):
if not mapping:
raise DataError("'hmset' with 'mapping' of length 0")
items = []
- for pair in iteritems(mapping):
+ for pair in mapping.items():
items.extend(pair)
return self.execute_command('HMSET', name, *items)
@@ -3379,7 +3351,7 @@ class Redis(object):
StrictRedis = Redis
-class Monitor(object):
+class Monitor:
"""
Monitor is useful for handling the MONITOR command to the redis server.
next_command() method returns one command from monitor
@@ -3445,7 +3417,7 @@ class Monitor(object):
yield self.next_command()
-class PubSub(object):
+class PubSub:
"""
PubSub provides publish, subscribe and listen support to Redis channels.
@@ -3513,12 +3485,12 @@ class PubSub(object):
self.pending_unsubscribe_patterns.clear()
if self.channels:
channels = {}
- for k, v in iteritems(self.channels):
+ for k, v in self.channels.items():
channels[self.encoder.decode(k, force=True)] = v
self.subscribe(**channels)
if self.patterns:
patterns = {}
- for k, v in iteritems(self.patterns):
+ for k, v in self.patterns.items():
patterns[self.encoder.decode(k, force=True)] = v
self.psubscribe(**patterns)
@@ -3601,7 +3573,7 @@ class PubSub(object):
"""
encode = self.encoder.encode
decode = self.encoder.decode
- return {decode(encode(k)): v for k, v in iteritems(data)}
+ return {decode(encode(k)): v for k, v in data.items()}
def psubscribe(self, *args, **kwargs):
"""
@@ -3615,7 +3587,7 @@ class PubSub(object):
args = list_or_args(args[0], args[1:])
new_patterns = dict.fromkeys(args)
new_patterns.update(kwargs)
- ret_val = self.execute_command('PSUBSCRIBE', *iterkeys(new_patterns))
+ ret_val = self.execute_command('PSUBSCRIBE', *new_patterns.keys())
# update the patterns dict AFTER we send the command. we don't want to
# subscribe twice to these patterns, once for the command and again
# for the reconnection.
@@ -3649,7 +3621,7 @@ class PubSub(object):
args = list_or_args(args[0], args[1:])
new_channels = dict.fromkeys(args)
new_channels.update(kwargs)
- ret_val = self.execute_command('SUBSCRIBE', *iterkeys(new_channels))
+ ret_val = self.execute_command('SUBSCRIBE', *new_channels.keys())
# update the channels dict AFTER we send the command. we don't want to
# subscribe twice to these channels, once for the command and again
# for the reconnection.
@@ -3704,7 +3676,7 @@ class PubSub(object):
with a message handler, the handler is invoked instead of a parsed
message being returned.
"""
- message_type = nativestr(response[0])
+ message_type = str_if_bytes(response[0])
if message_type == 'pmessage':
message = {
'type': message_type,
@@ -3758,11 +3730,11 @@ class PubSub(object):
return message
def run_in_thread(self, sleep_time=0, daemon=False):
- for channel, handler in iteritems(self.channels):
+ for channel, handler in self.channels.items():
if handler is None:
raise PubSubError("Channel: '%s' has no handler registered" %
channel)
- for pattern, handler in iteritems(self.patterns):
+ for pattern, handler in self.patterns.items():
if handler is None:
raise PubSubError("Pattern: '%s' has no handler registered" %
pattern)
@@ -3774,7 +3746,7 @@ class PubSub(object):
class PubSubWorkerThread(threading.Thread):
def __init__(self, pubsub, sleep_time, daemon=False):
- super(PubSubWorkerThread, self).__init__()
+ super().__init__()
self.daemon = daemon
self.pubsub = pubsub
self.sleep_time = sleep_time
@@ -3845,12 +3817,8 @@ class Pipeline(Redis):
def __len__(self):
return len(self.command_stack)
- def __nonzero__(self):
- "Pipeline instances should always evaluate to True on Python 2.7"
- return True
-
def __bool__(self):
- "Pipeline instances should always evaluate to True on Python 3+"
+ "Pipeline instances should always evaluate to True"
return True
def reset(self):
@@ -4007,7 +3975,7 @@ class Pipeline(Redis):
# We have to run response callbacks manually
data = []
- for r, cmd in izip(response, commands):
+ for r, cmd in zip(response, commands):
if not isinstance(r, Exception):
args, options = cmd
command_name = args[0]
@@ -4040,9 +4008,9 @@ class Pipeline(Redis):
raise r
def annotate_exception(self, exception, number, command):
- cmd = ' '.join(imap(safe_unicode, command))
+ cmd = ' '.join(map(safe_str, command))
msg = 'Command # %d (%s) of pipeline caused error: %s' % (
- number, cmd, safe_unicode(exception.args[0]))
+ number, cmd, exception.args[0])
exception.args = (msg,) + exception.args[1:]
def parse_response(self, connection, command_name, **options):
@@ -4063,7 +4031,7 @@ class Pipeline(Redis):
# get buffered in the pipeline.
exists = immediate('SCRIPT EXISTS', *shas)
if not all(exists):
- for s, exist in izip(scripts, exists):
+ for s, exist in zip(scripts, exists):
if not exist:
s.sha = immediate('SCRIPT LOAD', s.script)
@@ -4117,7 +4085,7 @@ class Pipeline(Redis):
return self.watching and self.execute_command('UNWATCH') or True
-class Script(object):
+class Script:
"An executable Lua script object returned by ``register_script``"
def __init__(self, registered_client, script):
@@ -4125,7 +4093,7 @@ class Script(object):
self.script = script
# Precalculate and store the SHA1 hex digest of the script.
- if isinstance(script, basestring):
+ if isinstance(script, str):
# We need the encoding from the client in order to generate an
# accurate byte representation of the script
encoder = registered_client.connection_pool.get_encoder()
@@ -4151,7 +4119,7 @@ class Script(object):
return client.evalsha(self.sha, len(keys), *args)
-class BitFieldOperation(object):
+class BitFieldOperation:
"""
Command builder for BITFIELD commands.
"""
diff --git a/redis/connection.py b/redis/connection.py
index 22d3902..a29f9b2 100755
--- a/redis/connection.py
+++ b/redis/connection.py
@@ -1,7 +1,8 @@
-from __future__ import unicode_literals
from distutils.version import StrictVersion
from itertools import chain
from time import time
+from queue import LifoQueue, Empty, Full
+from urllib.parse import parse_qs, unquote, urlparse
import errno
import io
import os
@@ -9,11 +10,6 @@ import socket
import threading
import warnings
-from redis._compat import (xrange, imap, unicode, long,
- nativestr, basestring, iteritems,
- LifoQueue, Empty, Full, urlparse, parse_qs,
- recv, recv_into, unquote, BlockingIOError,
- sendall, shutdown, ssl_wrap_socket)
from redis.exceptions import (
AuthenticationError,
AuthenticationWrongNumberOfArgsError,
@@ -31,7 +27,7 @@ from redis.exceptions import (
TimeoutError,
ModuleError,
)
-from redis.utils import HIREDIS_AVAILABLE
+from redis.utils import HIREDIS_AVAILABLE, str_if_bytes
try:
import ssl
@@ -50,16 +46,6 @@ if ssl_available:
else:
NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLError] = 2
-# In Python 2.7 a socket.error is raised for a nonblocking read.
-# The _compat module aliases BlockingIOError to socket.error to be
-# Python 2/3 compatible.
-# However this means that all socket.error exceptions need to be handled
-# properly within these exception handlers.
-# We need to make sure socket.error is included in these handlers and
-# provide a dummy error number that will never match a real exception.
-if socket.error not in NONBLOCKING_EXCEPTION_ERROR_NUMBERS:
- NONBLOCKING_EXCEPTION_ERROR_NUMBERS[socket.error] = -999999
-
NONBLOCKING_EXCEPTIONS = tuple(NONBLOCKING_EXCEPTION_ERROR_NUMBERS.keys())
if HIREDIS_AVAILABLE:
@@ -101,7 +87,7 @@ MODULE_EXPORTS_DATA_TYPES_ERROR = "Error unloading module: the module " \
"types, can't unload"
-class Encoder(object):
+class Encoder:
"Encode strings to bytes-like and decode bytes-like to strings"
def __init__(self, encoding, encoding_errors, decode_responses):
@@ -117,17 +103,14 @@ class Encoder(object):
# special case bool since it is a subclass of int
raise DataError("Invalid input of type: 'bool'. Convert to a "
"bytes, string, int or float first.")
- elif isinstance(value, float):
+ elif isinstance(value, (int, float)):
value = repr(value).encode()
- elif isinstance(value, (int, long)):
- # python 2 repr() on longs is '123L', so use str() instead
- value = str(value).encode()
- elif not isinstance(value, basestring):
+ elif not isinstance(value, str):
# a value we don't know how to deal with. throw an error
typename = type(value).__name__
raise DataError("Invalid input of type: '%s'. Convert to a "
"bytes, string, int or float first." % typename)
- if isinstance(value, unicode):
+ if isinstance(value, str):
value = value.encode(self.encoding, self.encoding_errors)
return value
@@ -141,7 +124,7 @@ class Encoder(object):
return value
-class BaseParser(object):
+class BaseParser:
EXCEPTION_CLASSES = {
'ERR': {
'max number of clients reached': ConnectionError,
@@ -180,7 +163,7 @@ class BaseParser(object):
return ResponseError(response)
-class SocketBuffer(object):
+class SocketBuffer:
def __init__(self, socket, socket_read_size, socket_timeout):
self._sock = socket
self.socket_read_size = socket_read_size
@@ -208,7 +191,7 @@ class SocketBuffer(object):
if custom_timeout:
sock.settimeout(timeout)
while True:
- data = recv(self._sock, socket_read_size)
+ data = self._sock.recv(socket_read_size)
# an empty string indicates the server shutdown the socket
if isinstance(data, bytes) and len(data) == 0:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
@@ -345,7 +328,7 @@ class PythonParser(BaseParser):
# server returned an error
if byte == b'-':
- response = nativestr(response)
+ response = response.decode('utf-8', errors='replace')
error = self.parse_error(response)
# if the error is a ConnectionError, raise immediately so the user
# is notified
@@ -361,7 +344,7 @@ class PythonParser(BaseParser):
pass
# int value
elif byte == b':':
- response = long(response)
+ response = int(response)
# bulk response
elif byte == b'$':
length = int(response)
@@ -373,7 +356,7 @@ class PythonParser(BaseParser):
length = int(response)
if length == -1:
return None
- response = [self.read_response() for i in xrange(length)]
+ response = [self.read_response() for i in range(length)]
if isinstance(response, bytes):
response = self.encoder.decode(response)
return response
@@ -437,12 +420,12 @@ class HiredisParser(BaseParser):
if custom_timeout:
sock.settimeout(timeout)
if HIREDIS_USE_BYTE_BUFFER:
- bufflen = recv_into(self._sock, self._buffer)
+ bufflen = self._sock.recv_into(self._buffer)
if bufflen == 0:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
self._reader.feed(self._buffer, 0, bufflen)
else:
- buffer = recv(self._sock, self.socket_read_size)
+ buffer = self._sock.recv(self.socket_read_size)
# an empty string indicates the server shutdown the socket
if not isinstance(buffer, bytes) or len(buffer) == 0:
raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
@@ -507,7 +490,7 @@ else:
DefaultParser = PythonParser
-class Connection(object):
+class Connection:
"Manages TCP communication to and from a Redis server"
def __init__(self, host='localhost', port=6379, db=0, password=None,
@@ -606,7 +589,7 @@ class Connection(object):
# TCP_KEEPALIVE
if self.socket_keepalive:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
- for k, v in iteritems(self.socket_keepalive_options):
+ for k, v in self.socket_keepalive_options.items():
sock.setsockopt(socket.IPPROTO_TCP, k, v)
# set the socket_connect_timeout before we connect
@@ -619,14 +602,14 @@ class Connection(object):
sock.settimeout(self.socket_timeout)
return sock
- except socket.error as _:
+ except OSError as _:
err = _
if sock is not None:
sock.close()
if err is not None:
raise err
- raise socket.error("socket.getaddrinfo returned an empty list")
+ raise OSError("socket.getaddrinfo returned an empty list")
def _error_message(self, exception):
# args for socket.error can either be (errno, "message")
@@ -662,19 +645,19 @@ class Connection(object):
self.send_command('AUTH', self.password, check_health=False)
auth_response = self.read_response()
- if nativestr(auth_response) != 'OK':
+ if str_if_bytes(auth_response) != 'OK':
raise AuthenticationError('Invalid Username or Password')
# if a client_name is given, set it
if self.client_name:
self.send_command('CLIENT', 'SETNAME', self.client_name)
- if nativestr(self.read_response()) != 'OK':
+ if str_if_bytes(self.read_response()) != 'OK':
raise ConnectionError('Error setting client name')
# if a database is specified, switch to it
if self.db:
self.send_command('SELECT', self.db)
- if nativestr(self.read_response()) != 'OK':
+ if str_if_bytes(self.read_response()) != 'OK':
raise ConnectionError('Invalid Database')
def disconnect(self):
@@ -684,9 +667,9 @@ class Connection(object):
return
try:
if os.getpid() == self.pid:
- shutdown(self._sock, socket.SHUT_RDWR)
+ self._sock.shutdown(socket.SHUT_RDWR)
self._sock.close()
- except socket.error:
+ except OSError:
pass
self._sock = None
@@ -695,13 +678,13 @@ class Connection(object):
if self.health_check_interval and time() > self.next_health_check:
try:
self.send_command('PING', check_health=False)
- if nativestr(self.read_response()) != 'PONG':
+ if str_if_bytes(self.read_response()) != 'PONG':
raise ConnectionError(
'Bad response from PING health check')
except (ConnectionError, TimeoutError):
self.disconnect()
self.send_command('PING', check_health=False)
- if nativestr(self.read_response()) != 'PONG':
+ if str_if_bytes(self.read_response()) != 'PONG':
raise ConnectionError(
'Bad response from PING health check')
@@ -716,7 +699,7 @@ class Connection(object):
if isinstance(command, str):
command = [command]
for item in command:
- sendall(self._sock, item)
+ self._sock.sendall(item)
except socket.timeout:
self.disconnect()
raise TimeoutError("Timeout writing to socket")
@@ -777,7 +760,7 @@ class Connection(object):
# arguments to be sent separately, so split the first argument
# manually. These arguments should be bytestrings so that they are
# not encoded.
- if isinstance(args[0], unicode):
+ if isinstance(args[0], str):
args = tuple(args[0].encode().split()) + args[1:]
elif b' ' in args[0]:
args = tuple(args[0].split()) + args[1:]
@@ -785,7 +768,7 @@ class Connection(object):
buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF))
buffer_cutoff = self._buffer_cutoff
- for arg in imap(self.encoder.encode, args):
+ for arg in map(self.encoder.encode, args):
# to avoid large string mallocs, chunk the command into the
# output list if we're sending large values or memoryviews
arg_length = len(arg)
@@ -838,13 +821,13 @@ class SSLConnection(Connection):
if not ssl_available:
raise RedisError("Python wasn't built with SSL support")
- super(SSLConnection, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.keyfile = ssl_keyfile
self.certfile = ssl_certfile
if ssl_cert_reqs is None:
ssl_cert_reqs = ssl.CERT_NONE
- elif isinstance(ssl_cert_reqs, basestring):
+ elif isinstance(ssl_cert_reqs, str):
CERT_REQS = {
'none': ssl.CERT_NONE,
'optional': ssl.CERT_OPTIONAL,
@@ -861,27 +844,16 @@ class SSLConnection(Connection):
def _connect(self):
"Wrap the socket with SSL support"
- sock = super(SSLConnection, self)._connect()
- if hasattr(ssl, "create_default_context"):
- context = ssl.create_default_context()
- context.check_hostname = self.check_hostname
- context.verify_mode = self.cert_reqs
- if self.certfile and self.keyfile:
- context.load_cert_chain(certfile=self.certfile,
- keyfile=self.keyfile)
- if self.ca_certs:
- context.load_verify_locations(self.ca_certs)
- sock = ssl_wrap_socket(context, sock, server_hostname=self.host)
- else:
- # In case this code runs in a version which is older than 2.7.9,
- # we want to fall back to old code
- sock = ssl_wrap_socket(ssl,
- sock,
- cert_reqs=self.cert_reqs,
- keyfile=self.keyfile,
- certfile=self.certfile,
- ca_certs=self.ca_certs)
- return sock
+ sock = super()._connect()
+ context = ssl.create_default_context()
+ context.check_hostname = self.check_hostname
+ context.verify_mode = self.cert_reqs
+ if self.certfile and self.keyfile:
+ context.load_cert_chain(certfile=self.certfile,
+ keyfile=self.keyfile)
+ if self.ca_certs:
+ context.load_verify_locations(self.ca_certs)
+ return context.wrap_socket(sock, server_hostname=self.host)
class UnixDomainSocketConnection(Connection):
@@ -941,7 +913,7 @@ FALSE_STRINGS = ('0', 'F', 'FALSE', 'N', 'NO')
def to_bool(value):
if value is None or value == '':
return None
- if isinstance(value, basestring) and value.upper() in FALSE_STRINGS:
+ if isinstance(value, str) and value.upper() in FALSE_STRINGS:
return False
return bool(value)
@@ -957,7 +929,7 @@ URL_QUERY_ARGUMENT_PARSERS = {
}
-class ConnectionPool(object):
+class ConnectionPool:
"""
Create a connection pool. ``If max_connections`` is set, then this
object raises :py:class:`~redis.ConnectionError` when the pool's
@@ -1019,7 +991,7 @@ class ConnectionPool(object):
url = urlparse(url)
url_options = {}
- for name, value in iteritems(parse_qs(url.query)):
+ for name, value in parse_qs(url.query).items():
if value and len(value) > 0:
parser = URL_QUERY_ARGUMENT_PARSERS.get(name)
if parser:
@@ -1096,7 +1068,7 @@ class ConnectionPool(object):
def __init__(self, connection_class=Connection, max_connections=None,
**connection_kwargs):
max_connections = max_connections or 2 ** 31
- if not isinstance(max_connections, (int, long)) or max_connections < 0:
+ if not isinstance(max_connections, int) or max_connections < 0:
raise ValueError('"max_connections" must be a positive integer')
self.connection_class = connection_class
@@ -1173,14 +1145,7 @@ class ConnectionPool(object):
# that time it is assumed that the child is deadlocked and a
# redis.ChildDeadlockedError error is raised.
if self.pid != os.getpid():
- # python 2.7 doesn't support a timeout option to lock.acquire()
- # we have to mimic lock timeouts ourselves.
- timeout_at = time() + 5
- acquired = False
- while time() < timeout_at:
- acquired = self._fork_lock.acquire(False)
- if acquired:
- break
+ acquired = self._fork_lock.acquire(timeout=5)
if not acquired:
raise ChildDeadlockedError
# reset() the instance for the new process if another thread
@@ -1323,7 +1288,7 @@ class BlockingConnectionPool(ConnectionPool):
self.queue_class = queue_class
self.timeout = timeout
- super(BlockingConnectionPool, self).__init__(
+ super().__init__(
connection_class=connection_class,
max_connections=max_connections,
**connection_kwargs)
diff --git a/redis/lock.py b/redis/lock.py
index 5c47748..e51f169 100644
--- a/redis/lock.py
+++ b/redis/lock.py
@@ -1,11 +1,11 @@
import threading
import time as mod_time
import uuid
+from types import SimpleNamespace
from redis.exceptions import LockError, LockNotOwnedError
-from redis.utils import dummy
-class Lock(object):
+class Lock:
"""
A shared, distributed Lock. Using Redis for locking allows the Lock
to be shared across processes and/or machines.
@@ -129,7 +129,11 @@ class Lock(object):
self.blocking = blocking
self.blocking_timeout = blocking_timeout
self.thread_local = bool(thread_local)
- self.local = threading.local() if self.thread_local else dummy()
+ self.local = (
+ threading.local()
+ if self.thread_local
+ else SimpleNamespace()
+ )
self.local.token = None
self.register_scripts()
diff --git a/redis/sentinel.py b/redis/sentinel.py
index 203c859..b9d77f1 100644
--- a/redis/sentinel.py
+++ b/redis/sentinel.py
@@ -5,7 +5,7 @@ from redis.client import Redis
from redis.connection import ConnectionPool, Connection
from redis.exceptions import (ConnectionError, ResponseError, ReadOnlyError,
TimeoutError)
-from redis._compat import iteritems, nativestr, xrange
+from redis.utils import str_if_bytes
class MasterNotFoundError(ConnectionError):
@@ -19,7 +19,7 @@ class SlaveNotFoundError(ConnectionError):
class SentinelManagedConnection(Connection):
def __init__(self, **kwargs):
self.connection_pool = kwargs.pop('connection_pool')
- super(SentinelManagedConnection, self).__init__(**kwargs)
+ super().__init__(**kwargs)
def __repr__(self):
pool = self.connection_pool
@@ -31,10 +31,10 @@ class SentinelManagedConnection(Connection):
def connect_to(self, address):
self.host, self.port = address
- super(SentinelManagedConnection, self).connect()
+ super().connect()
if self.connection_pool.check_connection:
self.send_command('PING')
- if nativestr(self.read_response()) != 'PONG':
+ if str_if_bytes(self.read_response()) != 'PONG':
raise ConnectionError('PING failed')
def connect(self):
@@ -52,7 +52,7 @@ class SentinelManagedConnection(Connection):
def read_response(self):
try:
- return super(SentinelManagedConnection, self).read_response()
+ return super().read_response()
except ReadOnlyError:
if self.connection_pool.is_master:
# When talking to a master, a ReadOnlyError when likely
@@ -78,7 +78,7 @@ class SentinelConnectionPool(ConnectionPool):
'connection_class', SentinelManagedConnection)
self.is_master = kwargs.pop('is_master', True)
self.check_connection = kwargs.pop('check_connection', False)
- super(SentinelConnectionPool, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.connection_kwargs['connection_pool'] = weakref.proxy(self)
self.service_name = service_name
self.sentinel_manager = sentinel_manager
@@ -91,7 +91,7 @@ class SentinelConnectionPool(ConnectionPool):
)
def reset(self):
- super(SentinelConnectionPool, self).reset()
+ super().reset()
self.master_address = None
self.slave_rr_counter = None
@@ -119,7 +119,7 @@ class SentinelConnectionPool(ConnectionPool):
if slaves:
if self.slave_rr_counter is None:
self.slave_rr_counter = random.randint(0, len(slaves) - 1)
- for _ in xrange(len(slaves)):
+ for _ in range(len(slaves)):
self.slave_rr_counter = (
self.slave_rr_counter + 1) % len(slaves)
slave = slaves[self.slave_rr_counter]
@@ -132,7 +132,7 @@ class SentinelConnectionPool(ConnectionPool):
raise SlaveNotFoundError('No slave found for %r' % (self.service_name))
-class Sentinel(object):
+class Sentinel:
"""
Redis Sentinel cluster client
@@ -168,7 +168,7 @@ class Sentinel(object):
if sentinel_kwargs is None:
sentinel_kwargs = {
k: v
- for k, v in iteritems(connection_kwargs)
+ for k, v in connection_kwargs.items()
if k.startswith('socket_')
}
self.sentinel_kwargs = sentinel_kwargs
diff --git a/redis/utils.py b/redis/utils.py
index 6ef6fd4..3664708 100644
--- a/redis/utils.py
+++ b/redis/utils.py
@@ -26,8 +26,13 @@ def pipeline(redis_obj):
p.execute()
-class dummy(object):
- """
- Instances of this class can be used as an attribute container.
- """
- pass
+def str_if_bytes(value):
+ return (
+ value.decode('utf-8', errors='replace')
+ if isinstance(value, bytes)
+ else value
+ )
+
+
+def safe_str(value):
+ return str(str_if_bytes(value))
diff --git a/setup.cfg b/setup.cfg
index 430cba0..3fdea4e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -17,9 +17,8 @@ classifiers =
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
- Programming Language :: Python :: 2
- Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
+ Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
@@ -30,13 +29,10 @@ classifiers =
[options]
packages = redis
-python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+python_requires = >=3.5
[options.extras_require]
hiredis = hiredis>=0.1.3
[flake8]
exclude = .venv,.tox,dist,docs,build,*.egg,redis_install,env,venv,.undodir
-
-[bdist_wheel]
-universal = 1
diff --git a/tests/conftest.py b/tests/conftest.py
index caca8cc..26893db 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,11 +1,9 @@
-import random
-
import pytest
+import random
import redis
-from mock import Mock
-
-from redis._compat import urlparse
from distutils.version import StrictVersion
+from unittest.mock import Mock
+from urllib.parse import urlparse
# redis 6 release candidates report a version number of 5.9.x. Use this
diff --git a/tests/test_commands.py b/tests/test_commands.py
index 91bcbb3..38e2d1a 100644
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -1,13 +1,11 @@
-from __future__ import unicode_literals
import binascii
import datetime
import pytest
import re
import redis
import time
+from string import ascii_letters
-from redis._compat import (unichr, ascii_letters, iteritems, iterkeys,
- itervalues, long, basestring)
from redis.client import parse_info
from redis import exceptions
@@ -44,7 +42,7 @@ def get_stream_message(client, stream, message_id):
# RESPONSE CALLBACKS
-class TestResponseCallbacks(object):
+class TestResponseCallbacks:
"Tests for the response callback system"
def test_response_callbacks(self, r):
@@ -58,7 +56,7 @@ class TestResponseCallbacks(object):
assert r.response_callbacks['del'] == r.response_callbacks['DEL']
-class TestRedisCommands(object):
+class TestRedisCommands:
def test_command_on_invalid_key_type(self, r):
r.lpush('a', '1')
with pytest.raises(redis.ResponseError):
@@ -93,7 +91,7 @@ class TestRedisCommands(object):
@skip_if_server_version_lt(REDIS_6_VERSION)
def test_acl_genpass(self, r):
password = r.acl_genpass()
- assert isinstance(password, basestring)
+ assert isinstance(password, str)
@skip_if_server_version_lt(REDIS_6_VERSION)
def test_acl_getuser_setuser(self, r, request):
@@ -237,7 +235,7 @@ class TestRedisCommands(object):
@skip_if_server_version_lt(REDIS_6_VERSION)
def test_acl_whoami(self, r):
username = r.acl_whoami()
- assert isinstance(username, basestring)
+ assert isinstance(username, str)
def test_client_list(self, r):
clients = r.client_list()
@@ -411,7 +409,7 @@ class TestRedisCommands(object):
def test_slowlog_get(self, r, slowlog):
assert r.slowlog_reset()
- unicode_string = unichr(3456) + 'abcd' + unichr(3421)
+ unicode_string = chr(3456) + 'abcd' + chr(3421)
r.get(unicode_string)
slowlog = r.slowlog_get()
assert isinstance(slowlog, list)
@@ -646,7 +644,7 @@ class TestRedisCommands(object):
assert r.get('a') is None
byte_string = b'value'
integer = 5
- unicode_string = unichr(3456) + 'abcd' + unichr(3421)
+ unicode_string = chr(3456) + 'abcd' + chr(3421)
assert r.set('byte_string', byte_string)
assert r.set('integer', 5)
assert r.set('unicode_string', unicode_string)
@@ -733,7 +731,7 @@ class TestRedisCommands(object):
def test_mset(self, r):
d = {'a': b'1', 'b': b'2', 'c': b'3'}
assert r.mset(d)
- for k, v in iteritems(d):
+ for k, v in d.items():
assert r[k] == v
def test_msetnx(self, r):
@@ -741,7 +739,7 @@ class TestRedisCommands(object):
assert r.msetnx(d)
d2 = {'a': b'x', 'd': b'4'}
assert not r.msetnx(d2)
- for k, v in iteritems(d):
+ for k, v in d.items():
assert r[k] == v
assert r.get('d') is None
@@ -1692,7 +1690,7 @@ class TestRedisCommands(object):
def test_hkeys(self, r):
h = {b'a1': b'1', b'a2': b'2', b'a3': b'3'}
r.hset('a', mapping=h)
- local_keys = list(iterkeys(h))
+ local_keys = list(h.keys())
remote_keys = r.hkeys('a')
assert (sorted(local_keys) == sorted(remote_keys))
@@ -1722,7 +1720,7 @@ class TestRedisCommands(object):
def test_hvals(self, r):
h = {b'a1': b'1', b'a2': b'2', b'a3': b'3'}
r.hset('a', mapping=h)
- local_vals = list(itervalues(h))
+ local_vals = list(h.values())
remote_vals = r.hvals('a')
assert sorted(local_vals) == sorted(remote_vals)
@@ -2320,8 +2318,8 @@ class TestRedisCommands(object):
]
# we can't determine the idle time, so just make sure it's an int
- assert isinstance(info[0].pop('idle'), (int, long))
- assert isinstance(info[1].pop('idle'), (int, long))
+ assert isinstance(info[0].pop('idle'), int)
+ assert isinstance(info[1].pop('idle'), int)
assert info == expected
@skip_if_server_version_lt('5.0.0')
@@ -2650,7 +2648,7 @@ class TestRedisCommands(object):
r.set('foo', 'bar')
stats = r.memory_stats()
assert isinstance(stats, dict)
- for key, value in iteritems(stats):
+ for key, value in stats.items():
if key.startswith('db.'):
assert isinstance(value, dict)
@@ -2665,7 +2663,7 @@ class TestRedisCommands(object):
assert not r.module_list()
-class TestBinarySave(object):
+class TestBinarySave:
def test_binary_get_set(self, r):
assert r.set(' foo bar ', '123')
@@ -2691,14 +2689,14 @@ class TestBinarySave(object):
b'foo\tbar\x07': [b'7', b'8', b'9'],
}
# fill in lists
- for key, value in iteritems(mapping):
+ for key, value in mapping.items():
r.rpush(key, *value)
# check that KEYS returns all the keys as they are
- assert sorted(r.keys('*')) == sorted(iterkeys(mapping))
+ assert sorted(r.keys('*')) == sorted(mapping.keys())
# check that it is possible to get list content by key name
- for key, value in iteritems(mapping):
+ for key, value in mapping.items():
assert r.lrange(key, 0, -1) == value
def test_22_info(self, r):
diff --git a/tests/test_connection.py b/tests/test_connection.py
index 5ca9254..128bac7 100644
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -1,4 +1,4 @@
-import mock
+from unittest import mock
import pytest
from redis.exceptions import InvalidResponse
diff --git a/tests/test_connection_pool.py b/tests/test_connection_pool.py
index c49ecda..c26090b 100644
--- a/tests/test_connection_pool.py
+++ b/tests/test_connection_pool.py
@@ -1,9 +1,9 @@
import os
-import mock
import pytest
import re
import redis
import time
+from unittest import mock
from threading import Thread
from redis.connection import ssl_available, to_bool
@@ -11,7 +11,7 @@ from .conftest import skip_if_server_version_lt, _get_client, REDIS_6_VERSION
from .test_pubsub import wait_for_message
-class DummyConnection(object):
+class DummyConnection:
description_format = "DummyConnection<>"
def __init__(self, **kwargs):
@@ -25,7 +25,7 @@ class DummyConnection(object):
return False
-class TestConnectionPool(object):
+class TestConnectionPool:
def get_pool(self, connection_kwargs=None, max_connections=None,
connection_class=redis.Connection):
connection_kwargs = connection_kwargs or {}
@@ -93,7 +93,7 @@ class TestConnectionPool(object):
assert repr(pool) == expected
-class TestBlockingConnectionPool(object):
+class TestBlockingConnectionPool:
def get_pool(self, connection_kwargs=None, max_connections=10, timeout=20):
connection_kwargs = connection_kwargs or {}
pool = redis.BlockingConnectionPool(connection_class=DummyConnection,
@@ -179,7 +179,7 @@ class TestBlockingConnectionPool(object):
assert repr(pool) == expected
-class TestConnectionPoolURLParsing(object):
+class TestConnectionPoolURLParsing:
def test_defaults(self):
pool = redis.ConnectionPool.from_url('redis://localhost')
assert pool.connection_class == redis.Connection
@@ -411,7 +411,7 @@ class TestConnectionPoolURLParsing(object):
)
-class TestConnectionPoolUnixSocketURLParsing(object):
+class TestConnectionPoolUnixSocketURLParsing:
def test_defaults(self):
pool = redis.ConnectionPool.from_url('unix:///socket')
assert pool.connection_class == redis.UnixDomainSocketConnection
@@ -519,7 +519,7 @@ class TestConnectionPoolUnixSocketURLParsing(object):
}
-class TestSSLConnectionURLParsing(object):
+class TestSSLConnectionURLParsing:
@pytest.mark.skipif(not ssl_available, reason="SSL not installed")
def test_defaults(self):
pool = redis.ConnectionPool.from_url('rediss://localhost')
@@ -561,7 +561,7 @@ class TestSSLConnectionURLParsing(object):
assert pool.get_connection('_').check_hostname is True
-class TestConnection(object):
+class TestConnection:
def test_on_connect_error(self):
"""
An error in Connection.on_connect should disconnect from the server
@@ -658,7 +658,7 @@ class TestConnection(object):
r.execute_command('DEBUG', 'ERROR', 'ERR invalid password')
-class TestMultiConnectionClient(object):
+class TestMultiConnectionClient:
@pytest.fixture()
def r(self, request):
return _get_client(redis.Redis,
@@ -671,7 +671,7 @@ class TestMultiConnectionClient(object):
assert r.get('a') == b'123'
-class TestHealthCheck(object):
+class TestHealthCheck:
interval = 60
@pytest.fixture()
diff --git a/tests/test_encoding.py b/tests/test_encoding.py
index ea7db7f..706654f 100644
--- a/tests/test_encoding.py
+++ b/tests/test_encoding.py
@@ -1,13 +1,11 @@
-from __future__ import unicode_literals
import pytest
import redis
-from redis._compat import unichr, unicode
from redis.connection import Connection
from .conftest import _get_client
-class TestEncoding(object):
+class TestEncoding:
@pytest.fixture()
def r(self, request):
return _get_client(redis.Redis, request=request, decode_responses=True)
@@ -21,21 +19,21 @@ class TestEncoding(object):
)
def test_simple_encoding(self, r_no_decode):
- unicode_string = unichr(3456) + 'abcd' + unichr(3421)
+ unicode_string = chr(3456) + 'abcd' + chr(3421)
r_no_decode['unicode-string'] = unicode_string.encode('utf-8')
cached_val = r_no_decode['unicode-string']
assert isinstance(cached_val, bytes)
assert unicode_string == cached_val.decode('utf-8')
def test_simple_encoding_and_decoding(self, r):
- unicode_string = unichr(3456) + 'abcd' + unichr(3421)
+ unicode_string = chr(3456) + 'abcd' + chr(3421)
r['unicode-string'] = unicode_string
cached_val = r['unicode-string']
- assert isinstance(cached_val, unicode)
+ assert isinstance(cached_val, str)
assert unicode_string == cached_val
def test_memoryview_encoding(self, r_no_decode):
- unicode_string = unichr(3456) + 'abcd' + unichr(3421)
+ unicode_string = chr(3456) + 'abcd' + chr(3421)
unicode_string_view = memoryview(unicode_string.encode('utf-8'))
r_no_decode['unicode-string-memoryview'] = unicode_string_view
cached_val = r_no_decode['unicode-string-memoryview']
@@ -44,21 +42,21 @@ class TestEncoding(object):
assert unicode_string == cached_val.decode('utf-8')
def test_memoryview_encoding_and_decoding(self, r):
- unicode_string = unichr(3456) + 'abcd' + unichr(3421)
+ unicode_string = chr(3456) + 'abcd' + chr(3421)
unicode_string_view = memoryview(unicode_string.encode('utf-8'))
r['unicode-string-memoryview'] = unicode_string_view
cached_val = r['unicode-string-memoryview']
- assert isinstance(cached_val, unicode)
+ assert isinstance(cached_val, str)
assert unicode_string == cached_val
def test_list_encoding(self, r):
- unicode_string = unichr(3456) + 'abcd' + unichr(3421)
+ unicode_string = chr(3456) + 'abcd' + chr(3421)
result = [unicode_string, unicode_string, unicode_string]
r.rpush('a', *result)
assert r.lrange('a', 0, -1) == result
-class TestEncodingErrors(object):
+class TestEncodingErrors:
def test_ignore(self, request):
r = _get_client(redis.Redis, request=request, decode_responses=True,
encoding_errors='ignore')
@@ -72,7 +70,7 @@ class TestEncodingErrors(object):
assert r.get('a') == 'foo\ufffd'
-class TestMemoryviewsAreNotPacked(object):
+class TestMemoryviewsAreNotPacked:
def test_memoryviews_are_not_packed(self):
c = Connection()
arg = memoryview(b'some_arg')
@@ -84,7 +82,7 @@ class TestMemoryviewsAreNotPacked(object):
assert cmds[3] is arg
-class TestCommandsAreNotEncoded(object):
+class TestCommandsAreNotEncoded:
@pytest.fixture()
def r(self, request):
return _get_client(redis.Redis, request=request, encoding='utf-16')
@@ -93,7 +91,7 @@ class TestCommandsAreNotEncoded(object):
r.set('hello', 'world')
-class TestInvalidUserInput(object):
+class TestInvalidUserInput:
def test_boolean_fails(self, r):
with pytest.raises(redis.DataError):
r.set('a', True)
@@ -103,12 +101,9 @@ class TestInvalidUserInput(object):
r.set('a', None)
def test_user_type_fails(self, r):
- class Foo(object):
+ class Foo:
def __str__(self):
return 'Foo'
- def __unicode__(self):
- return 'Foo'
-
with pytest.raises(redis.DataError):
r.set('a', Foo())
diff --git a/tests/test_lock.py b/tests/test_lock.py
index e8bd874..8bb3c7e 100644
--- a/tests/test_lock.py
+++ b/tests/test_lock.py
@@ -7,7 +7,7 @@ from redis.lock import Lock
from .conftest import _get_client
-class TestLock(object):
+class TestLock:
@pytest.fixture()
def r_decoded(self, request):
return _get_client(Redis, request=request, decode_responses=True)
@@ -220,9 +220,9 @@ class TestLock(object):
lock.reacquire()
-class TestLockClassSelection(object):
+class TestLockClassSelection:
def test_lock_class_argument(self, r):
- class MyLock(object):
+ class MyLock:
def __init__(self, *args, **kwargs):
pass
diff --git a/tests/test_monitor.py b/tests/test_monitor.py
index ee5dc6e..1013202 100644
--- a/tests/test_monitor.py
+++ b/tests/test_monitor.py
@@ -1,9 +1,7 @@
-from __future__ import unicode_literals
-from redis._compat import unicode
from .conftest import wait_for_command
-class TestMonitor(object):
+class TestMonitor:
def test_wait_command_not_found(self, r):
"Make sure the wait_for_command func works when command is not found"
with r.monitor() as m:
@@ -17,8 +15,8 @@ class TestMonitor(object):
assert isinstance(response['time'], float)
assert response['db'] == 9
assert response['client_type'] in ('tcp', 'unix')
- assert isinstance(response['client_address'], unicode)
- assert isinstance(response['client_port'], unicode)
+ assert isinstance(response['client_address'], str)
+ assert isinstance(response['client_port'], str)
assert response['command'] == 'PING'
def test_command_with_quoted_key(self, r):
diff --git a/tests/test_multiprocessing.py b/tests/test_multiprocessing.py
index 235e3ce..2d27c4e 100644
--- a/tests/test_multiprocessing.py
+++ b/tests/test_multiprocessing.py
@@ -17,7 +17,7 @@ def exit_callback(callback, *args):
callback(*args)
-class TestMultiprocessing(object):
+class TestMultiprocessing:
# Test connection sharing between forks.
# See issue #1085 for details.
diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py
index 4f22153..9bc4a9f 100644
--- a/tests/test_pipeline.py
+++ b/tests/test_pipeline.py
@@ -1,12 +1,10 @@
-from __future__ import unicode_literals
import pytest
import redis
-from redis._compat import unichr, unicode
from .conftest import wait_for_command
-class TestPipeline(object):
+class TestPipeline:
def test_pipeline_is_true(self, r):
"Ensure pipeline instances are not false-y"
with r.pipeline() as pipe:
@@ -124,8 +122,8 @@ class TestPipeline(object):
pipe.set('a', 1).set('b', 2).lpush('c', 3).set('d', 4)
with pytest.raises(redis.ResponseError) as ex:
pipe.execute()
- assert unicode(ex.value).startswith('Command # 3 (LPUSH c 3) of '
- 'pipeline caused error: ')
+ assert str(ex.value).startswith('Command # 3 (LPUSH c 3) of '
+ 'pipeline caused error: ')
# make sure the pipe was restored to a working state
assert pipe.set('z', 'zzz').execute() == [True]
@@ -166,8 +164,8 @@ class TestPipeline(object):
with pytest.raises(redis.ResponseError) as ex:
pipe.execute()
- assert unicode(ex.value).startswith('Command # 2 (ZREM b) of '
- 'pipeline caused error: ')
+ assert str(ex.value).startswith('Command # 2 (ZREM b) of '
+ 'pipeline caused error: ')
# make sure the pipe was restored to a working state
assert pipe.set('z', 'zzz').execute() == [True]
@@ -181,8 +179,8 @@ class TestPipeline(object):
with pytest.raises(redis.ResponseError) as ex:
pipe.execute()
- assert unicode(ex.value).startswith('Command # 2 (ZREM b) of '
- 'pipeline caused error: ')
+ assert str(ex.value).startswith('Command # 2 (ZREM b) of '
+ 'pipeline caused error: ')
# make sure the pipe was restored to a working state
assert pipe.set('z', 'zzz').execute() == [True]
@@ -319,13 +317,13 @@ class TestPipeline(object):
with pytest.raises(redis.ResponseError) as ex:
pipe.execute()
- assert unicode(ex.value).startswith('Command # 1 (LLEN a) of '
- 'pipeline caused error: ')
+ assert str(ex.value).startswith('Command # 1 (LLEN a) of '
+ 'pipeline caused error: ')
assert r['a'] == b'1'
def test_exec_error_in_no_transaction_pipeline_unicode_command(self, r):
- key = unichr(3456) + 'abcd' + unichr(3421)
+ key = chr(3456) + 'abcd' + chr(3421)
r[key] = 1
with r.pipeline(transaction=False) as pipe:
pipe.llen(key)
@@ -334,9 +332,8 @@ class TestPipeline(object):
with pytest.raises(redis.ResponseError) as ex:
pipe.execute()
- expected = unicode('Command # 1 (LLEN %s) of pipeline caused '
- 'error: ') % key
- assert unicode(ex.value).startswith(expected)
+ expected = 'Command # 1 (LLEN %s) of pipeline caused error: ' % key
+ assert str(ex.value).startswith(expected)
assert r[key] == b'1'
diff --git a/tests/test_pubsub.py b/tests/test_pubsub.py
index bf13483..ab9f09c 100644
--- a/tests/test_pubsub.py
+++ b/tests/test_pubsub.py
@@ -1,10 +1,8 @@
-from __future__ import unicode_literals
import pytest
import time
import redis
from redis.exceptions import ConnectionError
-from redis._compat import basestring, unichr
from .conftest import _get_client
from .conftest import skip_if_server_version_lt
@@ -28,7 +26,7 @@ def make_message(type, channel, data, pattern=None):
'type': type,
'pattern': pattern and pattern.encode('utf-8') or None,
'channel': channel and channel.encode('utf-8') or None,
- 'data': data.encode('utf-8') if isinstance(data, basestring) else data
+ 'data': data.encode('utf-8') if isinstance(data, str) else data
}
@@ -40,7 +38,7 @@ def make_subscribe_test_data(pubsub, type):
'unsub_type': 'unsubscribe',
'sub_func': pubsub.subscribe,
'unsub_func': pubsub.unsubscribe,
- 'keys': ['foo', 'bar', 'uni' + unichr(4456) + 'code']
+ 'keys': ['foo', 'bar', 'uni' + chr(4456) + 'code']
}
elif type == 'pattern':
return {
@@ -49,12 +47,12 @@ def make_subscribe_test_data(pubsub, type):
'unsub_type': 'punsubscribe',
'sub_func': pubsub.psubscribe,
'unsub_func': pubsub.punsubscribe,
- 'keys': ['f*', 'b*', 'uni' + unichr(4456) + '*']
+ 'keys': ['f*', 'b*', 'uni' + chr(4456) + '*']
}
assert False, 'invalid subscribe type: %s' % type
-class TestPubSubSubscribeUnsubscribe(object):
+class TestPubSubSubscribeUnsubscribe:
def _test_subscribe_unsubscribe(self, p, sub_type, unsub_type, sub_func,
unsub_func, keys):
@@ -255,7 +253,7 @@ class TestPubSubSubscribeUnsubscribe(object):
assert p.subscribed is True
-class TestPubSubMessages(object):
+class TestPubSubMessages:
def setup_method(self, method):
self.message = None
@@ -314,7 +312,7 @@ class TestPubSubMessages(object):
def test_unicode_channel_message_handler(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
- channel = 'uni' + unichr(4456) + 'code'
+ channel = 'uni' + chr(4456) + 'code'
channels = {channel: self.message_handler}
p.subscribe(**channels)
assert wait_for_message(p) is None
@@ -324,8 +322,8 @@ class TestPubSubMessages(object):
def test_unicode_pattern_message_handler(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
- pattern = 'uni' + unichr(4456) + '*'
- channel = 'uni' + unichr(4456) + 'code'
+ pattern = 'uni' + chr(4456) + '*'
+ channel = 'uni' + chr(4456) + 'code'
p.psubscribe(**{pattern: self.message_handler})
assert wait_for_message(p) is None
assert r.publish(channel, 'test message') == 1
@@ -342,12 +340,12 @@ class TestPubSubMessages(object):
assert expect in info.exconly()
-class TestPubSubAutoDecoding(object):
+class TestPubSubAutoDecoding:
"These tests only validate that we get unicode values back"
- channel = 'uni' + unichr(4456) + 'code'
- pattern = 'uni' + unichr(4456) + '*'
- data = 'abc' + unichr(4458) + '123'
+ channel = 'uni' + chr(4456) + 'code'
+ pattern = 'uni' + chr(4456) + '*'
+ data = 'abc' + chr(4458) + '123'
def make_message(self, type, channel, data, pattern=None):
return {
@@ -458,7 +456,7 @@ class TestPubSubAutoDecoding(object):
assert pubsub.patterns == {}
-class TestPubSubRedisDown(object):
+class TestPubSubRedisDown:
def test_channel_subscribe(self, r):
r = redis.Redis(host='localhost', port=6390)
@@ -467,7 +465,7 @@ class TestPubSubRedisDown(object):
p.subscribe('foo')
-class TestPubSubSubcommands(object):
+class TestPubSubSubcommands:
@skip_if_server_version_lt('2.8.0')
def test_pubsub_channels(self, r):
@@ -504,7 +502,7 @@ class TestPubSubSubcommands(object):
assert r.pubsub_numpat() == 3
-class TestPubSubPings(object):
+class TestPubSubPings:
@skip_if_server_version_lt('3.0.0')
def test_send_pubsub_ping(self, r):
@@ -525,7 +523,7 @@ class TestPubSubPings(object):
pattern=None)
-class TestPubSubConnectionKilled(object):
+class TestPubSubConnectionKilled:
@skip_if_server_version_lt('3.0.0')
def test_connection_error_raised_when_connection_dies(self, r):
@@ -539,7 +537,7 @@ class TestPubSubConnectionKilled(object):
wait_for_message(p)
-class TestPubSubTimeouts(object):
+class TestPubSubTimeouts:
def test_get_message_with_timeout_returns_none(self, r):
p = r.pubsub()
p.subscribe('foo')
diff --git a/tests/test_scripting.py b/tests/test_scripting.py
index b3d52a5..02c0f17 100644
--- a/tests/test_scripting.py
+++ b/tests/test_scripting.py
@@ -1,4 +1,3 @@
-from __future__ import unicode_literals
import pytest
from redis import exceptions
@@ -21,7 +20,7 @@ return "hello " .. name
"""
-class TestScripting(object):
+class TestScripting:
@pytest.fixture(autouse=True)
def reset_scripts(self, r):
r.script_flush()
diff --git a/tests/test_sentinel.py b/tests/test_sentinel.py
index c247c72..64a7c47 100644
--- a/tests/test_sentinel.py
+++ b/tests/test_sentinel.py
@@ -5,7 +5,6 @@ import pytest
from redis import exceptions
from redis.sentinel import (Sentinel, SentinelConnectionPool,
MasterNotFoundError, SlaveNotFoundError)
-from redis._compat import next
import redis.sentinel
@@ -14,7 +13,7 @@ def master_ip(master_host):
yield socket.gethostbyname(master_host)
-class SentinelTestClient(object):
+class SentinelTestClient:
def __init__(self, cluster, id):
self.cluster = cluster
self.id = id
@@ -32,7 +31,7 @@ class SentinelTestClient(object):
return self.cluster.slaves
-class SentinelTestCluster(object):
+class SentinelTestCluster:
def __init__(self, service_name='mymaster', ip='127.0.0.1', port=6379):
self.clients = {}
self.master = {
diff --git a/tox.ini b/tox.ini
index d1d97f4..c783bf0 100644
--- a/tox.ini
+++ b/tox.ini
@@ -3,12 +3,11 @@ addopts = -s
[tox]
minversion = 2.4
-envlist = {py27,py35,py36,py37,py38,pypy,pypy3}-{plain,hiredis}, flake8, covreport, codecov
+envlist = {py35,py36,py37,py38,pypy3}-{plain,hiredis}, flake8, covreport, codecov
[testenv]
deps =
coverage
- mock
pytest >= 2.7.0
extras =
hiredis: hiredis