summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGuido van Rossum <guido@python.org>2020-05-14 19:01:14 -0700
committerGuido van Rossum <guido@python.org>2020-05-14 19:01:14 -0700
commitf93a54c48fc1644012aa0d4ee3887c1d121ac40e (patch)
tree189c841245d15318e5895638105bdbc532cbcdf5
parent31641ff0e4b18c8d002d019f4506f0e8fb446983 (diff)
parent16ab07063cb564c1937714bd39d6915172f005b5 (diff)
downloadcpython-git-fix-traceback-syntax-error.tar.gz
Merge branch 'master' into fix-traceback-syntax-errorfix-traceback-syntax-error
-rw-r--r--.github/workflows/build.yml24
-rw-r--r--Doc/library/code.rst4
-rw-r--r--Doc/library/codeop.rst5
-rw-r--r--Doc/library/compileall.rst21
-rw-r--r--Doc/whatsnew/3.9.rst13
-rw-r--r--Grammar/python.gram30
-rw-r--r--Include/cpython/object.h2
-rw-r--r--Include/cpython/unicodeobject.h2
-rw-r--r--Include/internal/pycore_hashtable.h23
-rw-r--r--Include/internal/pycore_interp.h22
-rw-r--r--Lib/codeop.py3
-rw-r--r--Lib/compileall.py42
-rw-r--r--Lib/email/contentmanager.py14
-rw-r--r--Lib/gettext.py8
-rw-r--r--Lib/idlelib/zzdummy.py2
-rw-r--r--Lib/linecache.py6
-rw-r--r--Lib/test/test__xxsubinterpreters.py301
-rw-r--r--Lib/test/test_asyncio/test_tasks.py27
-rw-r--r--Lib/test/test_compileall.py224
-rw-r--r--Lib/test/test_dictcomps.py2
-rw-r--r--Lib/test/test_email/test_contentmanager.py15
-rw-r--r--Lib/test/test_exceptions.py2
-rw-r--r--Lib/test/test_generators.py21
-rw-r--r--Lib/test/test_genexps.py2
-rw-r--r--Lib/test/test_grammar.py5
-rw-r--r--Lib/test/test_json/test_recursion.py2
-rw-r--r--Lib/test/test_peg_parser.py27
-rw-r--r--Lib/test/test_syntax.py52
-rw-r--r--Misc/ACKS1
-rw-r--r--Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst1
-rw-r--r--Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst2
-rw-r--r--Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst1
-rw-r--r--Modules/_io/textio.c2
-rw-r--r--Modules/_testinternalcapi.c92
-rw-r--r--Modules/_xxsubinterpretersmodule.c1147
-rw-r--r--Modules/clinic/posixmodule.c.h91
-rw-r--r--Modules/posixmodule.c178
-rw-r--r--Objects/genobject.c22
-rw-r--r--Objects/unicodeobject.c70
-rw-r--r--Parser/pegen/parse.c119
-rw-r--r--Parser/pegen/pegen.c76
-rw-r--r--Parser/pegen/pegen.h4
-rw-r--r--Python/ast.c13
-rw-r--r--Python/hashtable.c180
-rw-r--r--Python/marshal.c2
-rwxr-xr-xTools/clinic/clinic.py9
46 files changed, 1118 insertions, 1793 deletions
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 6e6a6d2b78..dabfb79e9d 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,9 +16,27 @@ on:
- 3.7
jobs:
+ check_source:
+ name: 'Check for source changes'
+ runs-on: ubuntu-latest
+ outputs:
+ run_tests: ${{ steps.check.outputs.run_tests }}
+ steps:
+ - uses: actions/checkout@v2
+ - name: Check for source changes
+ id: check
+ run: |
+ if [ -z "GITHUB_BASE_REF" ]; then
+ echo '::set-output name=run_tests::true'
+ else
+ git fetch origin $GITHUB_BASE_REF --depth=1
+ git diff --name-only origin/$GITHUB_BASE_REF... | grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true'
+ fi
build_win32:
name: 'Windows (x86)'
runs-on: windows-latest
+ needs: check_source
+ if: needs.check_source.outputs.run_tests == 'true'
steps:
- uses: actions/checkout@v1
- name: Build CPython
@@ -31,6 +49,8 @@ jobs:
build_win_amd64:
name: 'Windows (x64)'
runs-on: windows-latest
+ needs: check_source
+ if: needs.check_source.outputs.run_tests == 'true'
steps:
- uses: actions/checkout@v1
- name: Build CPython
@@ -43,6 +63,8 @@ jobs:
build_macos:
name: 'macOS'
runs-on: macos-latest
+ needs: check_source
+ if: needs.check_source.outputs.run_tests == 'true'
steps:
- uses: actions/checkout@v1
- name: Configure CPython
@@ -57,6 +79,8 @@ jobs:
build_ubuntu:
name: 'Ubuntu'
runs-on: ubuntu-latest
+ needs: check_source
+ if: needs.check_source.outputs.run_tests == 'true'
env:
OPENSSL_VER: 1.1.1f
steps:
diff --git a/Doc/library/code.rst b/Doc/library/code.rst
index 6708079f77..538e5afc78 100644
--- a/Doc/library/code.rst
+++ b/Doc/library/code.rst
@@ -56,8 +56,8 @@ build applications which provide an interactive interpreter prompt.
*source* is the source string; *filename* is the optional filename from which
source was read, defaulting to ``'<input>'``; and *symbol* is the optional
- grammar start symbol, which should be either ``'single'`` (the default) or
- ``'eval'``.
+ grammar start symbol, which should be ``'single'`` (the default), ``'eval'``
+ or ``'exec'``.
Returns a code object (the same as ``compile(source, filename, symbol)``) if the
command is complete and valid; ``None`` if the command is incomplete; raises
diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst
index a52d2c62c4..c66b9d3ec0 100644
--- a/Doc/library/codeop.rst
+++ b/Doc/library/codeop.rst
@@ -43,8 +43,9 @@ To do just the former:
:exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal.
The *symbol* argument determines whether *source* is compiled as a statement
- (``'single'``, the default) or as an :term:`expression` (``'eval'``). Any
- other value will cause :exc:`ValueError` to be raised.
+ (``'single'``, the default), as a sequence of statements (``'exec'``) or
+ as an :term:`expression` (``'eval'``). Any other value will
+ cause :exc:`ValueError` to be raised.
.. note::
diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst
index b1ae9d60e8..a511c7eda2 100644
--- a/Doc/library/compileall.rst
+++ b/Doc/library/compileall.rst
@@ -113,6 +113,11 @@ compile Python sources.
Ignore symlinks pointing outside the given directory.
+.. cmdoption:: --hardlink-dupes
+
+ If two ``.pyc`` files with different optimization level have
+ the same content, use hard links to consolidate duplicate files.
+
.. versionchanged:: 3.2
Added the ``-i``, ``-b`` and ``-h`` options.
@@ -125,7 +130,7 @@ compile Python sources.
Added the ``--invalidation-mode`` option.
.. versionchanged:: 3.9
- Added the ``-s``, ``-p``, ``-e`` options.
+ Added the ``-s``, ``-p``, ``-e`` and ``--hardlink-dupes`` options.
Raised the default recursion limit from 10 to
:py:func:`sys.getrecursionlimit()`.
Added the possibility to specify the ``-o`` option multiple times.
@@ -143,7 +148,7 @@ runtime.
Public functions
----------------
-.. function:: compile_dir(dir, maxlevels=sys.getrecursionlimit(), ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None)
+.. function:: compile_dir(dir, maxlevels=sys.getrecursionlimit(), ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False)
Recursively descend the directory tree named by *dir*, compiling all :file:`.py`
files along the way. Return a true value if all the files compiled successfully,
@@ -193,6 +198,9 @@ Public functions
the ``-s``, ``-p`` and ``-e`` options described above.
They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`.
+ If *hardlink_dupes* is true and two ``.pyc`` files with different optimization
+ level have the same content, use hard links to consolidate duplicate files.
+
.. versionchanged:: 3.2
Added the *legacy* and *optimize* parameter.
@@ -219,9 +227,9 @@ Public functions
Setting *workers* to 0 now chooses the optimal number of cores.
.. versionchanged:: 3.9
- Added *stripdir*, *prependdir* and *limit_sl_dest* arguments.
+ Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments.
-.. function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None)
+.. function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False)
Compile the file with path *fullname*. Return a true value if the file
compiled successfully, and a false value otherwise.
@@ -257,6 +265,9 @@ Public functions
the ``-s``, ``-p`` and ``-e`` options described above.
They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`.
+ If *hardlink_dupes* is true and two ``.pyc`` files with different optimization
+ level have the same content, use hard links to consolidate duplicate files.
+
.. versionadded:: 3.2
.. versionchanged:: 3.5
@@ -273,7 +284,7 @@ Public functions
The *invalidation_mode* parameter's default value is updated to None.
.. versionchanged:: 3.9
- Added *stripdir*, *prependdir* and *limit_sl_dest* arguments.
+ Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments.
.. function:: compile_path(skip_curdir=True, maxlevels=0, force=False, quiet=0, legacy=False, optimize=-1, invalidation_mode=None)
diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst
index c57d702dce..fbad0fba20 100644
--- a/Doc/whatsnew/3.9.rst
+++ b/Doc/whatsnew/3.9.rst
@@ -245,6 +245,16 @@ that schedules a shutdown for the default executor that waits on the
Added :class:`asyncio.PidfdChildWatcher`, a Linux-specific child watcher
implementation that polls process file descriptors. (:issue:`38692`)
+compileall
+----------
+
+Added new possibility to use hardlinks for duplicated ``.pyc`` files: *hardlink_dupes* parameter and --hardlink-dupes command line option.
+(Contributed by Lumír 'Frenzy' Balhar in :issue:`40495`.)
+
+Added new options for path manipulation in resulting ``.pyc`` files: *stripdir*, *prependdir*, *limit_sl_dest* parameters and -s, -p, -e command line options.
+Added the possibility to specify the option for an optimization level multiple times.
+(Contributed by Lumír 'Frenzy' Balhar in :issue:`38112`.)
+
concurrent.futures
------------------
@@ -964,3 +974,6 @@ Removed
* ``PyTuple_ClearFreeList()``
* ``PyUnicode_ClearFreeList()``: the Unicode free list has been removed in
Python 3.3.
+
+* Remove ``_PyUnicode_ClearStaticStrings()`` function.
+ (Contributed by Victor Stinner in :issue:`39465`.)
diff --git a/Grammar/python.gram b/Grammar/python.gram
index 84c89330e3..cca9209054 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -89,12 +89,12 @@ assignment[stmt_ty]:
"Variable annotation syntax is",
_Py_AnnAssign(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, c, 1, EXTRA)
) }
- | a=('(' b=inside_paren_ann_assign_target ')' { b }
- | ann_assign_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] {
+ | a=('(' b=single_target ')' { b }
+ | single_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] {
CHECK_VERSION(6, "Variable annotations syntax is", _Py_AnnAssign(a, b, c, 0, EXTRA)) }
| a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) tc=[TYPE_COMMENT] {
_Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
- | a=target b=augassign c=(yield_expr | star_expressions) {
+ | a=single_target b=augassign c=(yield_expr | star_expressions) {
_Py_AugAssign(a, b->kind, c, EXTRA) }
| invalid_assignment
@@ -185,7 +185,7 @@ try_stmt[stmt_ty]:
| 'try' ':' b=block f=finally_block { _Py_Try(b, NULL, NULL, f, EXTRA) }
| 'try' ':' b=block ex=except_block+ el=[else_block] f=[finally_block] { _Py_Try(b, ex, el, f, EXTRA) }
except_block[excepthandler_ty]:
- | 'except' e=expression t=['as' z=target { z }] ':' b=block {
+ | 'except' e=expression t=['as' z=NAME { z }] ':' b=block {
_Py_ExceptHandler(e, (t) ? ((expr_ty) t)->v.Name.id : NULL, b, EXTRA) }
| 'except' ':' b=block { _Py_ExceptHandler(NULL, NULL, b, EXTRA) }
finally_block[asdl_seq*]: 'finally' ':' a=block { a }
@@ -573,12 +573,11 @@ star_atom[expr_ty]:
| '(' a=[star_targets_seq] ')' { _Py_Tuple(a, Store, EXTRA) }
| '[' a=[star_targets_seq] ']' { _Py_List(a, Store, EXTRA) }
-inside_paren_ann_assign_target[expr_ty]:
- | ann_assign_subscript_attribute_target
+single_target[expr_ty]:
+ | single_subscript_attribute_target
| a=NAME { _PyPegen_set_expr_context(p, a, Store) }
- | '(' a=inside_paren_ann_assign_target ')' { a }
-
-ann_assign_subscript_attribute_target[expr_ty]:
+ | '(' a=single_target ')' { a }
+single_subscript_attribute_target[expr_ty]:
| a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) }
| a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) }
@@ -641,8 +640,17 @@ invalid_assignment:
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") }
| a=expression ':' expression ['=' annotated_rhs] {
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") }
- | a=expression ('=' | augassign) (yield_expr | star_expressions) {
- RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot assign to %s", _PyPegen_get_expr_name(a)) }
+ | a=star_expressions '=' (yield_expr | star_expressions) {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
+ _PyPegen_get_invalid_target(a),
+ "cannot assign to %s", _PyPegen_get_expr_name(_PyPegen_get_invalid_target(a))) }
+ | a=star_expressions augassign (yield_expr | star_expressions) {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
+ a,
+ "'%s' is an illegal expression for augmented assignment",
+ _PyPegen_get_expr_name(a)
+ )}
+
invalid_block:
| NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") }
invalid_comprehension:
diff --git a/Include/cpython/object.h b/Include/cpython/object.h
index 8bf05a3271..444f832f5b 100644
--- a/Include/cpython/object.h
+++ b/Include/cpython/object.h
@@ -36,7 +36,7 @@ PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void);
PyId_foo is a static variable, either on block level or file level. On first
usage, the string "foo" is interned, and the structures are linked. On interpreter
- shutdown, all strings are released (through _PyUnicode_ClearStaticStrings).
+ shutdown, all strings are released.
Alternatively, _Py_static_string allows choosing the variable name.
_PyUnicode_FromId returns a borrowed reference to the interned string.
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h
index 9432687629..4fd674ffea 100644
--- a/Include/cpython/unicodeobject.h
+++ b/Include/cpython/unicodeobject.h
@@ -1215,8 +1215,6 @@ Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) PyUnicode_AsUnicodeCopy(
/* Return an interned Unicode object for an Identifier; may fail if there is no memory.*/
PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*);
-/* Clear all static strings. */
-PyAPI_FUNC(void) _PyUnicode_ClearStaticStrings(void);
/* Fast equality check when the inputs are known to be exact unicode types
and where the hash values are equal (i.e. a very probable match) */
diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h
index 2990f9e0c1..18757abc28 100644
--- a/Include/internal/pycore_hashtable.h
+++ b/Include/internal/pycore_hashtable.h
@@ -48,18 +48,18 @@ typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *
const void *key);
typedef struct {
- /* allocate a memory block */
+ // Allocate a memory block
void* (*malloc) (size_t size);
- /* release a memory block */
+ // Release a memory block
void (*free) (void *ptr);
} _Py_hashtable_allocator_t;
/* _Py_hashtable: table */
struct _Py_hashtable_t {
- size_t num_buckets;
- size_t entries; /* Total number of entries in the table. */
+ size_t nentries; // Total number of entries in the table
+ size_t nbuckets;
_Py_slist_t *buckets;
_Py_hashtable_get_entry_func get_entry_func;
@@ -70,10 +70,10 @@ struct _Py_hashtable_t {
_Py_hashtable_allocator_t alloc;
};
-/* hash a pointer (void*) */
+/* Hash a pointer (void*) */
PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr(const void *key);
-/* comparison using memcmp() */
+/* Comparison using memcmp() */
PyAPI_FUNC(int) _Py_hashtable_compare_direct(
const void *key1,
const void *key2);
@@ -129,13 +129,14 @@ _Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key)
Use _Py_hashtable_get_entry() to distinguish entry value equal to NULL
and entry not found. */
-extern void *_Py_hashtable_get(_Py_hashtable_t *ht, const void *key);
+PyAPI_FUNC(void*) _Py_hashtable_get(_Py_hashtable_t *ht, const void *key);
-// Remove a key and its associated value without calling key and value destroy
-// functions.
-// Return the removed value if the key was found.
-// Return NULL if the key was not found.
+/* Remove a key and its associated value without calling key and value destroy
+ functions.
+
+ Return the removed value if the key was found.
+ Return NULL if the key was not found. */
PyAPI_FUNC(void*) _Py_hashtable_steal(
_Py_hashtable_t *ht,
const void *key);
diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h
index 26e7a473a1..f04ea330d0 100644
--- a/Include/internal/pycore_interp.h
+++ b/Include/internal/pycore_interp.h
@@ -51,6 +51,19 @@ struct _ceval_state {
#endif
};
+/* fs_codec.encoding is initialized to NULL.
+ Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */
+struct _Py_unicode_fs_codec {
+ char *encoding; // Filesystem encoding (encoded to UTF-8)
+ int utf8; // encoding=="utf-8"?
+ char *errors; // Filesystem errors (encoded to UTF-8)
+ _Py_error_handler error_handler;
+};
+
+struct _Py_unicode_state {
+ struct _Py_unicode_fs_codec fs_codec;
+};
+
/* interpreter state */
@@ -97,14 +110,7 @@ struct _is {
PyObject *codec_error_registry;
int codecs_initialized;
- /* fs_codec.encoding is initialized to NULL.
- Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */
- struct {
- char *encoding; /* Filesystem encoding (encoded to UTF-8) */
- int utf8; /* encoding=="utf-8"? */
- char *errors; /* Filesystem errors (encoded to UTF-8) */
- _Py_error_handler error_handler;
- } fs_codec;
+ struct _Py_unicode_state unicode;
PyConfig config;
#ifdef HAVE_DLOPEN
diff --git a/Lib/codeop.py b/Lib/codeop.py
index 082285f94f..835e68c09b 100644
--- a/Lib/codeop.py
+++ b/Lib/codeop.py
@@ -112,7 +112,8 @@ def compile_command(source, filename="<input>", symbol="single"):
source -- the source string; may contain \n characters
filename -- optional filename from which source was read; default
"<input>"
- symbol -- optional grammar start symbol; "single" (default) or "eval"
+ symbol -- optional grammar start symbol; "single" (default), "exec"
+ or "eval"
Return value / exceptions raised:
diff --git a/Lib/compileall.py b/Lib/compileall.py
index abe6cffce5..fe7f450c55 100644
--- a/Lib/compileall.py
+++ b/Lib/compileall.py
@@ -15,6 +15,7 @@ import sys
import importlib.util
import py_compile
import struct
+import filecmp
from functools import partial
from pathlib import Path
@@ -47,7 +48,7 @@ def _walk_dir(dir, maxlevels, quiet=0):
def compile_dir(dir, maxlevels=None, ddir=None, force=False,
rx=None, quiet=0, legacy=False, optimize=-1, workers=1,
invalidation_mode=None, *, stripdir=None,
- prependdir=None, limit_sl_dest=None):
+ prependdir=None, limit_sl_dest=None, hardlink_dupes=False):
"""Byte-compile all modules in the given directory tree.
Arguments (only dir is required):
@@ -70,6 +71,7 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False,
after stripdir
limit_sl_dest: ignore symlinks if they are pointing outside of
the defined path
+ hardlink_dupes: hardlink duplicated pyc files
"""
ProcessPoolExecutor = None
if ddir is not None and (stripdir is not None or prependdir is not None):
@@ -104,7 +106,8 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False,
invalidation_mode=invalidation_mode,
stripdir=stripdir,
prependdir=prependdir,
- limit_sl_dest=limit_sl_dest),
+ limit_sl_dest=limit_sl_dest,
+ hardlink_dupes=hardlink_dupes),
files)
success = min(results, default=True)
else:
@@ -112,14 +115,15 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False,
if not compile_file(file, ddir, force, rx, quiet,
legacy, optimize, invalidation_mode,
stripdir=stripdir, prependdir=prependdir,
- limit_sl_dest=limit_sl_dest):
+ limit_sl_dest=limit_sl_dest,
+ hardlink_dupes=hardlink_dupes):
success = False
return success
def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
legacy=False, optimize=-1,
invalidation_mode=None, *, stripdir=None, prependdir=None,
- limit_sl_dest=None):
+ limit_sl_dest=None, hardlink_dupes=False):
"""Byte-compile one file.
Arguments (only fullname is required):
@@ -140,6 +144,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
after stripdir
limit_sl_dest: ignore symlinks if they are pointing outside of
the defined path.
+ hardlink_dupes: hardlink duplicated pyc files
"""
if ddir is not None and (stripdir is not None or prependdir is not None):
@@ -176,6 +181,14 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
if isinstance(optimize, int):
optimize = [optimize]
+ # Use set() to remove duplicates.
+ # Use sorted() to create pyc files in a deterministic order.
+ optimize = sorted(set(optimize))
+
+ if hardlink_dupes and len(optimize) < 2:
+ raise ValueError("Hardlinking of duplicated bytecode makes sense "
+ "only for more than one optimization level")
+
if rx is not None:
mo = rx.search(fullname)
if mo:
@@ -220,10 +233,16 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
if not quiet:
print('Compiling {!r}...'.format(fullname))
try:
- for opt_level, cfile in opt_cfiles.items():
+ for index, opt_level in enumerate(optimize):
+ cfile = opt_cfiles[opt_level]
ok = py_compile.compile(fullname, cfile, dfile, True,
optimize=opt_level,
invalidation_mode=invalidation_mode)
+ if index > 0 and hardlink_dupes:
+ previous_cfile = opt_cfiles[optimize[index - 1]]
+ if filecmp.cmp(cfile, previous_cfile, shallow=False):
+ os.unlink(cfile)
+ os.link(previous_cfile, cfile)
except py_compile.PyCompileError as err:
success = False
if quiet >= 2:
@@ -352,6 +371,9 @@ def main():
'Python interpreter itself (specified by -O).'))
parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest',
help='Ignore symlinks pointing outsite of the DIR')
+ parser.add_argument('--hardlink-dupes', action='store_true',
+ dest='hardlink_dupes',
+ help='Hardlink duplicated pyc files')
args = parser.parse_args()
compile_dests = args.compile_dest
@@ -371,6 +393,10 @@ def main():
if args.opt_levels is None:
args.opt_levels = [-1]
+ if len(args.opt_levels) == 1 and args.hardlink_dupes:
+ parser.error(("Hardlinking of duplicated bytecode makes sense "
+ "only for more than one optimization level."))
+
if args.ddir is not None and (
args.stripdir is not None or args.prependdir is not None
):
@@ -404,7 +430,8 @@ def main():
stripdir=args.stripdir,
prependdir=args.prependdir,
optimize=args.opt_levels,
- limit_sl_dest=args.limit_sl_dest):
+ limit_sl_dest=args.limit_sl_dest,
+ hardlink_dupes=args.hardlink_dupes):
success = False
else:
if not compile_dir(dest, maxlevels, args.ddir,
@@ -414,7 +441,8 @@ def main():
stripdir=args.stripdir,
prependdir=args.prependdir,
optimize=args.opt_levels,
- limit_sl_dest=args.limit_sl_dest):
+ limit_sl_dest=args.limit_sl_dest,
+ hardlink_dupes=args.hardlink_dupes):
success = False
return success
else:
diff --git a/Lib/email/contentmanager.py b/Lib/email/contentmanager.py
index b904ded94c..2b4b8757f4 100644
--- a/Lib/email/contentmanager.py
+++ b/Lib/email/contentmanager.py
@@ -146,13 +146,13 @@ def _encode_text(string, charset, cte, policy):
def normal_body(lines): return b'\n'.join(lines) + b'\n'
if cte==None:
# Use heuristics to decide on the "best" encoding.
- try:
- return '7bit', normal_body(lines).decode('ascii')
- except UnicodeDecodeError:
- pass
- if (policy.cte_type == '8bit' and
- max(len(x) for x in lines) <= policy.max_line_length):
- return '8bit', normal_body(lines).decode('ascii', 'surrogateescape')
+ if max(len(x) for x in lines) <= policy.max_line_length:
+ try:
+ return '7bit', normal_body(lines).decode('ascii')
+ except UnicodeDecodeError:
+ pass
+ if policy.cte_type == '8bit':
+ return '8bit', normal_body(lines).decode('ascii', 'surrogateescape')
sniff = embedded_body(lines[:10])
sniff_qp = quoprimime.body_encode(sniff.decode('latin-1'),
policy.max_line_length)
diff --git a/Lib/gettext.py b/Lib/gettext.py
index b98f501884..77b67aef42 100644
--- a/Lib/gettext.py
+++ b/Lib/gettext.py
@@ -46,7 +46,6 @@ internationalized, to the local language and cultural habits.
# find this format documented anywhere.
-import locale
import os
import re
import sys
@@ -210,6 +209,7 @@ def c2py(plural):
def _expand_lang(loc):
+ import locale
loc = locale.normalize(loc)
COMPONENT_CODESET = 1 << 0
COMPONENT_TERRITORY = 1 << 1
@@ -278,6 +278,7 @@ class NullTranslations:
import warnings
warnings.warn('lgettext() is deprecated, use gettext() instead',
DeprecationWarning, 2)
+ import locale
if self._fallback:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'.*\blgettext\b.*',
@@ -299,6 +300,7 @@ class NullTranslations:
import warnings
warnings.warn('lngettext() is deprecated, use ngettext() instead',
DeprecationWarning, 2)
+ import locale
if self._fallback:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'.*\blngettext\b.*',
@@ -462,6 +464,7 @@ class GNUTranslations(NullTranslations):
import warnings
warnings.warn('lgettext() is deprecated, use gettext() instead',
DeprecationWarning, 2)
+ import locale
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
@@ -476,6 +479,7 @@ class GNUTranslations(NullTranslations):
import warnings
warnings.warn('lngettext() is deprecated, use ngettext() instead',
DeprecationWarning, 2)
+ import locale
try:
tmsg = self._catalog[(msgid1, self.plural(n))]
except KeyError:
@@ -668,6 +672,7 @@ def ldgettext(domain, message):
import warnings
warnings.warn('ldgettext() is deprecated, use dgettext() instead',
DeprecationWarning, 2)
+ import locale
codeset = _localecodesets.get(domain)
try:
with warnings.catch_warnings():
@@ -695,6 +700,7 @@ def ldngettext(domain, msgid1, msgid2, n):
import warnings
warnings.warn('ldngettext() is deprecated, use dngettext() instead',
DeprecationWarning, 2)
+ import locale
codeset = _localecodesets.get(domain)
try:
with warnings.catch_warnings():
diff --git a/Lib/idlelib/zzdummy.py b/Lib/idlelib/zzdummy.py
index 8084499646..3c4b1d23b0 100644
--- a/Lib/idlelib/zzdummy.py
+++ b/Lib/idlelib/zzdummy.py
@@ -28,7 +28,7 @@ class ZzDummy:
text = self.text
text.undo_block_start()
for line in range(1, text.index('end')):
- text.insert('%d.0', ztest)
+ text.insert('%d.0', ztext)
text.undo_block_stop()
return "break"
diff --git a/Lib/linecache.py b/Lib/linecache.py
index ddd0abf2cf..fa5dbd09ea 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -71,10 +71,10 @@ def checkcache(filename=None):
try:
stat = os.stat(fullname)
except OSError:
- del cache[filename]
+ cache.pop(filename, None)
continue
if size != stat.st_size or mtime != stat.st_mtime:
- del cache[filename]
+ cache.pop(filename, None)
def updatecache(filename, module_globals=None):
@@ -84,7 +84,7 @@ def updatecache(filename, module_globals=None):
if filename in cache:
if len(cache[filename]) != 1:
- del cache[filename]
+ cache.pop(filename, None)
if not filename or (filename.startswith('<') and filename.endswith('>')):
return []
diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py
index 039c040ad3..e17bfde2c2 100644
--- a/Lib/test/test__xxsubinterpreters.py
+++ b/Lib/test/test__xxsubinterpreters.py
@@ -1,4 +1,3 @@
-import builtins
from collections import namedtuple
import contextlib
import itertools
@@ -867,11 +866,10 @@ class RunStringTests(TestBase):
yield
if msg is None:
self.assertEqual(str(caught.exception).split(':')[0],
- exctype.__name__)
+ str(exctype))
else:
self.assertEqual(str(caught.exception),
- "{}: {}".format(exctype.__name__, msg))
- self.assertIsInstance(caught.exception.__cause__, exctype)
+ "{}: {}".format(exctype, msg))
def test_invalid_syntax(self):
with self.assert_run_failed(SyntaxError):
@@ -1062,301 +1060,6 @@ class RunStringTests(TestBase):
self.assertEqual(retcode, 0)
-def build_exception(exctype, /, *args, **kwargs):
- # XXX Use __qualname__?
- name = exctype.__name__
- argreprs = [repr(a) for a in args]
- if kwargs:
- kwargreprs = [f'{k}={v!r}' for k, v in kwargs.items()]
- script = f'{name}({", ".join(argreprs)}, {", ".join(kwargreprs)})'
- else:
- script = f'{name}({", ".join(argreprs)})'
- expected = exctype(*args, **kwargs)
- return script, expected
-
-
-def build_exceptions(self, *exctypes, default=None, custom=None, bases=True):
- if not exctypes:
- raise NotImplementedError
- if not default:
- default = ((), {})
- elif isinstance(default, str):
- default = ((default,), {})
- elif type(default) is not tuple:
- raise NotImplementedError
- elif len(default) != 2:
- default = (default, {})
- elif type(default[0]) is not tuple:
- default = (default, {})
- elif type(default[1]) is not dict:
- default = (default, {})
- # else leave it alone
-
- for exctype in exctypes:
- customtype = None
- values = default
- if custom:
- if exctype in custom:
- customtype = exctype
- elif bases:
- for customtype in custom:
- if issubclass(exctype, customtype):
- break
- else:
- customtype = None
- if customtype is not None:
- values = custom[customtype]
- if values is None:
- continue
- args, kwargs = values
- script, expected = build_exception(exctype, *args, **kwargs)
- yield exctype, customtype, script, expected
-
-
-try:
- raise Exception
-except Exception as exc:
- assert exc.__traceback__ is not None
- Traceback = type(exc.__traceback__)
-
-
-class RunFailedTests(TestBase):
-
- BUILTINS = [v
- for v in vars(builtins).values()
- if (type(v) is type
- and issubclass(v, Exception)
- #and issubclass(v, BaseException)
- )
- ]
- BUILTINS_SPECIAL = [
- # These all have extra attributes (i.e. args/kwargs)
- SyntaxError,
- ImportError,
- UnicodeError,
- OSError,
- SystemExit,
- StopIteration,
- ]
-
- @classmethod
- def build_exceptions(cls, exctypes=None, default=(), custom=None):
- if exctypes is None:
- exctypes = cls.BUILTINS
- if custom is None:
- # Skip the "special" ones.
- custom = {et: None for et in cls.BUILTINS_SPECIAL}
- yield from build_exceptions(*exctypes, default=default, custom=custom)
-
- def assertExceptionsEqual(self, exc, expected, *, chained=True):
- if type(expected) is type:
- self.assertIs(type(exc), expected)
- return
- elif not isinstance(exc, Exception):
- self.assertEqual(exc, expected)
- elif not isinstance(expected, Exception):
- self.assertEqual(exc, expected)
- else:
- # Plain equality doesn't work, so we have to compare manually.
- self.assertIs(type(exc), type(expected))
- self.assertEqual(exc.args, expected.args)
- self.assertEqual(exc.__reduce__(), expected.__reduce__())
- if chained:
- self.assertExceptionsEqual(exc.__context__,
- expected.__context__)
- self.assertExceptionsEqual(exc.__cause__,
- expected.__cause__)
- self.assertEqual(exc.__suppress_context__,
- expected.__suppress_context__)
-
- def assertTracebacksEqual(self, tb, expected):
- if not isinstance(tb, Traceback):
- self.assertEqual(tb, expected)
- elif not isinstance(expected, Traceback):
- self.assertEqual(tb, expected)
- else:
- self.assertEqual(tb.tb_frame.f_code.co_name,
- expected.tb_frame.f_code.co_name)
- self.assertEqual(tb.tb_frame.f_code.co_filename,
- expected.tb_frame.f_code.co_filename)
- self.assertEqual(tb.tb_lineno, expected.tb_lineno)
- self.assertTracebacksEqual(tb.tb_next, expected.tb_next)
-
- # XXX Move this to TestBase?
- @contextlib.contextmanager
- def expected_run_failure(self, expected):
- exctype = expected if type(expected) is type else type(expected)
-
- with self.assertRaises(interpreters.RunFailedError) as caught:
- yield caught
- exc = caught.exception
-
- modname = exctype.__module__
- if modname == 'builtins' or modname == '__main__':
- exctypename = exctype.__name__
- else:
- exctypename = f'{modname}.{exctype.__name__}'
- if exctype is expected:
- self.assertEqual(str(exc).split(':')[0], exctypename)
- else:
- self.assertEqual(str(exc), f'{exctypename}: {expected}')
- self.assertExceptionsEqual(exc.__cause__, expected)
- if exc.__cause__ is not None:
- self.assertIsNotNone(exc.__cause__.__traceback__)
-
- def test_builtin_exceptions(self):
- interpid = interpreters.create()
- msg = '<a message>'
- for i, info in enumerate(self.build_exceptions(
- default=msg,
- custom={
- SyntaxError: ((msg, '<stdin>', 1, 3, 'a +?'), {}),
- ImportError: ((msg,), {'name': 'spam', 'path': '/x/spam.py'}),
- UnicodeError: None,
- #UnicodeError: ((), {}),
- #OSError: ((), {}),
- SystemExit: ((1,), {}),
- StopIteration: (('<a value>',), {}),
- },
- )):
- exctype, _, script, expected = info
- testname = f'{i+1} - {script}'
- script = f'raise {script}'
-
- with self.subTest(testname):
- with self.expected_run_failure(expected):
- interpreters.run_string(interpid, script)
-
- def test_custom_exception_from___main__(self):
- script = dedent("""
- class SpamError(Exception):
- def __init__(self, q):
- super().__init__(f'got {q}')
- self.q = q
- raise SpamError('eggs')
- """)
- expected = Exception(f'SpamError: got {"eggs"}')
-
- interpid = interpreters.create()
- with self.assertRaises(interpreters.RunFailedError) as caught:
- interpreters.run_string(interpid, script)
- cause = caught.exception.__cause__
-
- self.assertExceptionsEqual(cause, expected)
-
- class SpamError(Exception):
- # The normal Exception.__reduce__() produces a funny result
- # here. So we have to use a custom __new__().
- def __new__(cls, q):
- if type(q) is SpamError:
- return q
- return super().__new__(cls, q)
- def __init__(self, q):
- super().__init__(f'got {q}')
- self.q = q
-
- def test_custom_exception(self):
- script = dedent("""
- import test.test__xxsubinterpreters
- SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamError
- raise SpamError('eggs')
- """)
- try:
- ns = {}
- exec(script, ns, ns)
- except Exception as exc:
- expected = exc
-
- interpid = interpreters.create()
- with self.expected_run_failure(expected):
- interpreters.run_string(interpid, script)
-
- class SpamReducedError(Exception):
- def __init__(self, q):
- super().__init__(f'got {q}')
- self.q = q
- def __reduce__(self):
- return (type(self), (self.q,), {})
-
- def test_custom___reduce__(self):
- script = dedent("""
- import test.test__xxsubinterpreters
- SpamError = test.test__xxsubinterpreters.RunFailedTests.SpamReducedError
- raise SpamError('eggs')
- """)
- try:
- exec(script, (ns := {'__name__': '__main__'}), ns)
- except Exception as exc:
- expected = exc
-
- interpid = interpreters.create()
- with self.expected_run_failure(expected):
- interpreters.run_string(interpid, script)
-
- def test_traceback_propagated(self):
- script = dedent("""
- def do_spam():
- raise Exception('uh-oh')
- def do_eggs():
- return do_spam()
- class Spam:
- def do(self):
- return do_eggs()
- def get_handler():
- def handler():
- return Spam().do()
- return handler
- go = (lambda: get_handler()())
- def iter_all():
- yield from (go() for _ in [True])
- yield None
- def main():
- for v in iter_all():
- pass
- main()
- """)
- try:
- ns = {}
- exec(script, ns, ns)
- except Exception as exc:
- expected = exc
- expectedtb = exc.__traceback__.tb_next
-
- interpid = interpreters.create()
- with self.expected_run_failure(expected) as caught:
- interpreters.run_string(interpid, script)
- exc = caught.exception
-
- self.assertTracebacksEqual(exc.__cause__.__traceback__,
- expectedtb)
-
- def test_chained_exceptions(self):
- script = dedent("""
- try:
- raise ValueError('msg 1')
- except Exception as exc1:
- try:
- raise TypeError('msg 2')
- except Exception as exc2:
- try:
- raise IndexError('msg 3') from exc2
- except Exception:
- raise AttributeError('msg 4')
- """)
- try:
- exec(script, {}, {})
- except Exception as exc:
- expected = exc
-
- interpid = interpreters.create()
- with self.expected_run_failure(expected) as caught:
- interpreters.run_string(interpid, script)
- exc = caught.exception
-
- # ...just to be sure.
- self.assertIs(type(exc.__cause__), AttributeError)
-
-
##################################
# channel tests
diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py
index 68f3b8cce9..6eb6b46ec8 100644
--- a/Lib/test/test_asyncio/test_tasks.py
+++ b/Lib/test/test_asyncio/test_tasks.py
@@ -466,6 +466,33 @@ class BaseTaskTests:
t = outer()
self.assertEqual(self.loop.run_until_complete(t), 1042)
+ def test_exception_chaining_after_await(self):
+ # Test that when awaiting on a task when an exception is already
+ # active, if the task raises an exception it will be chained
+ # with the original.
+ loop = asyncio.new_event_loop()
+ self.set_event_loop(loop)
+
+ async def raise_error():
+ raise ValueError
+
+ async def run():
+ try:
+ raise KeyError(3)
+ except Exception as exc:
+ task = self.new_task(loop, raise_error())
+ try:
+ await task
+ except Exception as exc:
+ self.assertEqual(type(exc), ValueError)
+ chained = exc.__context__
+ self.assertEqual((type(chained), chained.args),
+ (KeyError, (3,)))
+
+ task = self.new_task(loop, run())
+ loop.run_until_complete(task)
+ loop.close()
+
def test_cancel(self):
def gen():
diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py
index 7267894508..b4061b7935 100644
--- a/Lib/test/test_compileall.py
+++ b/Lib/test/test_compileall.py
@@ -1,16 +1,19 @@
-import sys
import compileall
+import contextlib
+import filecmp
import importlib.util
-import test.test_importlib.util
+import io
+import itertools
import os
import pathlib
import py_compile
import shutil
import struct
+import sys
import tempfile
+import test.test_importlib.util
import time
import unittest
-import io
from unittest import mock, skipUnless
try:
@@ -26,6 +29,24 @@ from .test_py_compile import without_source_date_epoch
from .test_py_compile import SourceDateEpochTestMeta
+def get_pyc(script, opt):
+ if not opt:
+ # Replace None and 0 with ''
+ opt = ''
+ return importlib.util.cache_from_source(script, optimization=opt)
+
+
+def get_pycs(script):
+ return [get_pyc(script, opt) for opt in (0, 1, 2)]
+
+
+def is_hardlink(filename1, filename2):
+ """Returns True if two files have the same inode (hardlink)"""
+ inode1 = os.stat(filename1).st_ino
+ inode2 = os.stat(filename2).st_ino
+ return inode1 == inode2
+
+
class CompileallTestsBase:
def setUp(self):
@@ -825,6 +846,32 @@ class CommandLineTestsBase:
self.assertTrue(os.path.isfile(allowed_bc))
self.assertFalse(os.path.isfile(prohibited_bc))
+ def test_hardlink_bad_args(self):
+ # Bad arguments combination: hardlink deduplication makes sense
+ # only for more than one optimization level
+ self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes")
+
+ def test_hardlink(self):
+ # 'a = 0' code produces the same bytecode for the 3 optimization
+ # levels. All three .pyc files must have the same inode (hardlinks).
+ #
+ # If deduplication is disabled, all pyc files must have different
+ # inodes.
+ for dedup in (True, False):
+ with tempfile.TemporaryDirectory() as path:
+ with self.subTest(dedup=dedup):
+ script = script_helper.make_script(path, "script", "a = 0")
+ pycs = get_pycs(script)
+
+ args = ["-q", "-o 0", "-o 1", "-o 2"]
+ if dedup:
+ args.append("--hardlink-dupes")
+ self.assertRunOK(path, *args)
+
+ self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup)
+ self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup)
+ self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup)
+
class CommandLineTestsWithSourceEpoch(CommandLineTestsBase,
unittest.TestCase,
@@ -841,5 +888,176 @@ class CommandLineTestsNoSourceEpoch(CommandLineTestsBase,
+class HardlinkDedupTestsBase:
+ # Test hardlink_dupes parameter of compileall.compile_dir()
+
+ def setUp(self):
+ self.path = None
+
+ @contextlib.contextmanager
+ def temporary_directory(self):
+ with tempfile.TemporaryDirectory() as path:
+ self.path = path
+ yield path
+ self.path = None
+
+ def make_script(self, code, name="script"):
+ return script_helper.make_script(self.path, name, code)
+
+ def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False):
+ compileall.compile_dir(self.path, quiet=True, optimize=optimize,
+ hardlink_dupes=dedup, force=force)
+
+ def test_bad_args(self):
+ # Bad arguments combination: hardlink deduplication makes sense
+ # only for more than one optimization level
+ with self.temporary_directory():
+ self.make_script("pass")
+ with self.assertRaises(ValueError):
+ compileall.compile_dir(self.path, quiet=True, optimize=0,
+ hardlink_dupes=True)
+ with self.assertRaises(ValueError):
+ # same optimization level specified twice:
+ # compile_dir() removes duplicates
+ compileall.compile_dir(self.path, quiet=True, optimize=[0, 0],
+ hardlink_dupes=True)
+
+ def create_code(self, docstring=False, assertion=False):
+ lines = []
+ if docstring:
+ lines.append("'module docstring'")
+ lines.append('x = 1')
+ if assertion:
+ lines.append("assert x == 1")
+ return '\n'.join(lines)
+
+ def iter_codes(self):
+ for docstring in (False, True):
+ for assertion in (False, True):
+ code = self.create_code(docstring=docstring, assertion=assertion)
+ yield (code, docstring, assertion)
+
+ def test_disabled(self):
+ # Deduplication disabled, no hardlinks
+ for code, docstring, assertion in self.iter_codes():
+ with self.subTest(docstring=docstring, assertion=assertion):
+ with self.temporary_directory():
+ script = self.make_script(code)
+ pycs = get_pycs(script)
+ self.compile_dir(dedup=False)
+ self.assertFalse(is_hardlink(pycs[0], pycs[1]))
+ self.assertFalse(is_hardlink(pycs[0], pycs[2]))
+ self.assertFalse(is_hardlink(pycs[1], pycs[2]))
+
+ def check_hardlinks(self, script, docstring=False, assertion=False):
+ pycs = get_pycs(script)
+ self.assertEqual(is_hardlink(pycs[0], pycs[1]),
+ not assertion)
+ self.assertEqual(is_hardlink(pycs[0], pycs[2]),
+ not assertion and not docstring)
+ self.assertEqual(is_hardlink(pycs[1], pycs[2]),
+ not docstring)
+
+ def test_hardlink(self):
+ # Test deduplication on all combinations
+ for code, docstring, assertion in self.iter_codes():
+ with self.subTest(docstring=docstring, assertion=assertion):
+ with self.temporary_directory():
+ script = self.make_script(code)
+ self.compile_dir()
+ self.check_hardlinks(script, docstring, assertion)
+
+ def test_only_two_levels(self):
+ # Don't build the 3 optimization levels, but only 2
+ for opts in ((0, 1), (1, 2), (0, 2)):
+ with self.subTest(opts=opts):
+ with self.temporary_directory():
+ # code with no docstring and no assertion:
+ # same bytecode for all optimization levels
+ script = self.make_script(self.create_code())
+ self.compile_dir(optimize=opts)
+ pyc1 = get_pyc(script, opts[0])
+ pyc2 = get_pyc(script, opts[1])
+ self.assertTrue(is_hardlink(pyc1, pyc2))
+
+ def test_duplicated_levels(self):
+ # compile_dir() must not fail if optimize contains duplicated
+ # optimization levels and/or if optimization levels are not sorted.
+ with self.temporary_directory():
+ # code with no docstring and no assertion:
+ # same bytecode for all optimization levels
+ script = self.make_script(self.create_code())
+ self.compile_dir(optimize=[1, 0, 1, 0])
+ pyc1 = get_pyc(script, 0)
+ pyc2 = get_pyc(script, 1)
+ self.assertTrue(is_hardlink(pyc1, pyc2))
+
+ def test_recompilation(self):
+ # Test compile_dir() when pyc files already exist and the script
+ # content changed
+ with self.temporary_directory():
+ script = self.make_script("a = 0")
+ self.compile_dir()
+ # All three levels have the same inode
+ self.check_hardlinks(script)
+
+ pycs = get_pycs(script)
+ inode = os.stat(pycs[0]).st_ino
+
+ # Change of the module content
+ script = self.make_script("print(0)")
+
+ # Recompilation without -o 1
+ self.compile_dir(optimize=[0, 2], force=True)
+
+ # opt-1.pyc should have the same inode as before and others should not
+ self.assertEqual(inode, os.stat(pycs[1]).st_ino)
+ self.assertTrue(is_hardlink(pycs[0], pycs[2]))
+ self.assertNotEqual(inode, os.stat(pycs[2]).st_ino)
+ # opt-1.pyc and opt-2.pyc have different content
+ self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))
+
+ def test_import(self):
+ # Test that import updates a single pyc file when pyc files already
+ # exist and the script content changed
+ with self.temporary_directory():
+ script = self.make_script(self.create_code(), name="module")
+ self.compile_dir()
+ # All three levels have the same inode
+ self.check_hardlinks(script)
+
+ pycs = get_pycs(script)
+ inode = os.stat(pycs[0]).st_ino
+
+ # Change of the module content
+ script = self.make_script("print(0)", name="module")
+
+ # Import the module in Python with -O (optimization level 1)
+ script_helper.assert_python_ok(
+ "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path
+ )
+
+ # Only opt-1.pyc is changed
+ self.assertEqual(inode, os.stat(pycs[0]).st_ino)
+ self.assertEqual(inode, os.stat(pycs[2]).st_ino)
+ self.assertFalse(is_hardlink(pycs[1], pycs[2]))
+ # opt-1.pyc and opt-2.pyc have different content
+ self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True))
+
+
+class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase,
+ unittest.TestCase,
+ metaclass=SourceDateEpochTestMeta,
+ source_date_epoch=True):
+ pass
+
+
+class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase,
+ unittest.TestCase,
+ metaclass=SourceDateEpochTestMeta,
+ source_date_epoch=False):
+ pass
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_dictcomps.py b/Lib/test/test_dictcomps.py
index 16aa651b93..472e3dfa0d 100644
--- a/Lib/test/test_dictcomps.py
+++ b/Lib/test/test_dictcomps.py
@@ -77,7 +77,7 @@ class DictComprehensionTest(unittest.TestCase):
compile("{x: y for y, x in ((1, 2), (3, 4))} = 5", "<test>",
"exec")
- with self.assertRaisesRegex(SyntaxError, "cannot assign"):
+ with self.assertRaisesRegex(SyntaxError, "illegal expression"):
compile("{x: y for y, x in ((1, 2), (3, 4))} += 5", "<test>",
"exec")
diff --git a/Lib/test/test_email/test_contentmanager.py b/Lib/test/test_email/test_contentmanager.py
index 169058eac8..64dca2d017 100644
--- a/Lib/test/test_email/test_contentmanager.py
+++ b/Lib/test/test_email/test_contentmanager.py
@@ -329,6 +329,21 @@ class TestRawDataManager(TestEmailBase):
self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content)
self.assertEqual(m.get_content(), content)
+ def test_set_text_plain_long_line_heuristics(self):
+ m = self._make_message()
+ content = ("Simple but long message that is over 78 characters"
+ " long to force transfer encoding.\n")
+ raw_data_manager.set_content(m, content)
+ self.assertEqual(str(m), textwrap.dedent("""\
+ Content-Type: text/plain; charset="utf-8"
+ Content-Transfer-Encoding: quoted-printable
+
+ Simple but long message that is over 78 characters long to =
+ force transfer encoding.
+ """))
+ self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content)
+ self.assertEqual(m.get_content(), content)
+
def test_set_text_short_line_minimal_non_ascii_heuristics(self):
m = self._make_message()
content = "et là il est monté sur moi et il commence à m'éto.\n"
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index b689ec7aed..efd77fdbaa 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -228,6 +228,8 @@ class ExceptionTests(unittest.TestCase):
def baz():
'''quux'''
""", 9, 20)
+ check("pass\npass\npass\n(1+)\npass\npass\npass", 4, 4)
+ check("(1+)", 1, 4)
# Errors thrown by symtable.c
check('x = [(yield i) for i in range(3)]', 1, 5)
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index e047801199..348ae15aa6 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -318,7 +318,7 @@ class ExceptionTest(unittest.TestCase):
class GeneratorThrowTest(unittest.TestCase):
- def test_exception_context_set(self):
+ def test_exception_context_with_yield(self):
def f():
try:
raise KeyError('a')
@@ -332,6 +332,23 @@ class GeneratorThrowTest(unittest.TestCase):
context = cm.exception.__context__
self.assertEqual((type(context), context.args), (KeyError, ('a',)))
+ def test_exception_context_with_yield_from(self):
+ def f():
+ yield
+
+ def g():
+ try:
+ raise KeyError('a')
+ except Exception:
+ yield from f()
+
+ gen = g()
+ gen.send(None)
+ with self.assertRaises(ValueError) as cm:
+ gen.throw(ValueError)
+ context = cm.exception.__context__
+ self.assertEqual((type(context), context.args), (KeyError, ('a',)))
+
def test_throw_after_none_exc_type(self):
def g():
try:
@@ -1904,7 +1921,7 @@ SyntaxError: cannot assign to yield expression
>>> def f(): (yield bar) += y
Traceback (most recent call last):
...
-SyntaxError: cannot assign to yield expression
+SyntaxError: 'yield expression' is an illegal expression for augmented assignment
Now check some throw() conditions:
diff --git a/Lib/test/test_genexps.py b/Lib/test/test_genexps.py
index 86e4e195f5..5c1a209b0e 100644
--- a/Lib/test/test_genexps.py
+++ b/Lib/test/test_genexps.py
@@ -158,7 +158,7 @@ Verify that syntax error's are raised for genexps used as lvalues
>>> (y for y in (1,2)) += 10
Traceback (most recent call last):
...
- SyntaxError: cannot assign to generator expression
+ SyntaxError: 'generator expression' is an illegal expression for augmented assignment
########### Tests borrowed from or inspired by test_generators.py ############
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 02ba8a8b15..e1a402e2b4 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -1279,7 +1279,7 @@ class GrammarTests(unittest.TestCase):
def test_try(self):
### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
### | 'try' ':' suite 'finally' ':' suite
- ### except_clause: 'except' [expr ['as' expr]]
+ ### except_clause: 'except' [expr ['as' NAME]]
try:
1/0
except ZeroDivisionError:
@@ -1297,6 +1297,9 @@ class GrammarTests(unittest.TestCase):
except (EOFError, TypeError, ZeroDivisionError) as msg: pass
try: pass
finally: pass
+ with self.assertRaises(SyntaxError):
+ compile("try:\n pass\nexcept Exception as a.b:\n pass", "?", "exec")
+ compile("try:\n pass\nexcept Exception as a[b]:\n pass", "?", "exec")
def test_suite(self):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
diff --git a/Lib/test/test_json/test_recursion.py b/Lib/test/test_json/test_recursion.py
index 877dc448b1..543c62839b 100644
--- a/Lib/test/test_json/test_recursion.py
+++ b/Lib/test/test_json/test_recursion.py
@@ -52,7 +52,7 @@ class TestRecursion:
return [JSONTestObject]
else:
return 'JSONTestObject'
- return pyjson.JSONEncoder.default(o)
+ return self.json.JSONEncoder.default(o)
enc = RecursiveJSONEncoder()
self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"')
diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py
index df2d46d882..9614e45799 100644
--- a/Lib/test/test_peg_parser.py
+++ b/Lib/test/test_peg_parser.py
@@ -35,6 +35,9 @@ TEST_CASES = [
('attribute_simple', 'a.b'),
('attributes_subscript', 'a.b[0]'),
('augmented_assignment', 'x += 42'),
+ ('augmented_assignment_attribute', 'a.b.c += 42'),
+ ('augmented_assignment_paren', '(x) += 42'),
+ ('augmented_assignment_paren_subscript', '(x[0]) -= 42'),
('binop_add', '1 + 1'),
('binop_add_multiple', '1 + 1 + 1 + 1'),
('binop_all', '1 + 2 * 5 + 3 ** 2 - -3'),
@@ -547,6 +550,11 @@ TEST_CASES = [
with a as (x, y):
pass
'''),
+ ('with_list_target',
+ '''
+ with a as [x, y]:
+ pass
+ '''),
('yield', 'yield'),
('yield_expr', 'yield a'),
('yield_from', 'yield from a'),
@@ -560,6 +568,9 @@ FAIL_TEST_CASES = [
("annotation_tuple", "(a,): int"),
("annotation_tuple_without_paren", "a,: int"),
("assignment_keyword", "a = if"),
+ ("augmented_assignment_list", "[a, b] += 1"),
+ ("augmented_assignment_tuple", "a, b += 1"),
+ ("augmented_assignment_tuple_paren", "(a, b) += (1, 2)"),
("comprehension_lambda", "(a for a in lambda: b)"),
("comprehension_else", "(a for a in b if c else d"),
("del_call", "del a()"),
@@ -589,6 +600,20 @@ FAIL_TEST_CASES = [
a
"""),
("not_terminated_string", "a = 'example"),
+ ("try_except_attribute_target",
+ """
+ try:
+ pass
+ except Exception as a.b:
+ pass
+ """),
+ ("try_except_subscript_target",
+ """
+ try:
+ pass
+ except Exception as a[0]:
+ pass
+ """),
]
FAIL_SPECIALIZED_MESSAGE_CASES = [
@@ -600,7 +625,7 @@ FAIL_SPECIALIZED_MESSAGE_CASES = [
("(a, b): int", "only single target (not tuple) can be annotated"),
("[a, b]: int", "only single target (not list) can be annotated"),
("a(): int", "illegal target for annotation"),
- ("1 += 1", "cannot assign to literal"),
+ ("1 += 1", "'literal' is an illegal expression for augmented assignment"),
("pass\n pass", "unexpected indent"),
("def f():\npass", "expected an indented block"),
("def f(*): pass", "named arguments must follow bare *"),
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index 06636ae8a1..60c7d9fd38 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -100,35 +100,54 @@ expression inside that contain should still cause a syntax error.
This test just checks a couple of cases rather than enumerating all of
them.
-# All of the following also produce different error messages with pegen
-# >>> (a, "b", c) = (1, 2, 3)
-# Traceback (most recent call last):
-# SyntaxError: cannot assign to literal
+>>> (a, "b", c) = (1, 2, 3)
+Traceback (most recent call last):
+SyntaxError: cannot assign to literal
-# >>> (a, True, c) = (1, 2, 3)
-# Traceback (most recent call last):
-# SyntaxError: cannot assign to True
+>>> (a, True, c) = (1, 2, 3)
+Traceback (most recent call last):
+SyntaxError: cannot assign to True
>>> (a, __debug__, c) = (1, 2, 3)
Traceback (most recent call last):
SyntaxError: cannot assign to __debug__
-# >>> (a, *True, c) = (1, 2, 3)
-# Traceback (most recent call last):
-# SyntaxError: cannot assign to True
+>>> (a, *True, c) = (1, 2, 3)
+Traceback (most recent call last):
+SyntaxError: cannot assign to True
>>> (a, *__debug__, c) = (1, 2, 3)
Traceback (most recent call last):
SyntaxError: cannot assign to __debug__
-# >>> [a, b, c + 1] = [1, 2, 3]
-# Traceback (most recent call last):
-# SyntaxError: cannot assign to operator
+>>> [a, b, c + 1] = [1, 2, 3]
+Traceback (most recent call last):
+SyntaxError: cannot assign to operator
+
+>>> [a, b[1], c + 1] = [1, 2, 3]
+Traceback (most recent call last):
+SyntaxError: cannot assign to operator
+
+>>> [a, b.c.d, c + 1] = [1, 2, 3]
+Traceback (most recent call last):
+SyntaxError: cannot assign to operator
>>> a if 1 else b = 1
Traceback (most recent call last):
SyntaxError: cannot assign to conditional expression
+>>> a, b += 1, 2
+Traceback (most recent call last):
+SyntaxError: 'tuple' is an illegal expression for augmented assignment
+
+>>> (a, b) += 1, 2
+Traceback (most recent call last):
+SyntaxError: 'tuple' is an illegal expression for augmented assignment
+
+>>> [a, b] += 1, 2
+Traceback (most recent call last):
+SyntaxError: 'list' is an illegal expression for augmented assignment
+
From compiler_complex_args():
>>> def f(None=1):
@@ -334,16 +353,16 @@ More set_context():
>>> (x for x in x) += 1
Traceback (most recent call last):
-SyntaxError: cannot assign to generator expression
+SyntaxError: 'generator expression' is an illegal expression for augmented assignment
>>> None += 1
Traceback (most recent call last):
-SyntaxError: cannot assign to None
+SyntaxError: 'None' is an illegal expression for augmented assignment
>>> __debug__ += 1
Traceback (most recent call last):
SyntaxError: cannot assign to __debug__
>>> f() += 1
Traceback (most recent call last):
-SyntaxError: cannot assign to function call
+SyntaxError: 'function call' is an illegal expression for augmented assignment
Test continue in finally in weird combinations.
@@ -676,6 +695,7 @@ class SyntaxTestCase(unittest.TestCase):
def test_assign_call(self):
self._check_error("f() = 1", "assign")
+ @unittest.skipIf(support.use_old_parser(), "The old parser cannot generate these error messages")
def test_assign_del(self):
self._check_error("del (,)", "invalid syntax")
self._check_error("del 1", "delete literal")
diff --git a/Misc/ACKS b/Misc/ACKS
index f744de6b1f..b479aa5d80 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -86,6 +86,7 @@ Marcin Bachry
Alfonso Baciero
Dwayne Bailey
Stig Bakken
+Lumír Balhar
Aleksandr Balezin
Greg Ball
Lewis Ball
diff --git a/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst
new file mode 100644
index 0000000000..a08c3da566
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst
@@ -0,0 +1 @@
+Remove the ``_PyUnicode_ClearStaticStrings()`` function from the C API.
diff --git a/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst
new file mode 100644
index 0000000000..d3049b05a7
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst
@@ -0,0 +1,2 @@
+:mod:`compileall` is now able to use hardlinks to prevent duplicates when
+``.pyc`` files for different optimization levels have the same content.
diff --git a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst
new file mode 100644
index 0000000000..1b9fe609c2
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst
@@ -0,0 +1 @@
+If text content lines are longer than policy.max_line_length, always use a content-encoding to make sure they are wrapped.
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c
index 1abc9ca6f2..f2c72ebd51 100644
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -1007,7 +1007,7 @@ io_check_errors(PyObject *errors)
/* Avoid calling PyCodec_LookupError() before the codec registry is ready:
before_PyUnicode_InitEncodings() is called. */
- if (!interp->fs_codec.encoding) {
+ if (!interp->unicode.fs_codec.encoding) {
return 0;
}
diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c
index 1b7563cb20..5f217dcb89 100644
--- a/Modules/_testinternalcapi.c
+++ b/Modules/_testinternalcapi.c
@@ -14,6 +14,7 @@
#include "Python.h"
#include "pycore_byteswap.h" // _Py_bswap32()
#include "pycore_initconfig.h" // _Py_GetConfigsAsDict()
+#include "pycore_hashtable.h" // _Py_hashtable_new()
#include "pycore_gc.h" // PyGC_Head
@@ -62,10 +63,101 @@ test_bswap(PyObject *self, PyObject *Py_UNUSED(args))
}
+#define TO_PTR(ch) ((void*)(uintptr_t)ch)
+#define FROM_PTR(ptr) ((uintptr_t)ptr)
+#define VALUE(key) (1 + ((int)(key) - 'a'))
+
+static Py_uhash_t
+hash_char(const void *key)
+{
+ char ch = (char)FROM_PTR(key);
+ return ch;
+}
+
+
+static int
+hashtable_cb(_Py_hashtable_t *table,
+ const void *key_ptr, const void *value_ptr,
+ void *user_data)
+{
+ int *count = (int *)user_data;
+ char key = (char)FROM_PTR(key_ptr);
+ int value = (int)FROM_PTR(value_ptr);
+ assert(value == VALUE(key));
+ *count += 1;
+ return 0;
+}
+
+
+static PyObject*
+test_hashtable(PyObject *self, PyObject *Py_UNUSED(args))
+{
+ _Py_hashtable_t *table = _Py_hashtable_new(hash_char,
+ _Py_hashtable_compare_direct);
+ if (table == NULL) {
+ return PyErr_NoMemory();
+ }
+
+ // Using a newly allocated table must not crash
+ assert(table->nentries == 0);
+ assert(table->nbuckets > 0);
+ assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL);
+
+ // Test _Py_hashtable_set()
+ char key;
+ for (key='a'; key <= 'z'; key++) {
+ int value = VALUE(key);
+ if (_Py_hashtable_set(table, TO_PTR(key), TO_PTR(value)) < 0) {
+ _Py_hashtable_destroy(table);
+ return PyErr_NoMemory();
+ }
+ }
+ assert(table->nentries == 26);
+ assert(table->nbuckets > table->nentries);
+
+ // Test _Py_hashtable_get_entry()
+ for (key='a'; key <= 'z'; key++) {
+ _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry(table, TO_PTR(key));
+ assert(entry != NULL);
+ assert(entry->key = TO_PTR(key));
+ assert(entry->value = TO_PTR(VALUE(key)));
+ }
+
+ // Test _Py_hashtable_get()
+ for (key='a'; key <= 'z'; key++) {
+ void *value_ptr = _Py_hashtable_get(table, TO_PTR(key));
+ assert((int)FROM_PTR(value_ptr) == VALUE(key));
+ }
+
+ // Test _Py_hashtable_steal()
+ key = 'p';
+ void *value_ptr = _Py_hashtable_steal(table, TO_PTR(key));
+ assert((int)FROM_PTR(value_ptr) == VALUE(key));
+ assert(table->nentries == 25);
+ assert(_Py_hashtable_get_entry(table, TO_PTR(key)) == NULL);
+
+ // Test _Py_hashtable_foreach()
+ int count = 0;
+ int res = _Py_hashtable_foreach(table, hashtable_cb, &count);
+ assert(res == 0);
+ assert(count == 25);
+
+ // Test _Py_hashtable_clear()
+ _Py_hashtable_clear(table);
+ assert(table->nentries == 0);
+ assert(table->nbuckets > 0);
+ assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL);
+
+ _Py_hashtable_destroy(table);
+ Py_RETURN_NONE;
+}
+
+
static PyMethodDef TestMethods[] = {
{"get_configs", get_configs, METH_NOARGS},
{"get_recursion_depth", get_recursion_depth, METH_NOARGS},
{"test_bswap", test_bswap, METH_NOARGS},
+ {"test_hashtable", test_hashtable, METH_NOARGS},
{NULL, NULL} /* sentinel */
};
diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c
index 18dd8918e7..8a6fce9e0b 100644
--- a/Modules/_xxsubinterpretersmodule.c
+++ b/Modules/_xxsubinterpretersmodule.c
@@ -1,4 +1,5 @@
-/* _interpreters module */
+
+/* interpreters module */
/* low-level access to interpreter primitives */
#include "Python.h"
@@ -6,927 +7,35 @@
#include "interpreteridobject.h"
-// XXX Emit a warning?
-#define IGNORE_FAILURE(msg) \
- fprintf(stderr, " -----\nRunFailedError: %s\n", msg); \
- PyErr_PrintEx(0); \
- fprintf(stderr, " -----\n"); \
- PyErr_Clear();
-
-typedef void (*_deallocfunc)(void *);
-
-static PyInterpreterState *
-_get_current(void)
-{
- // _PyInterpreterState_Get() aborts if lookup fails, so don't need
- // to check the result for NULL.
- return _PyInterpreterState_Get();
-}
-
-
-/* string utils *************************************************************/
-
-// PyMem_Free() must be used to dealocate the resulting string.
static char *
-_strdup_and_size(const char *data, Py_ssize_t *psize, _deallocfunc *dealloc)
-{
- if (data == NULL) {
- if (psize != NULL) {
- *psize = 0;
- }
- if (dealloc != NULL) {
- *dealloc = NULL;
- }
- return "";
- }
-
- Py_ssize_t size;
- if (psize == NULL) {
- size = strlen(data);
- } else {
- size = *psize;
- if (size == 0) {
- size = strlen(data);
- *psize = size; // The size "return" value.
- }
- }
- char *copied = PyMem_Malloc(size+1);
- if (copied == NULL) {
- PyErr_NoMemory();
- return NULL;
- }
- if (dealloc != NULL) {
- *dealloc = PyMem_Free;
- }
- memcpy(copied, data, size+1);
- return copied;
-}
-
-static const char *
-_pyobj_get_str_and_size(PyObject *obj, Py_ssize_t *psize)
-{
- if (PyUnicode_Check(obj)) {
- return PyUnicode_AsUTF8AndSize(obj, psize);
- } else {
- const char *data = NULL;
- PyBytes_AsStringAndSize(obj, (char **)&data, psize);
- return data;
- }
-}
-
-/* "raw" strings */
-
-typedef struct _rawstring {
- Py_ssize_t size;
- const char *data;
- _deallocfunc dealloc;
-} _rawstring;
-
-static void
-_rawstring_init(_rawstring *raw)
-{
- raw->size = 0;
- raw->data = NULL;
- raw->dealloc = NULL;
-}
-
-static _rawstring *
-_rawstring_new(void)
-{
- _rawstring *raw = PyMem_NEW(_rawstring, 1);
- if (raw == NULL) {
- PyErr_NoMemory();
- return NULL;
- }
- _rawstring_init(raw);
- return raw;
-}
-
-static void
-_rawstring_clear(_rawstring *raw)
-{
- if (raw->data != NULL && raw->dealloc != NULL) {
- (*raw->dealloc)((void *)raw->data);
- }
- _rawstring_init(raw);
-}
-
-static void
-_rawstring_free(_rawstring *raw)
-{
- _rawstring_clear(raw);
- PyMem_Free(raw);
-}
-
-static int
-_rawstring_is_clear(_rawstring *raw)
-{
- return raw->size == 0 && raw->data == NULL && raw->dealloc == NULL;
-}
-
-//static void
-//_rawstring_move(_rawstring *raw, _rawstring *src)
-//{
-// raw->size = src->size;
-// raw->data = src->data;
-// raw->dealloc = src->dealloc;
-// _rawstring_init(src);
-//}
-
-static void
-_rawstring_proxy(_rawstring *raw, const char *str)
+_copy_raw_string(PyObject *strobj)
{
+ const char *str = PyUnicode_AsUTF8(strobj);
if (str == NULL) {
- str = "";
- }
- raw->size = strlen(str);
- raw->data = str;
- raw->dealloc = NULL;
-}
-
-static int
-_rawstring_buffer(_rawstring *raw, Py_ssize_t size)
-{
- raw->data = PyMem_Malloc(size+1);
- if (raw->data == NULL) {
- PyErr_NoMemory();
- return -1;
- }
- raw->size = size;
- raw->dealloc = PyMem_Free;
- return 0;
-}
-
-static int
-_rawstring_strcpy(_rawstring *raw, const char *str, Py_ssize_t size)
-{
- _deallocfunc dealloc = NULL;
- const char *copied = _strdup_and_size(str, &size, &dealloc);
- if (copied == NULL) {
- return -1;
- }
-
- raw->size = size;
- raw->dealloc = dealloc;
- raw->data = copied;
- return 0;
-}
-
-static int
-_rawstring_from_pyobj(_rawstring *raw, PyObject *obj)
-{
- Py_ssize_t size = 0;
- const char *data = _pyobj_get_str_and_size(obj, &size);
- if (PyErr_Occurred()) {
- return -1;
- }
- if (_rawstring_strcpy(raw, data, size) != 0) {
- return -1;
- }
- return 0;
-}
-
-static int
-_rawstring_from_pyobj_attr(_rawstring *raw, PyObject *obj, const char *attr)
-{
- int res = -1;
- PyObject *valueobj = PyObject_GetAttrString(obj, attr);
- if (valueobj == NULL) {
- goto done;
- }
- if (!PyUnicode_Check(valueobj)) {
- // XXX PyObject_Str()? Repr()?
- goto done;
- }
- const char *valuestr = PyUnicode_AsUTF8(valueobj);
- if (valuestr == NULL) {
- if (PyErr_Occurred()) {
- goto done;
- }
- } else if (_rawstring_strcpy(raw, valuestr, 0) != 0) {
- _rawstring_clear(raw);
- goto done;
- }
- res = 0;
-
-done:
- Py_XDECREF(valueobj);
- return res;
-}
-
-static PyObject *
-_rawstring_as_pybytes(_rawstring *raw)
-{
- return PyBytes_FromStringAndSize(raw->data, raw->size);
-}
-
-
-/* object utils *************************************************************/
-
-static void
-_pyobj_identify_type(PyObject *obj, _rawstring *modname, _rawstring *clsname)
-{
- PyObject *objtype = (PyObject *)Py_TYPE(obj);
-
- // Try __module__ and __name__.
- if (_rawstring_from_pyobj_attr(modname, objtype, "__module__") != 0) {
- // Fall back to the previous values in "modname".
- IGNORE_FAILURE("bad __module__");
- }
- if (_rawstring_from_pyobj_attr(clsname, objtype, "__name__") != 0) {
- // Fall back to the previous values in "clsname".
- IGNORE_FAILURE("bad __name__");
- }
-
- // XXX Fall back to __qualname__?
- // XXX Fall back to tp_name?
-}
-
-static PyObject *
-_pyobj_get_class(const char *modname, const char *clsname)
-{
- assert(clsname != NULL);
- if (modname == NULL) {
- modname = "builtins";
- }
-
- PyObject *module = PyImport_ImportModule(modname);
- if (module == NULL) {
- return NULL;
- }
- PyObject *cls = PyObject_GetAttrString(module, clsname);
- Py_DECREF(module);
- return cls;
-}
-
-static PyObject *
-_pyobj_create(const char *modname, const char *clsname, PyObject *arg)
-{
- PyObject *cls = _pyobj_get_class(modname, clsname);
- if (cls == NULL) {
return NULL;
}
- PyObject *obj = NULL;
- if (arg == NULL) {
- obj = _PyObject_CallNoArg(cls);
- } else {
- obj = PyObject_CallFunction(cls, "O", arg);
- }
- Py_DECREF(cls);
- return obj;
-}
-
-
-/* object snapshots */
-
-typedef struct _objsnapshot {
- // If modname is NULL then try "builtins" and "__main__".
- _rawstring modname;
- // clsname is required.
- _rawstring clsname;
-
- // The rest are optional.
-
- // The serialized exception.
- _rawstring *serialized;
-} _objsnapshot;
-
-static void
-_objsnapshot_init(_objsnapshot *osn)
-{
- _rawstring_init(&osn->modname);
- _rawstring_init(&osn->clsname);
- osn->serialized = NULL;
-}
-
-//static _objsnapshot *
-//_objsnapshot_new(void)
-//{
-// _objsnapshot *osn = PyMem_NEW(_objsnapshot, 1);
-// if (osn == NULL) {
-// PyErr_NoMemory();
-// return NULL;
-// }
-// _objsnapshot_init(osn);
-// return osn;
-//}
-
-static void
-_objsnapshot_clear(_objsnapshot *osn)
-{
- _rawstring_clear(&osn->modname);
- _rawstring_clear(&osn->clsname);
- if (osn->serialized != NULL) {
- _rawstring_free(osn->serialized);
- osn->serialized = NULL;
- }
-}
-
-//static void
-//_objsnapshot_free(_objsnapshot *osn)
-//{
-// _objsnapshot_clear(osn);
-// PyMem_Free(osn);
-//}
-
-#ifndef NDEBUG
-static int
-_objsnapshot_is_clear(_objsnapshot *osn)
-{
- return osn->serialized == NULL
- && _rawstring_is_clear(&osn->modname)
- && _rawstring_is_clear(&osn->clsname);
-}
-#endif
-
-static void
-_objsnapshot_summarize(_objsnapshot *osn, _rawstring *rawbuf, const char *msg)
-{
- if (msg == NULL || *msg == '\0') {
- // XXX Keep it NULL?
- // XXX Keep it an empty string?
- // XXX Use something more informative?
- msg = "<no message>";
- }
- const char *clsname = osn->clsname.data;
- const char *modname = osn->modname.data;
- if (modname && *modname == '\0') {
- modname = NULL;
- }
-
- // Prep the buffer.
- Py_ssize_t size = strlen(clsname);
- if (modname != NULL) {
- if (strcmp(modname, "builtins") == 0) {
- modname = NULL;
- } else if (strcmp(modname, "__main__") == 0) {
- modname = NULL;
- } else {
- size += strlen(modname) + 1;
- }
- }
- if (msg != NULL) {
- size += strlen(": ") + strlen(msg);
- }
- if (modname != NULL || msg != NULL) {
- if (_rawstring_buffer(rawbuf, size) != 0) {
- IGNORE_FAILURE("could not summarize object snapshot");
- return;
- }
- }
- // ...else we'll proxy clsname as-is, so no need to allocate a buffer.
-
- // XXX Use __qualname__ somehow?
- char *buf = (char *)rawbuf->data;
- if (modname != NULL) {
- if (msg != NULL) {
- snprintf(buf, size+1, "%s.%s: %s", modname, clsname, msg);
- } else {
- snprintf(buf, size+1, "%s.%s", modname, clsname);
- }
- } else if (msg != NULL) {
- snprintf(buf, size+1, "%s: %s", clsname, msg);
- } else {
- _rawstring_proxy(rawbuf, clsname);
- }
-}
-
-static _rawstring *
-_objsnapshot_get_minimal_summary(_objsnapshot *osn, PyObject *obj)
-{
- const char *str = NULL;
- PyObject *objstr = PyObject_Str(obj);
- if (objstr == NULL) {
- PyErr_Clear();
- } else {
- str = PyUnicode_AsUTF8(objstr);
- if (str == NULL) {
- PyErr_Clear();
- }
- }
-
- _rawstring *summary = _rawstring_new();
- if (summary == NULL) {
- return NULL;
- }
- _objsnapshot_summarize(osn, summary, str);
- return summary;
-}
-
-static void
-_objsnapshot_extract(_objsnapshot *osn, PyObject *obj)
-{
- assert(_objsnapshot_is_clear(osn));
-
- // Get the "qualname".
- _rawstring_proxy(&osn->modname, "<unknown>");
- _rawstring_proxy(&osn->clsname, "<unknown>");
- _pyobj_identify_type(obj, &osn->modname, &osn->clsname);
-
- // Serialize the object.
- // XXX Use marshal?
- PyObject *pickle = PyImport_ImportModule("pickle");
- if (pickle == NULL) {
- IGNORE_FAILURE("could not serialize object: pickle import failed");
- return;
- }
- PyObject *objdata = PyObject_CallMethod(pickle, "dumps", "(O)", obj);
- Py_DECREF(pickle);
- if (objdata == NULL) {
- IGNORE_FAILURE("could not serialize object: pickle.dumps failed");
- } else {
- _rawstring *serialized = _rawstring_new();
- int res = _rawstring_from_pyobj(serialized, objdata);
- Py_DECREF(objdata);
- if (res != 0) {
- IGNORE_FAILURE("could not serialize object: raw str failed");
- _rawstring_free(serialized);
- } else if (serialized->size == 0) {
- _rawstring_free(serialized);
- } else {
- osn->serialized = serialized;
- }
- }
-}
-
-static PyObject *
-_objsnapshot_resolve_serialized(_objsnapshot *osn)
-{
- assert(osn->serialized != NULL);
-
- // XXX Use marshal?
- PyObject *pickle = PyImport_ImportModule("pickle");
- if (pickle == NULL) {
- return NULL;
- }
- PyObject *objdata = _rawstring_as_pybytes(osn->serialized);
- if (objdata == NULL) {
- return NULL;
- } else {
- PyObject *obj = PyObject_CallMethod(pickle, "loads", "O", objdata);
- Py_DECREF(objdata);
- return obj;
- }
-}
-
-static PyObject *
-_objsnapshot_resolve_naive(_objsnapshot *osn, PyObject *arg)
-{
- if (_rawstring_is_clear(&osn->clsname)) {
- // We can't proceed without at least the class name.
- PyErr_SetString(PyExc_ValueError, "missing class name");
- return NULL;
- }
-
- if (osn->modname.data != NULL) {
- return _pyobj_create(osn->modname.data, osn->clsname.data, arg);
- } else {
- PyObject *obj = _pyobj_create("builtins", osn->clsname.data, arg);
- if (obj == NULL) {
- PyErr_Clear();
- obj = _pyobj_create("__main__", osn->clsname.data, arg);
- }
- return obj;
- }
-}
-
-static PyObject *
-_objsnapshot_resolve(_objsnapshot *osn)
-{
- if (osn->serialized != NULL) {
- PyObject *obj = _objsnapshot_resolve_serialized(osn);
- if (obj != NULL) {
- return obj;
- }
- IGNORE_FAILURE("could not de-serialize object");
- }
-
- // Fall back to naive resolution.
- return _objsnapshot_resolve_naive(osn, NULL);
-}
-
-
-/* exception utils **********************************************************/
-
-// _pyexc_create is inspired by _PyErr_SetObject().
-
-static PyObject *
-_pyexc_create(PyObject *exctype, const char *msg, PyObject *tb)
-{
- assert(exctype != NULL && PyExceptionClass_Check(exctype));
-
- PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL;
- PyErr_Fetch(&curtype, &curexc, &curtb);
-
- // Create the object.
- PyObject *exc = NULL;
- if (msg != NULL) {
- PyObject *msgobj = PyUnicode_FromString(msg);
- if (msgobj == NULL) {
- IGNORE_FAILURE("could not deserialize propagated error message");
- }
- exc = _PyObject_CallOneArg(exctype, msgobj);
- Py_XDECREF(msgobj);
- } else {
- exc = _PyObject_CallNoArg(exctype);
- }
- if (exc == NULL) {
- return NULL;
- }
-
- // Set the traceback, if any.
- if (tb == NULL) {
- tb = curtb;
- }
- if (tb != NULL) {
- // This does *not* steal a reference!
- PyException_SetTraceback(exc, tb);
- }
-
- PyErr_Restore(curtype, curexc, curtb);
-
- return exc;
-}
-
-/* traceback snapshots */
-
-typedef struct _tbsnapshot {
- _rawstring tbs_funcname;
- _rawstring tbs_filename;
- int tbs_lineno;
- struct _tbsnapshot *tbs_next;
-} _tbsnapshot;
-
-static void
-_tbsnapshot_init(_tbsnapshot *tbs)
-{
- _rawstring_init(&tbs->tbs_funcname);
- _rawstring_init(&tbs->tbs_filename);
- tbs->tbs_lineno = -1;
- tbs->tbs_next = NULL;
-}
-
-static _tbsnapshot *
-_tbsnapshot_new(void)
-{
- _tbsnapshot *tbs = PyMem_NEW(_tbsnapshot, 1);
- if (tbs == NULL) {
- PyErr_NoMemory();
- return NULL;
- }
- _tbsnapshot_init(tbs);
- return tbs;
-}
-
-static void _tbsnapshot_free(_tbsnapshot *); // forward
-
-static void
-_tbsnapshot_clear(_tbsnapshot *tbs)
-{
- _rawstring_clear(&tbs->tbs_funcname);
- _rawstring_clear(&tbs->tbs_filename);
- tbs->tbs_lineno = -1;
- if (tbs->tbs_next != NULL) {
- _tbsnapshot_free(tbs->tbs_next);
- tbs->tbs_next = NULL;
- }
-}
-
-static void
-_tbsnapshot_free(_tbsnapshot *tbs)
-{
- _tbsnapshot_clear(tbs);
- PyMem_Free(tbs);
-}
-
-#ifndef NDEBUG
-static int
-_tbsnapshot_is_clear(_tbsnapshot *tbs)
-{
- return tbs->tbs_lineno == -1 && tbs->tbs_next == NULL
- && _rawstring_is_clear(&tbs->tbs_funcname)
- && _rawstring_is_clear(&tbs->tbs_filename);
-}
-#endif
-
-static int
-_tbsnapshot_from_pytb(_tbsnapshot *tbs, PyTracebackObject *pytb)
-{
- assert(_tbsnapshot_is_clear(tbs));
- assert(pytb != NULL);
-
- PyCodeObject *pycode = pytb->tb_frame->f_code;
- const char *funcname = PyUnicode_AsUTF8(pycode->co_name);
- if (_rawstring_strcpy(&tbs->tbs_funcname, funcname, 0) != 0) {
- goto error;
- }
- const char *filename = PyUnicode_AsUTF8(pycode->co_filename);
- if (_rawstring_strcpy(&tbs->tbs_filename, filename, 0) != 0) {
- goto error;
- }
- tbs->tbs_lineno = pytb->tb_lineno;
-
- return 0;
-
-error:
- _tbsnapshot_clear(tbs);
- return -1;
-}
-
-static int
-_tbsnapshot_extract(_tbsnapshot *tbs, PyTracebackObject *pytb)
-{
- assert(_tbsnapshot_is_clear(tbs));
- assert(pytb != NULL);
-
- _tbsnapshot *next = NULL;
- while (pytb->tb_next != NULL) {
- _tbsnapshot *_next = _tbsnapshot_new();
- if (_next == NULL) {
- goto error;
- }
- if (_tbsnapshot_from_pytb(_next, pytb) != 0) {
- goto error;
- }
- if (next != NULL) {
- _next->tbs_next = next;
- }
- next = _next;
- pytb = pytb->tb_next;
- }
- if (_tbsnapshot_from_pytb(tbs, pytb) != 0) {
- goto error;
- }
- tbs->tbs_next = next;
-
- return 0;
-
-error:
- _tbsnapshot_clear(tbs);
- return -1;
-}
-
-static PyObject *
-_tbsnapshot_resolve(_tbsnapshot *tbs)
-{
- assert(!PyErr_Occurred());
- // At this point there should be no traceback set yet.
-
- while (tbs != NULL) {
- const char *funcname = tbs->tbs_funcname.data;
- const char *filename = tbs->tbs_filename.data;
- _PyTraceback_Add(funcname ? funcname : "",
- filename ? filename : "",
- tbs->tbs_lineno);
- tbs = tbs->tbs_next;
- }
-
- PyObject *exctype = NULL, *excval = NULL, *tb = NULL;
- PyErr_Fetch(&exctype, &excval, &tb);
- // Leave it cleared.
- return tb;
-}
-
-/* exception snapshots */
-
-typedef struct _excsnapshot {
- _objsnapshot es_object;
- _rawstring *es_msg;
- struct _excsnapshot *es_cause;
- struct _excsnapshot *es_context;
- char es_suppress_context;
- struct _tbsnapshot *es_traceback;
-} _excsnapshot;
-
-static void
-_excsnapshot_init(_excsnapshot *es)
-{
- _objsnapshot_init(&es->es_object);
- es->es_msg = NULL;
- es->es_cause = NULL;
- es->es_context = NULL;
- es->es_suppress_context = 0;
- es->es_traceback = NULL;
-}
-
-static _excsnapshot *
-_excsnapshot_new(void) {
- _excsnapshot *es = PyMem_NEW(_excsnapshot, 1);
- if (es == NULL) {
+ char *copied = PyMem_Malloc(strlen(str)+1);
+ if (copied == NULL) {
PyErr_NoMemory();
return NULL;
}
- _excsnapshot_init(es);
- return es;
-}
-
-static void _excsnapshot_free(_excsnapshot *); // forward
-
-static void
-_excsnapshot_clear(_excsnapshot *es)
-{
- _objsnapshot_clear(&es->es_object);
- if (es->es_msg != NULL) {
- _rawstring_free(es->es_msg);
- es->es_msg = NULL;
- }
- if (es->es_cause != NULL) {
- _excsnapshot_free(es->es_cause);
- es->es_cause = NULL;
- }
- if (es->es_context != NULL) {
- _excsnapshot_free(es->es_context);
- es->es_context = NULL;
- }
- es->es_suppress_context = 0;
- if (es->es_traceback != NULL) {
- _tbsnapshot_free(es->es_traceback);
- es->es_traceback = NULL;
- }
-}
-
-static void
-_excsnapshot_free(_excsnapshot *es)
-{
- _excsnapshot_clear(es);
- PyMem_Free(es);
-}
-
-#ifndef NDEBUG
-static int
-_excsnapshot_is_clear(_excsnapshot *es)
-{
- return es->es_suppress_context == 0
- && es->es_cause == NULL
- && es->es_context == NULL
- && es->es_traceback == NULL
- && es->es_msg == NULL
- && _objsnapshot_is_clear(&es->es_object);
-}
-#endif
-
-static PyObject *
-_excsnapshot_get_exc_naive(_excsnapshot *es)
-{
- _rawstring buf;
- const char *msg = NULL;
- if (es->es_msg != NULL) {
- msg = es->es_msg->data;
- } else {
- _objsnapshot_summarize(&es->es_object, &buf, NULL);
- if (buf.size > 0) {
- msg = buf.data;
- }
- }
-
- PyObject *exc = NULL;
- // XXX Use _objsnapshot_resolve_naive()?
- const char *modname = es->es_object.modname.size > 0
- ? es->es_object.modname.data
- : NULL;
- PyObject *exctype = _pyobj_get_class(modname, es->es_object.clsname.data);
- if (exctype != NULL) {
- exc = _pyexc_create(exctype, msg, NULL);
- Py_DECREF(exctype);
- if (exc != NULL) {
- return exc;
- }
- PyErr_Clear();
- } else {
- PyErr_Clear();
- }
- exctype = PyExc_Exception;
- return _pyexc_create(exctype, msg, NULL);
-}
-
-static PyObject *
-_excsnapshot_get_exc(_excsnapshot *es)
-{
- assert(!_objsnapshot_is_clear(&es->es_object));
-
- PyObject *exc = _objsnapshot_resolve(&es->es_object);
- if (exc == NULL) {
- // Fall back to resolving the object.
- PyObject *curtype = NULL, *curexc = NULL, *curtb = NULL;
- PyErr_Fetch(&curtype, &curexc, &curtb);
-
- exc = _excsnapshot_get_exc_naive(es);
- if (exc == NULL) {
- PyErr_Restore(curtype, curexc, curtb);
- return NULL;
- }
- }
- // People can do some weird stuff...
- if (!PyExceptionInstance_Check(exc)) {
- // We got a bogus "exception".
- Py_DECREF(exc);
- PyErr_SetString(PyExc_TypeError, "expected exception");
- return NULL;
- }
- return exc;
-}
-
-static void _excsnapshot_extract(_excsnapshot *, PyObject *);
-static void
-_excsnapshot_extract(_excsnapshot *es, PyObject *excobj)
-{
- assert(_excsnapshot_is_clear(es));
- assert(PyExceptionInstance_Check(excobj));
-
- _objsnapshot_extract(&es->es_object, excobj);
-
- es->es_msg = _objsnapshot_get_minimal_summary(&es->es_object, excobj);
- if (es->es_msg == NULL) {
- PyErr_Clear();
- }
-
- PyBaseExceptionObject *exc = (PyBaseExceptionObject *)excobj;
-
- if (exc->cause != NULL && exc->cause != Py_None) {
- es->es_cause = _excsnapshot_new();
- _excsnapshot_extract(es->es_cause, exc->cause);
- }
-
- if (exc->context != NULL && exc->context != Py_None) {
- es->es_context = _excsnapshot_new();
- _excsnapshot_extract(es->es_context, exc->context);
- }
-
- es->es_suppress_context = exc->suppress_context;
-
- PyObject *tb = PyException_GetTraceback(excobj);
- if (PyErr_Occurred()) {
- IGNORE_FAILURE("could not get traceback");
- } else if (tb == Py_None) {
- Py_DECREF(tb);
- tb = NULL;
- }
- if (tb != NULL) {
- es->es_traceback = _tbsnapshot_new();
- if (_tbsnapshot_extract(es->es_traceback,
- (PyTracebackObject *)tb) != 0) {
- IGNORE_FAILURE("could not extract __traceback__");
- }
- }
+ strcpy(copied, str);
+ return copied;
}
-static PyObject *
-_excsnapshot_resolve(_excsnapshot *es)
+static PyInterpreterState *
+_get_current(void)
{
- PyObject *exc = _excsnapshot_get_exc(es);
- if (exc == NULL) {
- return NULL;
- }
-
- if (es->es_traceback != NULL) {
- PyObject *tb = _tbsnapshot_resolve(es->es_traceback);
- if (tb == NULL) {
- // The snapshot is still somewhat useful without this.
- IGNORE_FAILURE("could not deserialize traceback");
- } else {
- // This does not steal references.
- PyException_SetTraceback(exc, tb);
- Py_DECREF(tb);
- }
- }
- // NULL means "not set".
-
- if (es->es_context != NULL) {
- PyObject *context = _excsnapshot_resolve(es->es_context);
- if (context == NULL) {
- // The snapshot is still useful without this.
- IGNORE_FAILURE("could not deserialize __context__");
- } else {
- // This steals references but we have one to give.
- PyException_SetContext(exc, context);
- }
- }
- // NULL means "not set".
-
- if (es->es_cause != NULL) {
- PyObject *cause = _excsnapshot_resolve(es->es_cause);
- if (cause == NULL) {
- // The snapshot is still useful without this.
- IGNORE_FAILURE("could not deserialize __cause__");
- } else {
- // This steals references, but we have one to give.
- PyException_SetCause(exc, cause);
- }
- }
- // NULL means "not set".
-
- ((PyBaseExceptionObject *)exc)->suppress_context = es->es_suppress_context;
-
- return exc;
+ // PyInterpreterState_Get() aborts if lookup fails, so don't need
+ // to check the result for NULL.
+ return PyInterpreterState_Get();
}
/* data-sharing-specific code ***********************************************/
-/* shared "object" */
-
struct _sharednsitem {
- _rawstring name;
+ char *name;
_PyCrossInterpreterData data;
};
@@ -935,7 +44,8 @@ static void _sharednsitem_clear(struct _sharednsitem *); // forward
static int
_sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value)
{
- if (_rawstring_from_pyobj(&item->name, key) != 0) {
+ item->name = _copy_raw_string(key);
+ if (item->name == NULL) {
return -1;
}
if (_PyObject_GetCrossInterpreterData(value, &item->data) != 0) {
@@ -948,14 +58,17 @@ _sharednsitem_init(struct _sharednsitem *item, PyObject *key, PyObject *value)
static void
_sharednsitem_clear(struct _sharednsitem *item)
{
- _rawstring_clear(&item->name);
+ if (item->name != NULL) {
+ PyMem_Free(item->name);
+ item->name = NULL;
+ }
_PyCrossInterpreterData_Release(&item->data);
}
static int
_sharednsitem_apply(struct _sharednsitem *item, PyObject *ns)
{
- PyObject *name = PyUnicode_FromString(item->name.data);
+ PyObject *name = PyUnicode_FromString(item->name);
if (name == NULL) {
return -1;
}
@@ -1046,121 +159,121 @@ _sharedns_apply(_sharedns *shared, PyObject *ns)
return 0;
}
-/* shared exception */
-
// Ultimately we'd like to preserve enough information about the
// exception and traceback that we could re-constitute (or at least
// simulate, a la traceback.TracebackException), and even chain, a copy
// of the exception in the calling interpreter.
typedef struct _sharedexception {
- _excsnapshot snapshot;
- _rawstring msg;
+ char *name;
+ char *msg;
} _sharedexception;
-static void
-_sharedexception_init(_sharedexception *she)
-{
- _excsnapshot_init(&she->snapshot);
- _rawstring_init(&she->msg);
-}
-
static _sharedexception *
_sharedexception_new(void)
{
- _sharedexception *she = PyMem_NEW(_sharedexception, 1);
- if (she == NULL) {
+ _sharedexception *err = PyMem_NEW(_sharedexception, 1);
+ if (err == NULL) {
PyErr_NoMemory();
return NULL;
}
- _sharedexception_init(she);
- return she;
+ err->name = NULL;
+ err->msg = NULL;
+ return err;
}
static void
-_sharedexception_clear(_sharedexception *she)
+_sharedexception_clear(_sharedexception *exc)
{
- _excsnapshot_clear(&she->snapshot);
- _rawstring_clear(&she->msg);
+ if (exc->name != NULL) {
+ PyMem_Free(exc->name);
+ }
+ if (exc->msg != NULL) {
+ PyMem_Free(exc->msg);
+ }
}
static void
-_sharedexception_free(_sharedexception *she)
+_sharedexception_free(_sharedexception *exc)
{
- _sharedexception_clear(she);
- PyMem_Free(she);
+ _sharedexception_clear(exc);
+ PyMem_Free(exc);
}
-#ifndef NDEBUG
-static int
-_sharedexception_is_clear(_sharedexception *she)
+static _sharedexception *
+_sharedexception_bind(PyObject *exctype, PyObject *exc, PyObject *tb)
{
- return 1
- && _excsnapshot_is_clear(&she->snapshot)
- && _rawstring_is_clear(&she->msg);
-}
-#endif
+ assert(exctype != NULL);
+ char *failure = NULL;
-static PyObject *
-_sharedexception_get_cause(_sharedexception *sharedexc)
-{
- // FYI, "cause" is already normalized.
- PyObject *cause = _excsnapshot_resolve(&sharedexc->snapshot);
- if (cause == NULL) {
- if (PyErr_Occurred()) {
- IGNORE_FAILURE("could not deserialize exc snapshot");
- }
- return NULL;
+ _sharedexception *err = _sharedexception_new();
+ if (err == NULL) {
+ goto finally;
}
- // XXX Ensure "cause" has a traceback.
- return cause;
-}
-static void
-_sharedexception_extract(_sharedexception *she, PyObject *exc)
-{
- assert(_sharedexception_is_clear(she));
- assert(exc != NULL);
+ PyObject *name = PyUnicode_FromFormat("%S", exctype);
+ if (name == NULL) {
+ failure = "unable to format exception type name";
+ goto finally;
+ }
+ err->name = _copy_raw_string(name);
+ Py_DECREF(name);
+ if (err->name == NULL) {
+ if (PyErr_ExceptionMatches(PyExc_MemoryError)) {
+ failure = "out of memory copying exception type name";
+ } else {
+ failure = "unable to encode and copy exception type name";
+ }
+ goto finally;
+ }
- _excsnapshot_extract(&she->snapshot, exc);
+ if (exc != NULL) {
+ PyObject *msg = PyUnicode_FromFormat("%S", exc);
+ if (msg == NULL) {
+ failure = "unable to format exception message";
+ goto finally;
+ }
+ err->msg = _copy_raw_string(msg);
+ Py_DECREF(msg);
+ if (err->msg == NULL) {
+ if (PyErr_ExceptionMatches(PyExc_MemoryError)) {
+ failure = "out of memory copying exception message";
+ } else {
+ failure = "unable to encode and copy exception message";
+ }
+ goto finally;
+ }
+ }
- // Compose the message.
- const char *msg = NULL;
- PyObject *msgobj = PyUnicode_FromFormat("%S", exc);
- if (msgobj == NULL) {
- IGNORE_FAILURE("unable to format exception message");
- } else {
- msg = PyUnicode_AsUTF8(msgobj);
- if (PyErr_Occurred()) {
- PyErr_Clear();
+finally:
+ if (failure != NULL) {
+ PyErr_Clear();
+ if (err->name != NULL) {
+ PyMem_Free(err->name);
+ err->name = NULL;
}
+ err->msg = failure;
}
- _objsnapshot_summarize(&she->snapshot.es_object, &she->msg, msg);
- Py_XDECREF(msgobj);
+ return err;
}
-static PyObject *
-_sharedexception_resolve(_sharedexception *sharedexc, PyObject *wrapperclass)
+static void
+_sharedexception_apply(_sharedexception *exc, PyObject *wrapperclass)
{
- assert(!PyErr_Occurred());
-
- // Get the exception object (already normalized).
- PyObject *exc = _pyexc_create(wrapperclass, sharedexc->msg.data, NULL);
- assert(exc != NULL);
-
- // Set __cause__, is possible.
- PyObject *cause = _sharedexception_get_cause(sharedexc);
- if (cause != NULL) {
- // Set __context__.
- Py_INCREF(cause); // PyException_SetContext() steals a reference.
- PyException_SetContext(exc, cause);
-
- // Set __cause__.
- Py_INCREF(cause); // PyException_SetCause() steals a reference.
- PyException_SetCause(exc, cause);
+ if (exc->name != NULL) {
+ if (exc->msg != NULL) {
+ PyErr_Format(wrapperclass, "%s: %s", exc->name, exc->msg);
+ }
+ else {
+ PyErr_SetString(wrapperclass, exc->name);
+ }
+ }
+ else if (exc->msg != NULL) {
+ PyErr_SetString(wrapperclass, exc->msg);
+ }
+ else {
+ PyErr_SetNone(wrapperclass);
}
-
- return exc;
}
@@ -2756,9 +1869,11 @@ _ensure_not_running(PyInterpreterState *interp)
static int
_run_script(PyInterpreterState *interp, const char *codestr,
- _sharedns *shared, _sharedexception **pexc)
+ _sharedns *shared, _sharedexception **exc)
{
- assert(!PyErr_Occurred()); // ...in the called interpreter.
+ PyObject *exctype = NULL;
+ PyObject *excval = NULL;
+ PyObject *tb = NULL;
PyObject *main_mod = _PyInterpreterState_GetMainModule(interp);
if (main_mod == NULL) {
@@ -2789,38 +1904,25 @@ _run_script(PyInterpreterState *interp, const char *codestr,
Py_DECREF(result); // We throw away the result.
}
- *pexc = NULL;
+ *exc = NULL;
return 0;
- PyObject *exctype = NULL, *exc = NULL, *tb = NULL;
error:
- PyErr_Fetch(&exctype, &exc, &tb);
-
- // First normalize the exception.
- PyErr_NormalizeException(&exctype, &exc, &tb);
- assert(PyExceptionInstance_Check(exc));
- if (tb != NULL) {
- PyException_SetTraceback(exc, tb);
- }
-
- // Behave as though the exception was caught in this thread.
- PyErr_SetExcInfo(exctype, exc, tb); // Like entering "except" block.
+ PyErr_Fetch(&exctype, &excval, &tb);
- // Serialize the exception.
- _sharedexception *sharedexc = _sharedexception_new();
+ _sharedexception *sharedexc = _sharedexception_bind(exctype, excval, tb);
+ Py_XDECREF(exctype);
+ Py_XDECREF(excval);
+ Py_XDECREF(tb);
if (sharedexc == NULL) {
- IGNORE_FAILURE("script raised an uncaught exception");
- } else {
- _sharedexception_extract(sharedexc, exc);
+ fprintf(stderr, "RunFailedError: script raised an uncaught exception");
+ PyErr_Clear();
+ sharedexc = NULL;
+ }
+ else {
assert(!PyErr_Occurred());
}
-
- // Clear the exception.
- PyErr_SetExcInfo(NULL, NULL, NULL); // Like leaving "except" block.
- PyErr_Clear(); // Do not re-raise.
-
- // "Return" the serialized exception.
- *pexc = sharedexc;
+ *exc = sharedexc;
return -1;
}
@@ -2828,8 +1930,6 @@ static int
_run_script_in_interpreter(PyInterpreterState *interp, const char *codestr,
PyObject *shareables)
{
- assert(!PyErr_Occurred()); // ...in the calling interpreter.
-
if (_ensure_not_running(interp) < 0) {
return -1;
}
@@ -2863,8 +1963,8 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr,
}
// Run the script.
- _sharedexception *sharedexc = NULL;
- int result = _run_script(interp, codestr, shared, &sharedexc);
+ _sharedexception *exc = NULL;
+ int result = _run_script(interp, codestr, shared, &exc);
// Switch back.
if (save_tstate != NULL) {
@@ -2873,14 +1973,9 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr,
#endif
// Propagate any exception out to the caller.
- if (sharedexc != NULL) {
- assert(!PyErr_Occurred());
- PyObject *exc = _sharedexception_resolve(sharedexc, RunFailedError);
- // XXX This is not safe once interpreters no longer share allocators.
- _sharedexception_free(sharedexc);
- PyObject *exctype = (PyObject *)Py_TYPE(exc);
- Py_INCREF(exctype); // PyErr_Restore() steals a reference.
- PyErr_Restore(exctype, exc, PyException_GetTraceback(exc));
+ if (exc != NULL) {
+ _sharedexception_apply(exc, RunFailedError);
+ _sharedexception_free(exc);
}
else if (result != 0) {
// We were unable to allocate a shared exception.
diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h
index cf6d7449ba..41baa45573 100644
--- a/Modules/clinic/posixmodule.c.h
+++ b/Modules/clinic/posixmodule.c.h
@@ -8388,18 +8388,24 @@ PyDoc_STRVAR(os_DirEntry_is_symlink__doc__,
"Return True if the entry is a symbolic link; cached per entry.");
#define OS_DIRENTRY_IS_SYMLINK_METHODDEF \
- {"is_symlink", (PyCFunction)os_DirEntry_is_symlink, METH_NOARGS, os_DirEntry_is_symlink__doc__},
+ {"is_symlink", (PyCFunction)(void(*)(void))os_DirEntry_is_symlink, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_symlink__doc__},
static int
-os_DirEntry_is_symlink_impl(DirEntry *self);
+os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class);
static PyObject *
-os_DirEntry_is_symlink(DirEntry *self, PyObject *Py_UNUSED(ignored))
+os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
+ static const char * const _keywords[] = { NULL};
+ static _PyArg_Parser _parser = {":is_symlink", _keywords, 0};
int _return_value;
- _return_value = os_DirEntry_is_symlink_impl(self);
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser
+ )) {
+ goto exit;
+ }
+ _return_value = os_DirEntry_is_symlink_impl(self, defining_class);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
@@ -8416,34 +8422,25 @@ PyDoc_STRVAR(os_DirEntry_stat__doc__,
"Return stat_result object for the entry; cached per entry.");
#define OS_DIRENTRY_STAT_METHODDEF \
- {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__},
+ {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__},
static PyObject *
-os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks);
+os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks);
static PyObject *
-os_DirEntry_stat(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+os_DirEntry_stat(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
static const char * const _keywords[] = {"follow_symlinks", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "stat", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ static _PyArg_Parser _parser = {"|$p:stat", _keywords, 0};
int follow_symlinks = 1;
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_kwonly;
- }
- follow_symlinks = PyObject_IsTrue(args[0]);
- if (follow_symlinks < 0) {
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &follow_symlinks)) {
goto exit;
}
-skip_optional_kwonly:
- return_value = os_DirEntry_stat_impl(self, follow_symlinks);
+ return_value = os_DirEntry_stat_impl(self, defining_class, follow_symlinks);
exit:
return return_value;
@@ -8456,35 +8453,26 @@ PyDoc_STRVAR(os_DirEntry_is_dir__doc__,
"Return True if the entry is a directory; cached per entry.");
#define OS_DIRENTRY_IS_DIR_METHODDEF \
- {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__},
+ {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__},
static int
-os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks);
+os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks);
static PyObject *
-os_DirEntry_is_dir(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
static const char * const _keywords[] = {"follow_symlinks", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "is_dir", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ static _PyArg_Parser _parser = {"|$p:is_dir", _keywords, 0};
int follow_symlinks = 1;
int _return_value;
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_kwonly;
- }
- follow_symlinks = PyObject_IsTrue(args[0]);
- if (follow_symlinks < 0) {
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &follow_symlinks)) {
goto exit;
}
-skip_optional_kwonly:
- _return_value = os_DirEntry_is_dir_impl(self, follow_symlinks);
+ _return_value = os_DirEntry_is_dir_impl(self, defining_class, follow_symlinks);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
@@ -8501,35 +8489,26 @@ PyDoc_STRVAR(os_DirEntry_is_file__doc__,
"Return True if the entry is a file; cached per entry.");
#define OS_DIRENTRY_IS_FILE_METHODDEF \
- {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__},
+ {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__},
static int
-os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks);
+os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks);
static PyObject *
-os_DirEntry_is_file(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
static const char * const _keywords[] = {"follow_symlinks", NULL};
- static _PyArg_Parser _parser = {NULL, _keywords, "is_file", 0};
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
+ static _PyArg_Parser _parser = {"|$p:is_file", _keywords, 0};
int follow_symlinks = 1;
int _return_value;
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_kwonly;
- }
- follow_symlinks = PyObject_IsTrue(args[0]);
- if (follow_symlinks < 0) {
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser,
+ &follow_symlinks)) {
goto exit;
}
-skip_optional_kwonly:
- _return_value = os_DirEntry_is_file_impl(self, follow_symlinks);
+ _return_value = os_DirEntry_is_file_impl(self, defining_class, follow_symlinks);
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
@@ -9417,4 +9396,4 @@ exit:
#ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF
#define OS_WAITSTATUS_TO_EXITCODE_METHODDEF
#endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */
-/*[clinic end generated code: output=be90d3aba972098b input=a9049054013a1b77]*/
+/*[clinic end generated code: output=005919eaaef3f8e6 input=a9049054013a1b77]*/
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index 60a60e9aed..2ddf30de89 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -2101,48 +2101,50 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
static int
_posix_clear(PyObject *module)
{
- Py_CLEAR(get_posix_state(module)->billion);
- Py_CLEAR(get_posix_state(module)->DirEntryType);
- Py_CLEAR(get_posix_state(module)->ScandirIteratorType);
+ _posixstate *state = get_posix_state(module);
+ Py_CLEAR(state->billion);
+ Py_CLEAR(state->DirEntryType);
+ Py_CLEAR(state->ScandirIteratorType);
#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM)
- Py_CLEAR(get_posix_state(module)->SchedParamType);
+ Py_CLEAR(state->SchedParamType);
#endif
- Py_CLEAR(get_posix_state(module)->StatResultType);
- Py_CLEAR(get_posix_state(module)->StatVFSResultType);
- Py_CLEAR(get_posix_state(module)->TerminalSizeType);
- Py_CLEAR(get_posix_state(module)->TimesResultType);
- Py_CLEAR(get_posix_state(module)->UnameResultType);
+ Py_CLEAR(state->StatResultType);
+ Py_CLEAR(state->StatVFSResultType);
+ Py_CLEAR(state->TerminalSizeType);
+ Py_CLEAR(state->TimesResultType);
+ Py_CLEAR(state->UnameResultType);
#if defined(HAVE_WAITID) && !defined(__APPLE__)
- Py_CLEAR(get_posix_state(module)->WaitidResultType);
+ Py_CLEAR(state->WaitidResultType);
#endif
#if defined(HAVE_WAIT3) || defined(HAVE_WAIT4)
- Py_CLEAR(get_posix_state(module)->struct_rusage);
+ Py_CLEAR(state->struct_rusage);
#endif
- Py_CLEAR(get_posix_state(module)->st_mode);
+ Py_CLEAR(state->st_mode);
return 0;
}
static int
_posix_traverse(PyObject *module, visitproc visit, void *arg)
{
- Py_VISIT(get_posix_state(module)->billion);
- Py_VISIT(get_posix_state(module)->DirEntryType);
- Py_VISIT(get_posix_state(module)->ScandirIteratorType);
+ _posixstate *state = get_posix_state(module);
+ Py_VISIT(state->billion);
+ Py_VISIT(state->DirEntryType);
+ Py_VISIT(state->ScandirIteratorType);
#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM)
- Py_VISIT(get_posix_state(module)->SchedParamType);
+ Py_VISIT(state->SchedParamType);
#endif
- Py_VISIT(get_posix_state(module)->StatResultType);
- Py_VISIT(get_posix_state(module)->StatVFSResultType);
- Py_VISIT(get_posix_state(module)->TerminalSizeType);
- Py_VISIT(get_posix_state(module)->TimesResultType);
- Py_VISIT(get_posix_state(module)->UnameResultType);
+ Py_VISIT(state->StatResultType);
+ Py_VISIT(state->StatVFSResultType);
+ Py_VISIT(state->TerminalSizeType);
+ Py_VISIT(state->TimesResultType);
+ Py_VISIT(state->UnameResultType);
#if defined(HAVE_WAITID) && !defined(__APPLE__)
- Py_VISIT(get_posix_state(module)->WaitidResultType);
+ Py_VISIT(state->WaitidResultType);
#endif
#if defined(HAVE_WAIT3) || defined(HAVE_WAIT4)
- Py_VISIT(get_posix_state(module)->struct_rusage);
+ Py_VISIT(state->struct_rusage);
#endif
- Py_VISIT(get_posix_state(module)->st_mode);
+ Py_VISIT(state->st_mode);
return 0;
}
@@ -12747,17 +12749,20 @@ DirEntry_dealloc(DirEntry *entry)
/* Forward reference */
static int
-DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits);
+DirEntry_test_mode(PyTypeObject *defining_class, DirEntry *self,
+ int follow_symlinks, unsigned short mode_bits);
/*[clinic input]
os.DirEntry.is_symlink -> bool
+ defining_class: defining_class
+ /
Return True if the entry is a symbolic link; cached per entry.
[clinic start generated code]*/
static int
-os_DirEntry_is_symlink_impl(DirEntry *self)
-/*[clinic end generated code: output=42244667d7bcfc25 input=1605a1b4b96976c3]*/
+os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class)
+/*[clinic end generated code: output=293096d589b6d47c input=e9acc5ee4d511113]*/
{
#ifdef MS_WINDOWS
return (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK;
@@ -12766,21 +12771,15 @@ os_DirEntry_is_symlink_impl(DirEntry *self)
if (self->d_type != DT_UNKNOWN)
return self->d_type == DT_LNK;
else
- return DirEntry_test_mode(self, 0, S_IFLNK);
+ return DirEntry_test_mode(defining_class, self, 0, S_IFLNK);
#else
/* POSIX without d_type */
- return DirEntry_test_mode(self, 0, S_IFLNK);
+ return DirEntry_test_mode(defining_class, self, 0, S_IFLNK);
#endif
}
-static inline PyObject*
-DirEntry_get_module(DirEntry *self)
-{
- return PyType_GetModule(Py_TYPE(self));
-}
-
static PyObject *
-DirEntry_fetch_stat(DirEntry *self, int follow_symlinks)
+DirEntry_fetch_stat(PyObject *module, DirEntry *self, int follow_symlinks)
{
int result;
STRUCT_STAT st;
@@ -12816,18 +12815,18 @@ DirEntry_fetch_stat(DirEntry *self, int follow_symlinks)
if (result != 0)
return path_object_error(self->path);
- return _pystat_fromstructstat(DirEntry_get_module(self), &st);
+ return _pystat_fromstructstat(module, &st);
}
static PyObject *
-DirEntry_get_lstat(DirEntry *self)
+DirEntry_get_lstat(PyTypeObject *defining_class, DirEntry *self)
{
if (!self->lstat) {
+ PyObject *module = PyType_GetModule(defining_class);
#ifdef MS_WINDOWS
- self->lstat = _pystat_fromstructstat(DirEntry_get_module(self),
- &self->win32_lstat);
+ self->lstat = _pystat_fromstructstat(module, &self->win32_lstat);
#else /* POSIX */
- self->lstat = DirEntry_fetch_stat(self, 0);
+ self->lstat = DirEntry_fetch_stat(module, self, 0);
#endif
}
Py_XINCREF(self->lstat);
@@ -12836,6 +12835,8 @@ DirEntry_get_lstat(DirEntry *self)
/*[clinic input]
os.DirEntry.stat
+ defining_class: defining_class
+ /
*
follow_symlinks: bool = True
@@ -12843,20 +12844,26 @@ Return stat_result object for the entry; cached per entry.
[clinic start generated code]*/
static PyObject *
-os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks)
-/*[clinic end generated code: output=008593b3a6d01305 input=280d14c1d6f1d00d]*/
+os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks)
+/*[clinic end generated code: output=23f803e19c3e780e input=e816273c4e67ee98]*/
{
- if (!follow_symlinks)
- return DirEntry_get_lstat(self);
+ if (!follow_symlinks) {
+ return DirEntry_get_lstat(defining_class, self);
+ }
if (!self->stat) {
- int result = os_DirEntry_is_symlink_impl(self);
- if (result == -1)
+ int result = os_DirEntry_is_symlink_impl(self, defining_class);
+ if (result == -1) {
return NULL;
- else if (result)
- self->stat = DirEntry_fetch_stat(self, 1);
- else
- self->stat = DirEntry_get_lstat(self);
+ }
+ if (result) {
+ PyObject *module = PyType_GetModule(defining_class);
+ self->stat = DirEntry_fetch_stat(module, self, 1);
+ }
+ else {
+ self->stat = DirEntry_get_lstat(defining_class, self);
+ }
}
Py_XINCREF(self->stat);
@@ -12865,7 +12872,8 @@ os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks)
/* Set exception and return -1 on error, 0 for False, 1 for True */
static int
-DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits)
+DirEntry_test_mode(PyTypeObject *defining_class, DirEntry *self,
+ int follow_symlinks, unsigned short mode_bits)
{
PyObject *stat = NULL;
PyObject *st_mode = NULL;
@@ -12890,7 +12898,7 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits
#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
if (need_stat) {
#endif
- stat = os_DirEntry_stat_impl(self, follow_symlinks);
+ stat = os_DirEntry_stat_impl(self, defining_class, follow_symlinks);
if (!stat) {
if (PyErr_ExceptionMatches(PyExc_FileNotFoundError)) {
/* If file doesn't exist (anymore), then return False
@@ -12900,7 +12908,8 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits
}
goto error;
}
- st_mode = PyObject_GetAttr(stat, get_posix_state(DirEntry_get_module(self))->st_mode);
+ _posixstate* state = get_posix_state(PyType_GetModule(defining_class));
+ st_mode = PyObject_GetAttr(stat, state->st_mode);
if (!st_mode)
goto error;
@@ -12943,6 +12952,8 @@ error:
/*[clinic input]
os.DirEntry.is_dir -> bool
+ defining_class: defining_class
+ /
*
follow_symlinks: bool = True
@@ -12950,14 +12961,17 @@ Return True if the entry is a directory; cached per entry.
[clinic start generated code]*/
static int
-os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks)
-/*[clinic end generated code: output=ad2e8d54365da287 input=0135232766f53f58]*/
+os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks)
+/*[clinic end generated code: output=0cd453b9c0987fdf input=1a4ffd6dec9920cb]*/
{
- return DirEntry_test_mode(self, follow_symlinks, S_IFDIR);
+ return DirEntry_test_mode(defining_class, self, follow_symlinks, S_IFDIR);
}
/*[clinic input]
os.DirEntry.is_file -> bool
+ defining_class: defining_class
+ /
*
follow_symlinks: bool = True
@@ -12965,10 +12979,11 @@ Return True if the entry is a file; cached per entry.
[clinic start generated code]*/
static int
-os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks)
-/*[clinic end generated code: output=8462ade481d8a476 input=0dc90be168b041ee]*/
+os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class,
+ int follow_symlinks)
+/*[clinic end generated code: output=f7c277ab5ba80908 input=0a64c5a12e802e3b]*/
{
- return DirEntry_test_mode(self, follow_symlinks, S_IFREG);
+ return DirEntry_test_mode(defining_class, self, follow_symlinks, S_IFREG);
}
/*[clinic input]
@@ -13496,6 +13511,8 @@ static PyType_Spec ScandirIteratorType_spec = {
MODNAME ".ScandirIterator",
sizeof(ScandirIterator),
0,
+ // bpo-40549: Py_TPFLAGS_BASETYPE should not be used, since
+ // PyType_GetModule(Py_TYPE(self)) doesn't work on a subclass instance.
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_FINALIZE,
ScandirIteratorType_slots
};
@@ -14785,12 +14802,10 @@ static const char * const have_functions[] = {
static int
posixmodule_exec(PyObject *m)
{
- PyObject *v;
- PyObject *list;
- const char * const *trace;
+ _posixstate *state = get_posix_state(m);
/* Initialize environ dictionary */
- v = convertenviron();
+ PyObject *v = convertenviron();
Py_XINCREF(v);
if (v == NULL || PyModule_AddObject(m, "environ", v) != 0)
return -1;
@@ -14813,7 +14828,7 @@ posixmodule_exec(PyObject *m)
}
Py_INCREF(WaitidResultType);
PyModule_AddObject(m, "waitid_result", WaitidResultType);
- get_posix_state(m)->WaitidResultType = WaitidResultType;
+ state->WaitidResultType = WaitidResultType;
#endif
stat_result_desc.name = "os.stat_result"; /* see issue #19209 */
@@ -14826,7 +14841,7 @@ posixmodule_exec(PyObject *m)
}
Py_INCREF(StatResultType);
PyModule_AddObject(m, "stat_result", StatResultType);
- get_posix_state(m)->StatResultType = StatResultType;
+ state->StatResultType = StatResultType;
structseq_new = ((PyTypeObject *)StatResultType)->tp_new;
((PyTypeObject *)StatResultType)->tp_new = statresult_new;
@@ -14837,7 +14852,7 @@ posixmodule_exec(PyObject *m)
}
Py_INCREF(StatVFSResultType);
PyModule_AddObject(m, "statvfs_result", StatVFSResultType);
- get_posix_state(m)->StatVFSResultType = StatVFSResultType;
+ state->StatVFSResultType = StatVFSResultType;
#ifdef NEED_TICKS_PER_SECOND
# if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK)
ticks_per_second = sysconf(_SC_CLK_TCK);
@@ -14856,7 +14871,7 @@ posixmodule_exec(PyObject *m)
}
Py_INCREF(SchedParamType);
PyModule_AddObject(m, "sched_param", SchedParamType);
- get_posix_state(m)->SchedParamType = SchedParamType;
+ state->SchedParamType = SchedParamType;
((PyTypeObject *)SchedParamType)->tp_new = os_sched_param;
#endif
@@ -14867,14 +14882,14 @@ posixmodule_exec(PyObject *m)
}
Py_INCREF(TerminalSizeType);
PyModule_AddObject(m, "terminal_size", TerminalSizeType);
- get_posix_state(m)->TerminalSizeType = TerminalSizeType;
+ state->TerminalSizeType = TerminalSizeType;
/* initialize scandir types */
PyObject *ScandirIteratorType = PyType_FromModuleAndSpec(m, &ScandirIteratorType_spec, NULL);
if (ScandirIteratorType == NULL) {
return -1;
}
- get_posix_state(m)->ScandirIteratorType = ScandirIteratorType;
+ state->ScandirIteratorType = ScandirIteratorType;
PyObject *DirEntryType = PyType_FromModuleAndSpec(m, &DirEntryType_spec, NULL);
if (DirEntryType == NULL) {
@@ -14882,7 +14897,7 @@ posixmodule_exec(PyObject *m)
}
Py_INCREF(DirEntryType);
PyModule_AddObject(m, "DirEntry", DirEntryType);
- get_posix_state(m)->DirEntryType = DirEntryType;
+ state->DirEntryType = DirEntryType;
times_result_desc.name = MODNAME ".times_result";
PyObject *TimesResultType = (PyObject *)PyStructSequence_NewType(&times_result_desc);
@@ -14891,7 +14906,7 @@ posixmodule_exec(PyObject *m)
}
Py_INCREF(TimesResultType);
PyModule_AddObject(m, "times_result", TimesResultType);
- get_posix_state(m)->TimesResultType = TimesResultType;
+ state->TimesResultType = TimesResultType;
PyTypeObject *UnameResultType = PyStructSequence_NewType(&uname_result_desc);
if (UnameResultType == NULL) {
@@ -14899,7 +14914,7 @@ posixmodule_exec(PyObject *m)
}
Py_INCREF(UnameResultType);
PyModule_AddObject(m, "uname_result", (PyObject *)UnameResultType);
- get_posix_state(m)->UnameResultType = (PyObject *)UnameResultType;
+ state->UnameResultType = (PyObject *)UnameResultType;
#ifdef __APPLE__
/*
@@ -14939,15 +14954,15 @@ posixmodule_exec(PyObject *m)
#endif /* __APPLE__ */
- if ((get_posix_state(m)->billion = PyLong_FromLong(1000000000)) == NULL)
+ if ((state->billion = PyLong_FromLong(1000000000)) == NULL)
return -1;
#if defined(HAVE_WAIT3) || defined(HAVE_WAIT4)
- get_posix_state(m)->struct_rusage = PyUnicode_InternFromString("struct_rusage");
- if (get_posix_state(m)->struct_rusage == NULL)
+ state->struct_rusage = PyUnicode_InternFromString("struct_rusage");
+ if (state->struct_rusage == NULL)
return -1;
#endif
- get_posix_state(m)->st_mode = PyUnicode_InternFromString("st_mode");
- if (get_posix_state(m)->st_mode == NULL)
+ state->st_mode = PyUnicode_InternFromString("st_mode");
+ if (state->st_mode == NULL)
return -1;
/* suppress "function not used" warnings */
@@ -14964,10 +14979,11 @@ posixmodule_exec(PyObject *m)
* provide list of locally available functions
* so os.py can populate support_* lists
*/
- list = PyList_New(0);
- if (!list)
+ PyObject *list = PyList_New(0);
+ if (!list) {
return -1;
- for (trace = have_functions; *trace; trace++) {
+ }
+ for (const char * const *trace = have_functions; *trace; trace++) {
PyObject *unicode = PyUnicode_DecodeASCII(*trace, strlen(*trace), NULL);
if (!unicode)
return -1;
diff --git a/Objects/genobject.c b/Objects/genobject.c
index 5b253edfdc..fb01e581f8 100644
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -217,6 +217,18 @@ gen_send_ex(PyGenObject *gen, PyObject *arg, int exc, int closing)
assert(f->f_back == NULL);
f->f_back = tstate->frame;
+ _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state;
+ if (exc && gi_exc_state->exc_type != NULL &&
+ gi_exc_state->exc_type != Py_None)
+ {
+ Py_INCREF(gi_exc_state->exc_type);
+ Py_XINCREF(gi_exc_state->exc_value);
+ Py_XINCREF(gi_exc_state->exc_traceback);
+ _PyErr_ChainExceptions(gi_exc_state->exc_type,
+ gi_exc_state->exc_value,
+ gi_exc_state->exc_traceback);
+ }
+
gen->gi_running = 1;
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
@@ -512,16 +524,6 @@ throw_here:
}
PyErr_Restore(typ, val, tb);
-
- _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state;
- if (gi_exc_state->exc_type != NULL && gi_exc_state->exc_type != Py_None) {
- Py_INCREF(gi_exc_state->exc_type);
- Py_XINCREF(gi_exc_state->exc_value);
- Py_XINCREF(gi_exc_state->exc_traceback);
- _PyErr_ChainExceptions(gi_exc_state->exc_type,
- gi_exc_state->exc_value,
- gi_exc_state->exc_traceback);
- }
return gen_send_ex(gen, Py_None, 1, 0);
failed_throw:
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index 826298c23a..ea46a44bf5 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -463,7 +463,7 @@ unicode_check_encoding_errors(const char *encoding, const char *errors)
/* Avoid calling _PyCodec_Lookup() and PyCodec_LookupError() before the
codec registry is ready: before_PyUnicode_InitEncodings() is called. */
- if (!interp->fs_codec.encoding) {
+ if (!interp->unicode.fs_codec.encoding) {
return 0;
}
@@ -2289,8 +2289,8 @@ _PyUnicode_FromId(_Py_Identifier *id)
return id->object;
}
-void
-_PyUnicode_ClearStaticStrings()
+static void
+unicode_clear_static_strings(void)
{
_Py_Identifier *tmp, *s = static_strings;
while (s) {
@@ -3650,16 +3650,17 @@ PyObject *
PyUnicode_EncodeFSDefault(PyObject *unicode)
{
PyInterpreterState *interp = _PyInterpreterState_GET();
- if (interp->fs_codec.utf8) {
+ struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec;
+ if (fs_codec->utf8) {
return unicode_encode_utf8(unicode,
- interp->fs_codec.error_handler,
- interp->fs_codec.errors);
+ fs_codec->error_handler,
+ fs_codec->errors);
}
#ifndef _Py_FORCE_UTF8_FS_ENCODING
- else if (interp->fs_codec.encoding) {
+ else if (fs_codec->encoding) {
return PyUnicode_AsEncodedString(unicode,
- interp->fs_codec.encoding,
- interp->fs_codec.errors);
+ fs_codec->encoding,
+ fs_codec->errors);
}
#endif
else {
@@ -3886,17 +3887,18 @@ PyObject*
PyUnicode_DecodeFSDefaultAndSize(const char *s, Py_ssize_t size)
{
PyInterpreterState *interp = _PyInterpreterState_GET();
- if (interp->fs_codec.utf8) {
+ struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec;
+ if (fs_codec->utf8) {
return unicode_decode_utf8(s, size,
- interp->fs_codec.error_handler,
- interp->fs_codec.errors,
+ fs_codec->error_handler,
+ fs_codec->errors,
NULL);
}
#ifndef _Py_FORCE_UTF8_FS_ENCODING
- else if (interp->fs_codec.encoding) {
+ else if (fs_codec->encoding) {
return PyUnicode_Decode(s, size,
- interp->fs_codec.encoding,
- interp->fs_codec.errors);
+ fs_codec->encoding,
+ fs_codec->errors);
}
#endif
else {
@@ -16071,16 +16073,17 @@ init_fs_codec(PyInterpreterState *interp)
return -1;
}
- PyMem_RawFree(interp->fs_codec.encoding);
- interp->fs_codec.encoding = encoding;
+ struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec;
+ PyMem_RawFree(fs_codec->encoding);
+ fs_codec->encoding = encoding;
/* encoding has been normalized by init_fs_encoding() */
- interp->fs_codec.utf8 = (strcmp(encoding, "utf-8") == 0);
- PyMem_RawFree(interp->fs_codec.errors);
- interp->fs_codec.errors = errors;
- interp->fs_codec.error_handler = error_handler;
+ fs_codec->utf8 = (strcmp(encoding, "utf-8") == 0);
+ PyMem_RawFree(fs_codec->errors);
+ fs_codec->errors = errors;
+ fs_codec->error_handler = error_handler;
#ifdef _Py_FORCE_UTF8_FS_ENCODING
- assert(interp->fs_codec.utf8 == 1);
+ assert(fs_codec->utf8 == 1);
#endif
/* At this point, PyUnicode_EncodeFSDefault() and
@@ -16089,8 +16092,8 @@ init_fs_codec(PyInterpreterState *interp)
/* Set Py_FileSystemDefaultEncoding and Py_FileSystemDefaultEncodeErrors
global configuration variables. */
- if (_Py_SetFileSystemEncoding(interp->fs_codec.encoding,
- interp->fs_codec.errors) < 0) {
+ if (_Py_SetFileSystemEncoding(fs_codec->encoding,
+ fs_codec->errors) < 0) {
PyErr_NoMemory();
return -1;
}
@@ -16133,15 +16136,14 @@ _PyUnicode_InitEncodings(PyThreadState *tstate)
static void
-_PyUnicode_FiniEncodings(PyThreadState *tstate)
+_PyUnicode_FiniEncodings(struct _Py_unicode_fs_codec *fs_codec)
{
- PyInterpreterState *interp = tstate->interp;
- PyMem_RawFree(interp->fs_codec.encoding);
- interp->fs_codec.encoding = NULL;
- interp->fs_codec.utf8 = 0;
- PyMem_RawFree(interp->fs_codec.errors);
- interp->fs_codec.errors = NULL;
- interp->fs_codec.error_handler = _Py_ERROR_UNKNOWN;
+ PyMem_RawFree(fs_codec->encoding);
+ fs_codec->encoding = NULL;
+ fs_codec->utf8 = 0;
+ PyMem_RawFree(fs_codec->errors);
+ fs_codec->errors = NULL;
+ fs_codec->error_handler = _Py_ERROR_UNKNOWN;
}
@@ -16196,10 +16198,10 @@ _PyUnicode_Fini(PyThreadState *tstate)
Py_CLEAR(unicode_latin1[i]);
}
#endif
- _PyUnicode_ClearStaticStrings();
+ unicode_clear_static_strings();
}
- _PyUnicode_FiniEncodings(tstate);
+ _PyUnicode_FiniEncodings(&tstate->interp->unicode.fs_codec);
}
diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c
index b1b248187e..f4c5692212 100644
--- a/Parser/pegen/parse.c
+++ b/Parser/pegen/parse.c
@@ -199,8 +199,8 @@ static KeywordToken *reserved_keywords[] = {
#define star_targets_seq_type 1128
#define star_target_type 1129
#define star_atom_type 1130
-#define inside_paren_ann_assign_target_type 1131
-#define ann_assign_subscript_attribute_target_type 1132
+#define single_target_type 1131
+#define single_subscript_attribute_target_type 1132
#define del_targets_type 1133
#define del_target_type 1134
#define del_t_atom_type 1135
@@ -501,8 +501,8 @@ static expr_ty star_targets_rule(Parser *p);
static asdl_seq* star_targets_seq_rule(Parser *p);
static expr_ty star_target_rule(Parser *p);
static expr_ty star_atom_rule(Parser *p);
-static expr_ty inside_paren_ann_assign_target_rule(Parser *p);
-static expr_ty ann_assign_subscript_attribute_target_rule(Parser *p);
+static expr_ty single_target_rule(Parser *p);
+static expr_ty single_subscript_attribute_target_rule(Parser *p);
static asdl_seq* del_targets_rule(Parser *p);
static expr_ty del_target_rule(Parser *p);
static expr_ty del_t_atom_rule(Parser *p);
@@ -1590,9 +1590,9 @@ compound_stmt_rule(Parser *p)
// assignment:
// | NAME ':' expression ['=' annotated_rhs]
-// | ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs]
+// | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]
// | ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT?
-// | target augassign (yield_expr | star_expressions)
+// | single_target augassign (yield_expr | star_expressions)
// | invalid_assignment
static stmt_ty
assignment_rule(Parser *p)
@@ -1642,13 +1642,13 @@ assignment_rule(Parser *p)
}
p->mark = _mark;
}
- { // ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs]
+ { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]
Token * _literal;
void *a;
expr_ty b;
void *c;
if (
- (a = _tmp_20_rule(p)) // '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target
+ (a = _tmp_20_rule(p)) // '(' single_target ')' | single_subscript_attribute_target
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -1703,12 +1703,12 @@ assignment_rule(Parser *p)
}
p->mark = _mark;
}
- { // target augassign (yield_expr | star_expressions)
+ { // single_target augassign (yield_expr | star_expressions)
expr_ty a;
AugOperator* b;
void *c;
if (
- (a = target_rule(p)) // target
+ (a = single_target_rule(p)) // single_target
&&
(b = augassign_rule(p)) // augassign
&&
@@ -3350,7 +3350,7 @@ try_stmt_rule(Parser *p)
return _res;
}
-// except_block: 'except' expression ['as' target] ':' block | 'except' ':' block
+// except_block: 'except' expression ['as' NAME] ':' block | 'except' ':' block
static excepthandler_ty
except_block_rule(Parser *p)
{
@@ -3367,7 +3367,7 @@ except_block_rule(Parser *p)
UNUSED(_start_lineno); // Only used by EXTRA macro
int _start_col_offset = p->tokens[_mark]->col_offset;
UNUSED(_start_col_offset); // Only used by EXTRA macro
- { // 'except' expression ['as' target] ':' block
+ { // 'except' expression ['as' NAME] ':' block
Token * _keyword;
Token * _literal;
asdl_seq* b;
@@ -3378,7 +3378,7 @@ except_block_rule(Parser *p)
&&
(e = expression_rule(p)) // expression
&&
- (t = _tmp_48_rule(p), 1) // ['as' target]
+ (t = _tmp_48_rule(p), 1) // ['as' NAME]
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -9605,25 +9605,22 @@ star_atom_rule(Parser *p)
return _res;
}
-// inside_paren_ann_assign_target:
-// | ann_assign_subscript_attribute_target
-// | NAME
-// | '(' inside_paren_ann_assign_target ')'
+// single_target: single_subscript_attribute_target | NAME | '(' single_target ')'
static expr_ty
-inside_paren_ann_assign_target_rule(Parser *p)
+single_target_rule(Parser *p)
{
if (p->error_indicator) {
return NULL;
}
expr_ty _res = NULL;
int _mark = p->mark;
- { // ann_assign_subscript_attribute_target
- expr_ty ann_assign_subscript_attribute_target_var;
+ { // single_subscript_attribute_target
+ expr_ty single_subscript_attribute_target_var;
if (
- (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target
+ (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target
)
{
- _res = ann_assign_subscript_attribute_target_var;
+ _res = single_subscript_attribute_target_var;
goto done;
}
p->mark = _mark;
@@ -9643,14 +9640,14 @@ inside_paren_ann_assign_target_rule(Parser *p)
}
p->mark = _mark;
}
- { // '(' inside_paren_ann_assign_target ')'
+ { // '(' single_target ')'
Token * _literal;
Token * _literal_1;
expr_ty a;
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target
+ (a = single_target_rule(p)) // single_target
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
@@ -9669,11 +9666,11 @@ inside_paren_ann_assign_target_rule(Parser *p)
return _res;
}
-// ann_assign_subscript_attribute_target:
+// single_subscript_attribute_target:
// | t_primary '.' NAME !t_lookahead
// | t_primary '[' slices ']' !t_lookahead
static expr_ty
-ann_assign_subscript_attribute_target_rule(Parser *p)
+single_subscript_attribute_target_rule(Parser *p)
{
if (p->error_indicator) {
return NULL;
@@ -10750,7 +10747,8 @@ invalid_named_expression_rule(Parser *p)
// | tuple ':'
// | star_named_expression ',' star_named_expressions* ':'
// | expression ':' expression ['=' annotated_rhs]
-// | expression ('=' | augassign) (yield_expr | star_expressions)
+// | star_expressions '=' (yield_expr | star_expressions)
+// | star_expressions augassign (yield_expr | star_expressions)
static void *
invalid_assignment_rule(Parser *p)
{
@@ -10844,19 +10842,40 @@ invalid_assignment_rule(Parser *p)
}
p->mark = _mark;
}
- { // expression ('=' | augassign) (yield_expr | star_expressions)
+ { // star_expressions '=' (yield_expr | star_expressions)
+ Token * _literal;
void *_tmp_128_var;
+ expr_ty a;
+ if (
+ (a = star_expressions_rule(p)) // star_expressions
+ &&
+ (_literal = _PyPegen_expect_token(p, 22)) // token='='
+ &&
+ (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions
+ )
+ {
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ }
+ { // star_expressions augassign (yield_expr | star_expressions)
void *_tmp_129_var;
expr_ty a;
+ AugOperator* augassign_var;
if (
- (a = expression_rule(p)) // expression
+ (a = star_expressions_rule(p)) // star_expressions
&&
- (_tmp_128_var = _tmp_128_rule(p)) // '=' | augassign
+ (augassign_var = augassign_rule(p)) // augassign
&&
(_tmp_129_var = _tmp_129_rule(p)) // yield_expr | star_expressions
)
{
- _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot assign to %s" , _PyPegen_get_expr_name ( a ) );
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "'%s' is an illegal expression for augmented assignment" , _PyPegen_get_expr_name ( a ) );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
return NULL;
@@ -11907,7 +11926,7 @@ _tmp_19_rule(Parser *p)
return _res;
}
-// _tmp_20: '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target
+// _tmp_20: '(' single_target ')' | single_subscript_attribute_target
static void *
_tmp_20_rule(Parser *p)
{
@@ -11916,14 +11935,14 @@ _tmp_20_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // '(' inside_paren_ann_assign_target ')'
+ { // '(' single_target ')'
Token * _literal;
Token * _literal_1;
expr_ty b;
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (b = inside_paren_ann_assign_target_rule(p)) // inside_paren_ann_assign_target
+ (b = single_target_rule(p)) // single_target
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
@@ -11937,13 +11956,13 @@ _tmp_20_rule(Parser *p)
}
p->mark = _mark;
}
- { // ann_assign_subscript_attribute_target
- expr_ty ann_assign_subscript_attribute_target_var;
+ { // single_subscript_attribute_target
+ expr_ty single_subscript_attribute_target_var;
if (
- (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) // ann_assign_subscript_attribute_target
+ (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target
)
{
- _res = ann_assign_subscript_attribute_target_var;
+ _res = single_subscript_attribute_target_var;
goto done;
}
p->mark = _mark;
@@ -13073,7 +13092,7 @@ _loop1_47_rule(Parser *p)
return _seq;
}
-// _tmp_48: 'as' target
+// _tmp_48: 'as' NAME
static void *
_tmp_48_rule(Parser *p)
{
@@ -13082,13 +13101,13 @@ _tmp_48_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // 'as' target
+ { // 'as' NAME
Token * _keyword;
expr_ty z;
if (
(_keyword = _PyPegen_expect_token(p, 531)) // token='as'
&&
- (z = target_rule(p)) // target
+ (z = _PyPegen_name_token(p)) // NAME
)
{
_res = z;
@@ -16678,7 +16697,7 @@ _tmp_127_rule(Parser *p)
return _res;
}
-// _tmp_128: '=' | augassign
+// _tmp_128: yield_expr | star_expressions
static void *
_tmp_128_rule(Parser *p)
{
@@ -16687,24 +16706,24 @@ _tmp_128_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // '='
- Token * _literal;
+ { // yield_expr
+ expr_ty yield_expr_var;
if (
- (_literal = _PyPegen_expect_token(p, 22)) // token='='
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- _res = _literal;
+ _res = yield_expr_var;
goto done;
}
p->mark = _mark;
}
- { // augassign
- AugOperator* augassign_var;
+ { // star_expressions
+ expr_ty star_expressions_var;
if (
- (augassign_var = augassign_rule(p)) // augassign
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- _res = augassign_var;
+ _res = star_expressions_var;
goto done;
}
p->mark = _mark;
diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c
index 083088bd96..7f3e4561de 100644
--- a/Parser/pegen/pegen.c
+++ b/Parser/pegen/pegen.c
@@ -300,30 +300,6 @@ error:
Py_XDECREF(tuple);
}
-static inline PyObject *
-get_error_line(char *buffer, int is_file)
-{
- const char *newline;
- if (is_file) {
- newline = strrchr(buffer, '\n');
- } else {
- newline = strchr(buffer, '\n');
- }
-
- if (is_file) {
- while (newline > buffer && newline[-1] == '\n') {
- --newline;
- }
- }
-
- if (newline) {
- return PyUnicode_DecodeUTF8(buffer, newline - buffer, "replace");
- }
- else {
- return PyUnicode_DecodeUTF8(buffer, strlen(buffer), "replace");
- }
-}
-
static int
tokenizer_error(Parser *p)
{
@@ -422,7 +398,11 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype,
}
if (!error_line) {
- error_line = get_error_line(p->tok->buf, p->start_rule == Py_file_input);
+ Py_ssize_t size = p->tok->inp - p->tok->buf;
+ if (size && p->tok->buf[size-1] == '\n') {
+ size--;
+ }
+ error_line = PyUnicode_DecodeUTF8(p->tok->buf, size, "replace");
if (!error_line) {
goto error;
}
@@ -2074,3 +2054,49 @@ _PyPegen_make_module(Parser *p, asdl_seq *a) {
}
return Module(a, type_ignores, p->arena);
}
+
+// Error reporting helpers
+
+expr_ty
+_PyPegen_get_invalid_target(expr_ty e)
+{
+ if (e == NULL) {
+ return NULL;
+ }
+
+#define VISIT_CONTAINER(CONTAINER, TYPE) do { \
+ Py_ssize_t len = asdl_seq_LEN(CONTAINER->v.TYPE.elts);\
+ for (Py_ssize_t i = 0; i < len; i++) {\
+ expr_ty other = asdl_seq_GET(CONTAINER->v.TYPE.elts, i);\
+ expr_ty child = _PyPegen_get_invalid_target(other);\
+ if (child != NULL) {\
+ return child;\
+ }\
+ }\
+ } while (0)
+
+ // We only need to visit List and Tuple nodes recursively as those
+ // are the only ones that can contain valid names in targets when
+ // they are parsed as expressions. Any other kind of expression
+ // that is a container (like Sets or Dicts) is directly invalid and
+ // we don't need to visit it recursively.
+
+ switch (e->kind) {
+ case List_kind: {
+ VISIT_CONTAINER(e, List);
+ return NULL;
+ }
+ case Tuple_kind: {
+ VISIT_CONTAINER(e, Tuple);
+ return NULL;
+ }
+ case Starred_kind:
+ return _PyPegen_get_invalid_target(e->v.Starred.value);
+ case Name_kind:
+ case Subscript_kind:
+ case Attribute_kind:
+ return NULL;
+ default:
+ return e;
+ }
+} \ No newline at end of file
diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h
index e5b1b757bd..b9d4c048bb 100644
--- a/Parser/pegen/pegen.h
+++ b/Parser/pegen/pegen.h
@@ -260,6 +260,10 @@ void *_PyPegen_arguments_parsing_error(Parser *, expr_ty);
int _PyPegen_check_barry_as_flufl(Parser *);
mod_ty _PyPegen_make_module(Parser *, asdl_seq *);
+// Error reporting helpers
+
+expr_ty _PyPegen_get_invalid_target(expr_ty e);
+
void *_PyPegen_parse(Parser *);
#endif
diff --git a/Python/ast.c b/Python/ast.c
index 1a4a3110e6..2d20ca62aa 100644
--- a/Python/ast.c
+++ b/Python/ast.c
@@ -3164,10 +3164,7 @@ ast_for_expr_stmt(struct compiling *c, const node *n)
expr1 = ast_for_testlist(c, ch);
if (!expr1)
return NULL;
- if(!set_context(c, expr1, Store, ch))
- return NULL;
- /* set_context checks that most expressions are not the left side.
- Augmented assignments can only have a name, a subscript, or an
+ /* Augmented assignments can only have a name, a subscript, or an
attribute on the left, though, so we have to explicitly check for
those. */
switch (expr1->kind) {
@@ -3176,10 +3173,16 @@ ast_for_expr_stmt(struct compiling *c, const node *n)
case Subscript_kind:
break;
default:
- ast_error(c, ch, "illegal expression for augmented assignment");
+ ast_error(c, ch, "'%s' is an illegal expression for augmented assignment",
+ get_expr_name(expr1));
return NULL;
}
+ /* set_context checks that most expressions are not the left side. */
+ if(!set_context(c, expr1, Store, ch)) {
+ return NULL;
+ }
+
ch = CHILD(n, 2);
if (TYPE(ch) == testlist)
expr2 = ast_for_testlist(c, ch);
diff --git a/Python/hashtable.c b/Python/hashtable.c
index d1467ad94e..b92e8ca08c 100644
--- a/Python/hashtable.c
+++ b/Python/hashtable.c
@@ -60,7 +60,7 @@
((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY))
/* Forward declaration */
-static void hashtable_rehash(_Py_hashtable_t *ht);
+static int hashtable_rehash(_Py_hashtable_t *ht);
static void
_Py_slist_init(_Py_slist_t *list)
@@ -119,66 +119,20 @@ round_size(size_t s)
size_t
_Py_hashtable_size(const _Py_hashtable_t *ht)
{
- size_t size;
-
- size = sizeof(_Py_hashtable_t);
-
+ size_t size = sizeof(_Py_hashtable_t);
/* buckets */
- size += ht->num_buckets * sizeof(_Py_hashtable_entry_t *);
-
+ size += ht->nbuckets * sizeof(_Py_hashtable_entry_t *);
/* entries */
- size += ht->entries * sizeof(_Py_hashtable_entry_t);
-
+ size += ht->nentries * sizeof(_Py_hashtable_entry_t);
return size;
}
-#ifdef Py_DEBUG
-void
-_Py_hashtable_print_stats(_Py_hashtable_t *ht)
-{
- size_t size;
- size_t chain_len, max_chain_len, total_chain_len, nchains;
- _Py_hashtable_entry_t *entry;
- size_t hv;
- double load;
-
- size = _Py_hashtable_size(ht);
-
- load = (double)ht->entries / ht->num_buckets;
-
- max_chain_len = 0;
- total_chain_len = 0;
- nchains = 0;
- for (hv = 0; hv < ht->num_buckets; hv++) {
- entry = TABLE_HEAD(ht, hv);
- if (entry != NULL) {
- chain_len = 0;
- for (; entry; entry = ENTRY_NEXT(entry)) {
- chain_len++;
- }
- if (chain_len > max_chain_len)
- max_chain_len = chain_len;
- total_chain_len += chain_len;
- nchains++;
- }
- }
- printf("hash table %p: entries=%"
- PY_FORMAT_SIZE_T "u/%" PY_FORMAT_SIZE_T "u (%.0f%%), ",
- (void *)ht, ht->entries, ht->num_buckets, load * 100.0);
- if (nchains)
- printf("avg_chain_len=%.1f, ", (double)total_chain_len / nchains);
- printf("max_chain_len=%" PY_FORMAT_SIZE_T "u, %" PY_FORMAT_SIZE_T "u KiB\n",
- max_chain_len, size / 1024);
-}
-#endif
-
-
_Py_hashtable_entry_t *
_Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key)
{
Py_uhash_t key_hash = ht->hash_func(key);
- size_t index = key_hash & (ht->num_buckets - 1);
+ size_t index = key_hash & (ht->nbuckets - 1);
_Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index);
while (1) {
if (entry == NULL) {
@@ -200,7 +154,7 @@ static _Py_hashtable_entry_t *
_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key)
{
Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key);
- size_t index = key_hash & (ht->num_buckets - 1);
+ size_t index = key_hash & (ht->nbuckets - 1);
_Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index);
while (1) {
if (entry == NULL) {
@@ -220,7 +174,7 @@ void*
_Py_hashtable_steal(_Py_hashtable_t *ht, const void *key)
{
Py_uhash_t key_hash = ht->hash_func(key);
- size_t index = key_hash & (ht->num_buckets - 1);
+ size_t index = key_hash & (ht->nbuckets - 1);
_Py_hashtable_entry_t *entry = TABLE_HEAD(ht, index);
_Py_hashtable_entry_t *previous = NULL;
@@ -238,12 +192,13 @@ _Py_hashtable_steal(_Py_hashtable_t *ht, const void *key)
_Py_slist_remove(&ht->buckets[index], (_Py_slist_item_t *)previous,
(_Py_slist_item_t *)entry);
- ht->entries--;
+ ht->nentries--;
void *value = entry->value;
ht->alloc.free(entry);
- if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) {
+ if ((float)ht->nentries / (float)ht->nbuckets < HASHTABLE_LOW) {
+ // Ignore failure: error cannot be reported to the caller
hashtable_rehash(ht);
}
return value;
@@ -263,8 +218,6 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value)
assert(entry == NULL);
#endif
- Py_uhash_t key_hash = ht->hash_func(key);
- size_t index = key_hash & (ht->num_buckets - 1);
entry = ht->alloc.malloc(sizeof(_Py_hashtable_entry_t));
if (entry == NULL) {
@@ -272,15 +225,21 @@ _Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value)
return -1;
}
- entry->key_hash = key_hash;
+ entry->key_hash = ht->hash_func(key);
entry->key = (void *)key;
entry->value = value;
- _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry);
- ht->entries++;
+ ht->nentries++;
+ if ((float)ht->nentries / (float)ht->nbuckets > HASHTABLE_HIGH) {
+ if (hashtable_rehash(ht) < 0) {
+ ht->nentries--;
+ ht->alloc.free(entry);
+ return -1;
+ }
+ }
- if ((float)ht->entries / (float)ht->num_buckets > HASHTABLE_HIGH)
- hashtable_rehash(ht);
+ size_t index = entry->key_hash & (ht->nbuckets - 1);
+ _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry);
return 0;
}
@@ -303,61 +262,53 @@ _Py_hashtable_foreach(_Py_hashtable_t *ht,
_Py_hashtable_foreach_func func,
void *user_data)
{
- _Py_hashtable_entry_t *entry;
- size_t hv;
-
- for (hv = 0; hv < ht->num_buckets; hv++) {
- for (entry = TABLE_HEAD(ht, hv); entry; entry = ENTRY_NEXT(entry)) {
+ for (size_t hv = 0; hv < ht->nbuckets; hv++) {
+ _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, hv);
+ while (entry != NULL) {
int res = func(ht, entry->key, entry->value, user_data);
- if (res)
+ if (res) {
return res;
+ }
+ entry = ENTRY_NEXT(entry);
}
}
return 0;
}
-static void
+static int
hashtable_rehash(_Py_hashtable_t *ht)
{
- size_t buckets_size, new_size, bucket;
- _Py_slist_t *old_buckets = NULL;
- size_t old_num_buckets;
-
- new_size = round_size((size_t)(ht->entries * HASHTABLE_REHASH_FACTOR));
- if (new_size == ht->num_buckets)
- return;
-
- old_num_buckets = ht->num_buckets;
+ size_t new_size = round_size((size_t)(ht->nentries * HASHTABLE_REHASH_FACTOR));
+ if (new_size == ht->nbuckets) {
+ return 0;
+ }
- buckets_size = new_size * sizeof(ht->buckets[0]);
- old_buckets = ht->buckets;
- ht->buckets = ht->alloc.malloc(buckets_size);
- if (ht->buckets == NULL) {
- /* cancel rehash on memory allocation failure */
- ht->buckets = old_buckets ;
+ size_t buckets_size = new_size * sizeof(ht->buckets[0]);
+ _Py_slist_t *new_buckets = ht->alloc.malloc(buckets_size);
+ if (new_buckets == NULL) {
/* memory allocation failed */
- return;
+ return -1;
}
- memset(ht->buckets, 0, buckets_size);
-
- ht->num_buckets = new_size;
-
- for (bucket = 0; bucket < old_num_buckets; bucket++) {
- _Py_hashtable_entry_t *entry, *next;
- for (entry = BUCKETS_HEAD(old_buckets[bucket]); entry != NULL; entry = next) {
- size_t entry_index;
-
+ memset(new_buckets, 0, buckets_size);
+ for (size_t bucket = 0; bucket < ht->nbuckets; bucket++) {
+ _Py_hashtable_entry_t *entry = BUCKETS_HEAD(ht->buckets[bucket]);
+ while (entry != NULL) {
assert(ht->hash_func(entry->key) == entry->key_hash);
- next = ENTRY_NEXT(entry);
- entry_index = entry->key_hash & (new_size - 1);
+ _Py_hashtable_entry_t *next = ENTRY_NEXT(entry);
+ size_t entry_index = entry->key_hash & (new_size - 1);
+
+ _Py_slist_prepend(&new_buckets[entry_index], (_Py_slist_item_t*)entry);
- _Py_slist_prepend(&ht->buckets[entry_index], (_Py_slist_item_t*)entry);
+ entry = next;
}
}
- ht->alloc.free(old_buckets);
+ ht->alloc.free(ht->buckets);
+ ht->nbuckets = new_size;
+ ht->buckets = new_buckets;
+ return 0;
}
@@ -368,10 +319,7 @@ _Py_hashtable_new_full(_Py_hashtable_hash_func hash_func,
_Py_hashtable_destroy_func value_destroy_func,
_Py_hashtable_allocator_t *allocator)
{
- _Py_hashtable_t *ht;
- size_t buckets_size;
_Py_hashtable_allocator_t alloc;
-
if (allocator == NULL) {
alloc.malloc = PyMem_Malloc;
alloc.free = PyMem_Free;
@@ -380,14 +328,15 @@ _Py_hashtable_new_full(_Py_hashtable_hash_func hash_func,
alloc = *allocator;
}
- ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t));
- if (ht == NULL)
+ _Py_hashtable_t *ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t));
+ if (ht == NULL) {
return ht;
+ }
- ht->num_buckets = HASHTABLE_MIN_SIZE;
- ht->entries = 0;
+ ht->nbuckets = HASHTABLE_MIN_SIZE;
+ ht->nentries = 0;
- buckets_size = ht->num_buckets * sizeof(ht->buckets[0]);
+ size_t buckets_size = ht->nbuckets * sizeof(ht->buckets[0]);
ht->buckets = alloc.malloc(buckets_size);
if (ht->buckets == NULL) {
alloc.free(ht);
@@ -435,25 +384,26 @@ _Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry)
void
_Py_hashtable_clear(_Py_hashtable_t *ht)
{
- _Py_hashtable_entry_t *entry, *next;
- size_t i;
-
- for (i=0; i < ht->num_buckets; i++) {
- for (entry = TABLE_HEAD(ht, i); entry != NULL; entry = next) {
- next = ENTRY_NEXT(entry);
+ for (size_t i=0; i < ht->nbuckets; i++) {
+ _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i);
+ while (entry != NULL) {
+ _Py_hashtable_entry_t *next = ENTRY_NEXT(entry);
_Py_hashtable_destroy_entry(ht, entry);
+ entry = next;
}
_Py_slist_init(&ht->buckets[i]);
}
- ht->entries = 0;
- hashtable_rehash(ht);
+ ht->nentries = 0;
+ // Ignore failure: clear function is not expected to fail
+ // because of a memory allocation failure.
+ (void)hashtable_rehash(ht);
}
void
_Py_hashtable_destroy(_Py_hashtable_t *ht)
{
- for (size_t i = 0; i < ht->num_buckets; i++) {
+ for (size_t i = 0; i < ht->nbuckets; i++) {
_Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i);
while (entry) {
_Py_hashtable_entry_t *entry_next = ENTRY_NEXT(entry);
diff --git a/Python/marshal.c b/Python/marshal.c
index b096ff8932..a0f6b98126 100644
--- a/Python/marshal.c
+++ b/Python/marshal.c
@@ -312,7 +312,7 @@ w_ref(PyObject *v, char *flag, WFILE *p)
w_long(w, p);
return 1;
} else {
- size_t s = p->hashtable->entries;
+ size_t s = p->hashtable->nentries;
/* we don't support long indices */
if (s >= 0x7fffffff) {
PyErr_SetString(PyExc_ValueError, "too many objects");
diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py
index 281a749a93..b07ffdd928 100755
--- a/Tools/clinic/clinic.py
+++ b/Tools/clinic/clinic.py
@@ -724,7 +724,7 @@ class CLanguage(Language):
parser_prototype_def_class = normalize_snippet("""
static PyObject *
- {c_basename}({self_type}{self_name}, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+ {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
""")
# parser_body_fields remembers the fields passed in to the
@@ -1305,7 +1305,8 @@ class CLanguage(Language):
template_dict['docstring'] = self.docstring_for_c_string(f)
template_dict['self_name'] = template_dict['self_type'] = template_dict['self_type_check'] = ''
- f_self.converter.set_template_dict(template_dict)
+ for converter in converters:
+ converter.set_template_dict(template_dict)
f.return_converter.render(f, data)
template_dict['impl_return_type'] = f.return_converter.type
@@ -2698,6 +2699,10 @@ class CConverter(metaclass=CConverterAutoRegister):
""".format(argname=argname, paramname=self.name, cast=cast)
return None
+ def set_template_dict(self, template_dict):
+ pass
+
+
type_checks = {
'&PyLong_Type': ('PyLong_Check', 'int'),
'&PyTuple_Type': ('PyTuple_Check', 'tuple'),