summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorda-woods <dw-git@d-woods.co.uk>2022-12-08 20:47:31 +0000
committerda-woods <dw-git@d-woods.co.uk>2022-12-08 20:47:31 +0000
commit880b383de482cc8f4feae5c799c8a7e271575e1c (patch)
tree019734c005c39737ac375463b1e6aa23153afc13
parentc6bda31bcb957027acea36a5c72e57b404713b72 (diff)
parent82f7e9bfca66144712e2b0f8f05c268737595385 (diff)
downloadcython-880b383de482cc8f4feae5c799c8a7e271575e1c.tar.gz
Merge branch 'parse-match' into basic_match
-rw-r--r--.github/ISSUE_TEMPLATE/bug_report.yml55
-rw-r--r--.github/ISSUE_TEMPLATE/feature_request.md41
-rw-r--r--.github/ISSUE_TEMPLATE/feature_request.yml46
-rw-r--r--.github/ISSUE_TEMPLATE/other.md (renamed from .github/ISSUE_TEMPLATE/bug_report.md)27
-rw-r--r--.github/workflows/ci.yml151
-rw-r--r--.github/workflows/wheel-manylinux.yml91
-rw-r--r--.github/workflows/wheels.yml151
-rw-r--r--.gitignore27
-rw-r--r--.travis.yml48
-rw-r--r--CHANGES.rst341
-rw-r--r--Cython/Build/Cythonize.py29
-rw-r--r--Cython/Build/Dependencies.py35
-rw-r--r--Cython/Build/Inline.py25
-rw-r--r--Cython/Build/IpythonMagic.py7
-rw-r--r--Cython/Build/Tests/TestDependencies.py142
-rw-r--r--Cython/Compiler/Annotate.py6
-rw-r--r--Cython/Compiler/Builtin.py35
-rw-r--r--Cython/Compiler/CmdLine.py23
-rw-r--r--Cython/Compiler/Code.pxd6
-rw-r--r--Cython/Compiler/Code.py81
-rw-r--r--Cython/Compiler/CythonScope.py2
-rw-r--r--Cython/Compiler/Dataclass.py328
-rw-r--r--Cython/Compiler/ExprNodes.py598
-rw-r--r--Cython/Compiler/FlowControl.pxd10
-rw-r--r--Cython/Compiler/FlowControl.py81
-rw-r--r--Cython/Compiler/FusedNode.py18
-rw-r--r--Cython/Compiler/Lexicon.py3
-rw-r--r--Cython/Compiler/Main.py60
-rw-r--r--Cython/Compiler/MatchCaseNodes.py76
-rw-r--r--Cython/Compiler/MemoryView.py2
-rw-r--r--Cython/Compiler/ModuleNode.py38
-rw-r--r--Cython/Compiler/Naming.py2
-rw-r--r--Cython/Compiler/Nodes.py301
-rw-r--r--Cython/Compiler/Optimize.py25
-rw-r--r--Cython/Compiler/Options.py19
-rw-r--r--Cython/Compiler/ParseTreeTransforms.pxd5
-rw-r--r--Cython/Compiler/ParseTreeTransforms.py272
-rw-r--r--Cython/Compiler/Parsing.pxd24
-rw-r--r--Cython/Compiler/Parsing.py585
-rw-r--r--Cython/Compiler/Pipeline.py15
-rw-r--r--Cython/Compiler/PyrexTypes.py99
-rw-r--r--Cython/Compiler/Symtab.py147
-rw-r--r--Cython/Compiler/Tests/TestCmdLine.py77
-rw-r--r--Cython/Compiler/Tests/TestGrammar.py57
-rw-r--r--Cython/Compiler/Tests/TestParseTreeTransforms.py9
-rw-r--r--Cython/Compiler/TypeInference.py7
-rw-r--r--Cython/Compiler/TypeSlots.py9
-rw-r--r--Cython/Compiler/Visitor.py24
-rw-r--r--Cython/Coverage.py55
-rw-r--r--Cython/Distutils/old_build_ext.py4
-rw-r--r--Cython/Includes/cpython/object.pxd2
-rw-r--r--Cython/Includes/cpython/time.pxd2
-rw-r--r--Cython/Includes/cpython/unicode.pxd35
-rw-r--r--Cython/Includes/libcpp/bit.pxd31
-rw-r--r--Cython/Includes/libcpp/map.pxd3
-rw-r--r--Cython/Includes/libcpp/numeric.pxd7
-rw-r--r--Cython/Includes/libcpp/random.pxd156
-rw-r--r--Cython/Includes/libcpp/set.pxd4
-rw-r--r--Cython/Includes/libcpp/string.pxd9
-rw-r--r--Cython/Includes/libcpp/unordered_map.pxd4
-rw-r--r--Cython/Includes/libcpp/unordered_set.pxd4
-rw-r--r--Cython/Shadow.py4
-rw-r--r--Cython/TestUtils.py111
-rw-r--r--Cython/Utility/AsyncGen.c2
-rw-r--r--Cython/Utility/CommonStructures.c2
-rw-r--r--Cython/Utility/Complex.c2
-rw-r--r--Cython/Utility/Coroutine.c12
-rw-r--r--Cython/Utility/CppSupport.cpp39
-rw-r--r--Cython/Utility/CythonFunction.c31
-rw-r--r--Cython/Utility/Exceptions.c8
-rw-r--r--Cython/Utility/ExtensionTypes.c40
-rw-r--r--Cython/Utility/FunctionArguments.c2
-rw-r--r--Cython/Utility/ImportExport.c51
-rw-r--r--Cython/Utility/MemoryView.pyx65
-rw-r--r--Cython/Utility/MemoryView_C.c94
-rw-r--r--Cython/Utility/ModuleSetupCode.c105
-rw-r--r--Cython/Utility/ObjectHandling.c25
-rw-r--r--Cython/Utility/Optimize.c15
-rw-r--r--Cython/Utility/StringTools.c2
-rw-r--r--Cython/Utility/TypeConversion.c2
-rw-r--r--Cython/Utils.py25
-rw-r--r--Makefile6
-rw-r--r--README.rst4
-rw-r--r--Tools/ci-run.sh29
-rw-r--r--Tools/dataclass_test_data/test_dataclasses.py4266
-rw-r--r--Tools/make_dataclass_tests.py443
-rw-r--r--appveyor.yml138
-rw-r--r--docs/examples/tutorial/clibraries/queue.py2
-rw-r--r--docs/examples/tutorial/embedding/embedded.pyx3
-rw-r--r--docs/examples/tutorial/pure/disabled_annotations.py33
-rw-r--r--docs/examples/userguide/buffer/matrix.py15
-rw-r--r--docs/examples/userguide/buffer/matrix.pyx3
-rw-r--r--docs/examples/userguide/buffer/matrix_with_buffer.py48
-rw-r--r--docs/examples/userguide/buffer/matrix_with_buffer.pyx7
-rw-r--r--docs/examples/userguide/buffer/view_count.py30
-rw-r--r--docs/examples/userguide/buffer/view_count.pyx3
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle.py22
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle.pyx3
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py26
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx4
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py23
-rw-r--r--docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx6
-rw-r--r--docs/examples/userguide/extension_types/cheesy.py36
-rw-r--r--docs/examples/userguide/extension_types/cheesy.pyx36
-rw-r--r--docs/examples/userguide/extension_types/dataclass.py21
-rw-r--r--docs/examples/userguide/extension_types/dataclass.pyx3
-rw-r--r--docs/examples/userguide/extension_types/dict_animal.py12
-rw-r--r--docs/examples/userguide/extension_types/dict_animal.pyx3
-rw-r--r--docs/examples/userguide/extension_types/extendable_animal.py15
-rw-r--r--docs/examples/userguide/extension_types/extendable_animal.pyx5
-rw-r--r--docs/examples/userguide/extension_types/owned_pointer.py17
-rw-r--r--docs/examples/userguide/extension_types/owned_pointer.pyx17
-rw-r--r--docs/examples/userguide/extension_types/penguin.py14
-rw-r--r--docs/examples/userguide/extension_types/penguin.pyx14
-rw-r--r--docs/examples/userguide/extension_types/penguin2.py12
-rw-r--r--docs/examples/userguide/extension_types/penguin2.pyx12
-rw-r--r--docs/examples/userguide/extension_types/pets.py22
-rw-r--r--docs/examples/userguide/extension_types/pets.pyx22
-rw-r--r--docs/examples/userguide/extension_types/python_access.py7
-rw-r--r--docs/examples/userguide/extension_types/python_access.pyx4
-rw-r--r--docs/examples/userguide/extension_types/shrubbery.py2
-rw-r--r--docs/examples/userguide/extension_types/shrubbery.pyx2
-rw-r--r--docs/examples/userguide/extension_types/shrubbery_2.py10
-rw-r--r--docs/examples/userguide/extension_types/shrubbery_2.pyx2
-rw-r--r--docs/examples/userguide/extension_types/widen_shrubbery.py6
-rw-r--r--docs/examples/userguide/extension_types/widen_shrubbery.pyx2
-rw-r--r--docs/examples/userguide/extension_types/wrapper_class.py65
-rw-r--r--docs/examples/userguide/extension_types/wrapper_class.pyx65
-rw-r--r--docs/examples/userguide/language_basics/enum.pyx (renamed from docs/examples/userguide/language_basics/struct_union_enum.pyx)11
-rw-r--r--docs/examples/userguide/language_basics/function_pointer.pyx8
-rw-r--r--docs/examples/userguide/language_basics/function_pointer_struct.pyx9
-rw-r--r--docs/examples/userguide/language_basics/struct.py7
-rw-r--r--docs/examples/userguide/language_basics/struct.pyx7
-rw-r--r--docs/examples/userguide/language_basics/struct_union_enum.py7
-rw-r--r--docs/examples/userguide/language_basics/union.py9
-rw-r--r--docs/examples/userguide/language_basics/union.pyx9
-rw-r--r--docs/examples/userguide/parallelism/breaking_loop.py15
-rw-r--r--docs/examples/userguide/parallelism/breaking_loop.pyx2
-rw-r--r--docs/examples/userguide/parallelism/cimport_openmp.py11
-rw-r--r--docs/examples/userguide/parallelism/cimport_openmp.pyx2
-rw-r--r--docs/examples/userguide/parallelism/memoryview_sum.py7
-rw-r--r--docs/examples/userguide/parallelism/memoryview_sum.pyx7
-rw-r--r--docs/examples/userguide/parallelism/parallel.py30
-rw-r--r--docs/examples/userguide/parallelism/parallel.pyx30
-rw-r--r--docs/examples/userguide/parallelism/setup_py.py16
-rw-r--r--docs/examples/userguide/parallelism/setup_pyx.py (renamed from docs/examples/userguide/parallelism/setup.py)0
-rw-r--r--docs/examples/userguide/parallelism/simple_sum.py10
-rw-r--r--docs/examples/userguide/sharing_declarations/landscaping.py7
-rw-r--r--docs/examples/userguide/sharing_declarations/lunch.py5
-rw-r--r--docs/examples/userguide/sharing_declarations/lunch.pyx1
-rw-r--r--docs/examples/userguide/sharing_declarations/restaurant.py12
-rw-r--r--docs/examples/userguide/sharing_declarations/restaurant.pyx2
-rw-r--r--docs/examples/userguide/sharing_declarations/setup_py.py4
-rw-r--r--docs/examples/userguide/sharing_declarations/setup_pyx.py (renamed from docs/examples/userguide/sharing_declarations/setup.py)0
-rw-r--r--docs/examples/userguide/sharing_declarations/shrubbing.py10
-rw-r--r--docs/examples/userguide/sharing_declarations/shrubbing.pyx5
-rw-r--r--docs/examples/userguide/sharing_declarations/spammery.py10
-rw-r--r--docs/examples/userguide/sharing_declarations/spammery.pyx3
-rw-r--r--docs/examples/userguide/sharing_declarations/volume.py2
-rw-r--r--docs/examples/userguide/special_methods/total_ordering.py13
-rw-r--r--docs/examples/userguide/special_methods/total_ordering.pyx13
-rw-r--r--docs/examples/userguide/wrapping_CPlusPlus/rect.pyx2
-rw-r--r--docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx10
-rw-r--r--docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx2
-rw-r--r--docs/src/quickstart/build.rst4
-rw-r--r--docs/src/quickstart/install.rst12
-rw-r--r--docs/src/tutorial/clibraries.rst37
-rw-r--r--docs/src/tutorial/cython_tutorial.rst6
-rw-r--r--docs/src/tutorial/embedding.rst7
-rw-r--r--docs/src/tutorial/pure.rst18
-rw-r--r--docs/src/two-syntax-variants-used4
-rw-r--r--docs/src/userguide/buffer.rst34
-rw-r--r--docs/src/userguide/early_binding_for_speed.rst37
-rw-r--r--docs/src/userguide/extension_types.rst737
-rw-r--r--docs/src/userguide/external_C_code.rst4
-rw-r--r--docs/src/userguide/language_basics.rst240
-rw-r--r--docs/src/userguide/memoryviews.rst1
-rw-r--r--docs/src/userguide/migrating_to_cy30.rst63
-rw-r--r--docs/src/userguide/parallelism.rst89
-rw-r--r--docs/src/userguide/sharing_declarations.rst155
-rw-r--r--docs/src/userguide/source_files_and_compilation.rst13
-rw-r--r--docs/src/userguide/special_methods.rst24
-rw-r--r--pyximport/_pyximport2.py620
-rw-r--r--pyximport/_pyximport3.py478
-rw-r--r--pyximport/pyximport.py607
-rwxr-xr-xruntests.py119
-rwxr-xr-xsetup.py55
-rw-r--r--test-requirements-27.txt3
-rw-r--r--test-requirements-34.txt2
-rw-r--r--test-requirements-cpython.txt1
-rw-r--r--test-requirements-pypy27.txt3
-rw-r--r--tests/buffers/bufaccess.pyx42
-rw-r--r--tests/bugs.txt1
-rw-r--r--tests/build/cythonize_options.srctree2
-rw-r--r--tests/build/depfile_package_cython.srctree61
-rw-r--r--tests/build/depfile_package_cythonize.srctree (renamed from tests/build/depfile_package.srctree)7
-rw-r--r--tests/compile/branch_hints.pyx2
-rw-r--r--tests/compile/buildenv.pyx4
-rw-r--r--tests/compile/c_directives.pyx2
-rw-r--r--tests/compile/cpp_nogil.pyx2
-rw-r--r--tests/compile/declarations.srctree2
-rw-r--r--tests/compile/excvalcheck.h6
-rw-r--r--tests/compile/fromimport.pyx24
-rw-r--r--tests/compile/fromimport_star.pyx7
-rw-r--r--tests/compile/fused_buffers.pyx16
-rw-r--r--tests/compile/fused_no_numpy.pyx13
-rw-r--r--tests/compile/fused_redeclare_T3111.pyx12
-rw-r--r--tests/compile/module_name_arg.srctree52
-rw-r--r--tests/compile/nogil.h12
-rw-r--r--tests/compile/publicapi_pxd_mix.pxd2
-rw-r--r--tests/compile/publicapi_pxd_mix.pyx2
-rw-r--r--tests/compile/pxd_mangling_names.srctree46
-rw-r--r--tests/errors/builtin_type_inheritance.pyx4
-rw-r--r--tests/errors/cfuncptr.pyx36
-rw-r--r--tests/errors/cpp_increment.pyx33
-rw-r--r--tests/errors/cpp_object_template.pyx6
-rw-r--r--tests/errors/dataclass_e1.pyx2
-rw-r--r--tests/errors/dataclass_e5.pyx21
-rw-r--r--tests/errors/dataclass_e6.pyx23
-rw-r--r--tests/errors/dataclass_w1.pyx13
-rw-r--r--tests/errors/dataclass_w1_othermod.pxd3
-rw-r--r--tests/errors/e_decorators.pyx12
-rw-r--r--tests/errors/e_excvalfunctype.pyx2
-rw-r--r--tests/errors/e_invalid_special_cython_modules.py42
-rw-r--r--tests/errors/e_nogilfunctype.pyx2
-rw-r--r--tests/errors/e_pure_cimports.pyx3
-rw-r--r--tests/errors/e_relative_cimport.pyx4
-rw-r--r--tests/errors/e_typing_errors.pyx59
-rw-r--r--tests/errors/e_typing_optional.py33
-rw-r--r--tests/errors/incomplete_varadic.pyx8
-rw-r--r--tests/errors/nogil.pyx2
-rw-r--r--tests/errors/nogilfunctype.pyx2
-rw-r--r--tests/errors/w_uninitialized.pyx4
-rw-r--r--tests/macos_cpp_bugs.txt1
-rw-r--r--tests/memoryview/cythonarray.pyx38
-rw-r--r--tests/memoryview/memoryview.pyx37
-rw-r--r--tests/memoryview/memoryview_acq_count.srctree2
-rw-r--r--tests/memoryview/memslice.pyx147
-rw-r--r--tests/memoryview/numpy_memoryview.pyx2
-rw-r--r--tests/pypy2_bugs.txt3
-rw-r--r--tests/pypy_bugs.txt3
-rw-r--r--tests/run/annotate_html.pyx3
-rw-r--r--tests/run/annotation_typing.pyx95
-rw-r--r--tests/run/append.pyx34
-rw-r--r--tests/run/binop_reverse_methods_GH2056.pyx44
-rw-r--r--tests/run/builtin_abs.pyx6
-rw-r--r--tests/run/builtin_type_inheritance_T608.pyx38
-rw-r--r--tests/run/builtin_type_inheritance_T608_py2only.pyx42
-rw-r--r--tests/run/bytearray_iter.py15
-rw-r--r--tests/run/c_file_validation.srctree72
-rw-r--r--tests/run/cdef_class_dataclass.pyx25
-rw-r--r--tests/run/cdef_setitem_T284.pyx4
-rw-r--r--tests/run/cfunc_convert.pyx27
-rw-r--r--tests/run/cfuncptr.pyx40
-rw-r--r--tests/run/complex_numbers_T305.pyx86
-rw-r--r--tests/run/coverage_cmd_src_pkg_layout.srctree177
-rw-r--r--tests/run/cpdef_void_return.pyx2
-rw-r--r--tests/run/cpp_classes.pyx2
-rw-r--r--tests/run/cpp_classes_def.pyx2
-rw-r--r--tests/run/cpp_exceptions_nogil.pyx2
-rw-r--r--tests/run/cpp_extern.srctree151
-rw-r--r--tests/run/cpp_function_lib.pxd6
-rw-r--r--tests/run/cpp_iterators.pyx154
-rw-r--r--tests/run/cpp_iterators_over_attribute_of_rvalue_support.h11
-rw-r--r--tests/run/cpp_iterators_simple.h11
-rw-r--r--tests/run/cpp_locals_directive.pyx10
-rw-r--r--tests/run/cpp_nested_classes.pyx44
-rw-r--r--tests/run/cpp_stl_associated_containers_contains_cpp20.pyx106
-rw-r--r--tests/run/cpp_stl_bit_cpp20.pyx131
-rw-r--r--tests/run/cpp_stl_function.pyx18
-rw-r--r--tests/run/cpp_stl_numeric_ops_cpp17.pyx18
-rw-r--r--tests/run/cpp_stl_numeric_ops_cpp20.pyx23
-rw-r--r--tests/run/cpp_stl_random.pyx319
-rw-r--r--tests/run/cpp_stl_string_cpp20.pyx61
-rw-r--r--tests/run/cython_no_files.srctree34
-rw-r--r--tests/run/decorators.pyx56
-rw-r--r--tests/run/delete.pyx18
-rw-r--r--tests/run/exceptionpropagation.pyx24
-rw-r--r--tests/run/exceptions_nogil.pyx2
-rw-r--r--tests/run/extern_varobject_extensions.srctree94
-rw-r--r--tests/run/extra_patma.pyx18
-rw-r--r--tests/run/funcexc_iter_T228.pyx86
-rw-r--r--tests/run/function_self.py21
-rw-r--r--tests/run/fused_cpp.pyx47
-rw-r--r--tests/run/generators_py.py17
-rw-r--r--tests/run/genexpr_arg_order.py181
-rw-r--r--tests/run/genexpr_iterable_lookup_T600.pyx18
-rw-r--r--tests/run/line_trace.pyx62
-rw-r--r--tests/run/locals.pyx10
-rw-r--r--tests/run/nogil.pyx4
-rw-r--r--tests/run/nogil_conditional.pyx4
-rw-r--r--tests/run/parallel.pyx2
-rw-r--r--tests/run/pep442_tp_finalize.pyx79
-rw-r--r--tests/run/pep442_tp_finalize_cimport.srctree67
-rw-r--r--tests/run/pep448_extended_unpacking.pyx18
-rw-r--r--tests/run/pep526_variable_annotations.py73
-rw-r--r--tests/run/pep526_variable_annotations_cy.pyx4
-rw-r--r--tests/run/pure_cdef_class_dataclass.py51
-rw-r--r--tests/run/pure_py.py15
-rw-r--r--tests/run/relative_cimport_compare.srctree327
-rw-r--r--tests/run/sequential_parallel.pyx14
-rw-r--r--tests/run/special_methods_T561.pyx41
-rw-r--r--tests/run/test_coroutines_pep492.pyx8
-rw-r--r--tests/run/test_dataclasses.pxi19
-rw-r--r--tests/run/test_dataclasses.pyx1186
-rw-r--r--tests/run/test_grammar.py118
-rw-r--r--tests/run/test_named_expressions.py4
-rw-r--r--tests/run/test_patma.py475
-rw-r--r--tests/run/trace_nogil.pyx2
-rw-r--r--tests/run/tuple_constants.pyx7
-rw-r--r--tests/run/type_inference.pyx4
-rw-r--r--tests/run/with_gil.pyx6
-rw-r--r--tests/run/with_gil_automatic.pyx6
-rw-r--r--tests/run/withnogil.pyx2
-rw-r--r--tests/testsupport/cythonarrayutil.pxi2
-rw-r--r--tests/windows_bugs_39.txt3
316 files changed, 16909 insertions, 3387 deletions
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 000000000..1c2f8aa83
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,55 @@
+name: Bug Report
+description: File a bug report
+title: "[BUG] "
+body:
+ - type: markdown
+ attributes:
+ value: |
+ **PLEASE READ THIS FIRST:**
+ - DO NOT use the bug and feature tracker for general questions and support requests.
+ Use the [`cython-users`](https://groups.google.com/g/cython-users) mailing list instead.
+ It has a wider audience, so you get more and better answers.
+ - Did you search for SIMILAR ISSUES already?
+ Please do, it helps to save us precious time that we otherwise could not invest into development.
+ - Did you try the LATEST MASTER BRANCH or pre-release?
+ It might already have what you want to report.
+ Specifically, the legacy stable 0.29.x release series receives only important low-risk bug fixes.
+ Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes
+ - type: textarea
+ id: describe
+ attributes:
+ label: "Describe the bug"
+ description: "A clear and concise description of what the bug is."
+ placeholder: "Tell us what you see!"
+ validations:
+ required: true
+ - type: textarea
+ id: reproduce
+ attributes:
+ label: "Code to reproduce the behaviour:"
+ value: |
+ ```cython
+ # example code
+ ```
+ - type: textarea
+ id: expected
+ attributes:
+ label: "Expected behaviour"
+ description: "A clear and concise description of what you expected to happen."
+ - type: textarea
+ id: environment
+ attributes:
+ label: Environment
+ description: "please complete the following information"
+ value: |
+ OS: [e.g. Linux, Windows, macOS]
+ Python version [e.g. 3.10.2]
+ Cython version [e.g. 3.0.0a11]
+ validations:
+ required: true
+ - type: textarea
+ id: context
+ attributes:
+ label: Additional context
+ description: Add any other context about the problem here.
+
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index c35dfae51..000000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,41 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this project
-title: "[ENH] "
-labels: ''
-assignees: ''
-
----
-
-<!--
-**Note:**
-- DO NOT use the bug and feature tracker for general questions and support requests.
- Use the `cython-users` mailing list instead.
- It has a wider audience, so you get more and better answers.
-- Did you search for similar issues already?
- Please do, it helps to save us precious time that we otherwise could not invest into development.
-- Did you try the latest master branch or pre-release?
- It might already have what you want to report.
- Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes.
--->
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. In my code, I would like to do [...]
-```cython
-# add use case related code here
-```
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen, including code examples if applicable.
-```cython
-# add a proposed code/syntax example here
-```
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-```cython
-# add alternative code/syntax proposals here
-```
-
-**Additional context**
-Add any other context about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 000000000..3d46fe3bc
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,46 @@
+name: Feature request
+description: Suggest an idea for this project
+title: "[ENH] "
+body:
+ - type: markdown
+ attributes:
+ value: |
+ **PLEASE READ THIS FIRST:**
+ - DO NOT use the bug and feature tracker for general questions and support requests.
+ Use the [`cython-users`](https://groups.google.com/g/cython-users) mailing list instead.
+ It has a wider audience, so you get more and better answers.
+ - Did you search for SIMILAR ISSUES already?
+ Please do, it helps to save us precious time that we otherwise could not invest into development.
+ - Did you try the LATEST MASTER BRANCH or pre-release?
+ It might already have what you want to report.
+ Specifically, the legacy stable 0.29.x release series receives only important low-risk bug fixes.
+ Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes
+ - type: textarea
+ id: problem
+ attributes:
+ label: "Is your feature request related to a problem? Please describe."
+ description: "A clear and concise description of what the problem is."
+ value: |
+ In my code, I would like to do [...]
+ ```cython
+ # add use case related code here
+ ```
+ validations:
+ required: true
+ - type: textarea
+ id: solution
+ attributes:
+ label: "Describe the solution you'd like."
+ description: "A clear and concise description of what you want to happen, including code examples if applicable."
+ placeholder: add a proposed code/syntax example here
+ - type: textarea
+ id: alternatives
+ attributes:
+ label: "Describe alternatives you've considered."
+ description: "A clear and concise description of any alternative solutions or features you've considered."
+ placeholder: "add alternative code/syntax proposals here"
+ - type: textarea
+ id: context
+ attributes:
+ label: "Additional context"
+ description: "Add any other context about the feature request here."
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/other.md
index be0b183dc..95aa5153e 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/other.md
@@ -1,14 +1,14 @@
---
-name: Bug report
-about: Create a report to help us improve
-title: "[BUG] "
+name: Other
+about: Anything that does not qualify as either "bug" or "feature request". DO NOT post support requests here.
+title: ""
labels: ''
assignees: ''
---
<!--
-**PLEASE READ THIS FIRST:**
+**Note:**
- DO NOT use the bug and feature tracker for general questions and support requests.
Use the `cython-users` mailing list instead.
It has a wider audience, so you get more and better answers.
@@ -18,22 +18,3 @@ assignees: ''
It might already have what you want to report.
Also see the [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) regarding recent changes.
-->
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Code to reproduce the behaviour:
-```cython
-```
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Environment (please complete the following information):**
- - OS: [e.g. Linux, Windows, macOS]
- - Python version [e.g. 3.8.4]
- - Cython version [e.g. 0.29.18]
-
-**Additional context**
-Add any other context about the problem here.
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f5f645555..4dd771ad9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,6 +1,24 @@
name: CI
-on: [push, pull_request, workflow_dispatch]
+on:
+ push:
+ paths:
+ - '**'
+ - '!.github/**'
+ - '.github/workflows/ci.yml'
+ pull_request:
+ paths:
+ - '**'
+ - '!.github/**'
+ - '.github/workflows/ci.yml'
+ workflow_dispatch:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
+ cancel-in-progress: true
+
+permissions:
+ contents: read # to fetch code (actions/checkout)
jobs:
ci:
@@ -23,7 +41,7 @@ jobs:
#
# FIXME: 'cpp' tests seems to fail due to compilation errors (numpy_pythran_unit)
# in all python versions and test failures (builtin_float) in 3.5<
- os: [ubuntu-18.04]
+ os: [windows-2019, ubuntu-18.04]
backend: [c, cpp]
python-version:
- "2.7"
@@ -39,24 +57,22 @@ jobs:
env: [{}]
include:
- # Temporary - Allow failure on Python 3.11-dev jobs until they are considered stable
- - python-version: 3.11-dev
- allowed_failure: true
+ # Temporary - Allow failure on Python 3.12-dev jobs until they are in beta (feature frozen)
#- python-version: 3.12-dev
# allowed_failure: true
# Ubuntu sub-jobs:
# ================
- # GCC 11
+ # GCC 11 (with latest language standards)
- os: ubuntu-18.04
python-version: 3.9
backend: c
- env: { GCC_VERSION: 11 }
+ env: { GCC_VERSION: 11, EXTRA_CFLAGS: "-std=c17" }
extra_hash: "-gcc11"
- os: ubuntu-18.04
python-version: 3.9
backend: cpp
- env: { GCC_VERSION: 11 }
+ env: { GCC_VERSION: 11, EXTRA_CFLAGS: "-std=c++20" }
extra_hash: "-gcc11"
# compile all modules
- os: ubuntu-18.04
@@ -70,12 +86,12 @@ jobs:
env: { CYTHON_COMPILE_ALL: 1 }
extra_hash: "-all"
- os: ubuntu-18.04
- python-version: 3.9
+ python-version: "3.10"
backend: c
env: { CYTHON_COMPILE_ALL: 1 }
extra_hash: "-all"
- os: ubuntu-18.04
- python-version: 3.9
+ python-version: "3.10"
backend: cpp
env: { CYTHON_COMPILE_ALL: 1 }
extra_hash: "-all"
@@ -147,66 +163,81 @@ jobs:
python-version: pypy-3.7
backend: c
env: { NO_CYTHON_COMPILE: 1 }
- # Coverage - Disabled due to taking too long to run
- # - os: ubuntu-18.04
- # python-version: 3.7
- # backend: "c,cpp"
- # env: { COVERAGE: 1 }
- # extra_hash: '-coverage'
+ # Coverage
+ - os: ubuntu-18.04
+ python-version: 3.8
+ backend: "c,cpp"
+ env: { COVERAGE: 1 }
+ extra_hash: '-coverage'
+
+ - os: windows-2019
+ allowed_failure: true
# MacOS sub-jobs
# ==============
# (C-only builds are used to create wheels)
- - os: macos-10.15
+ - os: macos-11
python-version: 2.7
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 2.7
backend: cpp
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.5
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.6
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.7
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.15 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.8
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.15 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.9
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: 3.9
backend: cpp
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: "3.10"
backend: c
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
- - os: macos-10.15
+ - os: macos-11
python-version: "3.10"
backend: cpp
env: { MACOSX_DEPLOYMENT_TARGET: 10.14 }
+ exclude:
+ # fails due to lack of a compatible compiler
+ - os: windows-2019
+ python-version: 2.7
+ - os: windows-2019
+ python-version: 3.4
+
+ # cpp specific test fails
+ - os: windows-2019
+ python-version: 3.5
+ backend: cpp
+
+
# This defaults to 360 minutes (6h) which is way too long and if a test gets stuck, it can block other pipelines.
- # From testing, the runs tend to take ~20/~30 minutes, so a limit of 40 minutes should be enough. This can always be
- # changed in the future if needed.
- timeout-minutes: 40
+ # From testing, the runs tend to take ~20 minutes for ubuntu / macos and ~40 for windows,
+ # so a limit of 50 minutes should be enough. This can always be changed in the future if needed.
+ timeout-minutes: 50
runs-on: ${{ matrix.os }}
env:
BACKEND: ${{ matrix.backend }}
- OS_NAME: ${{ matrix.os }}
PYTHON_VERSION: ${{ matrix.python-version }}
GCC_VERSION: 8
USE_CCACHE: 1
@@ -226,7 +257,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Cache [ccache]
- uses: pat-s/always-upload-cache@v3.0.1
+ uses: pat-s/always-upload-cache@v3.0.11
if: startsWith(runner.os, 'Linux')
with:
path: ~/.ccache
@@ -247,7 +278,7 @@ jobs:
- name: Upload wheels
uses: actions/upload-artifact@v3
with:
- name: wheels-${{ runner.os }}
+ name: wheels-${{ runner.os }}${{ matrix.extra_hash }}
path: dist/*.whl
if-no-files-found: ignore
@@ -279,31 +310,31 @@ jobs:
name: pycoverage_html
path: coverage-report-html
-# cycoverage:
-# runs-on: ubuntu-18.04
-#
-# env:
-# BACKEND: c,cpp
-# OS_NAME: ubuntu-18.04
-# PYTHON_VERSION: 3.9
-#
-# steps:
-# - name: Checkout repo
-# uses: actions/checkout@v2
-# with:
-# fetch-depth: 1
-#
-# - name: Setup python
-# uses: actions/setup-python@v2
-# with:
-# python-version: 3.9
-#
-# - name: Run Coverage
-# env: { COVERAGE: 1 }
-# run: bash ./Tools/ci-run.sh
-#
-# - name: Upload Coverage Report
-# uses: actions/upload-artifact@v2
-# with:
-# name: cycoverage_html
-# path: coverage-report-html
+ cycoverage:
+ runs-on: ubuntu-18.04
+
+ env:
+ BACKEND: c,cpp
+ OS_NAME: ubuntu-18.04
+ PYTHON_VERSION: 3.9
+
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 1
+
+ - name: Setup python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+
+ - name: Run Coverage
+ env: { COVERAGE: 1 }
+ run: bash ./Tools/ci-run.sh
+
+ - name: Upload Coverage Report
+ uses: actions/upload-artifact@v2
+ with:
+ name: cycoverage_html
+ path: coverage-report-html
diff --git a/.github/workflows/wheel-manylinux.yml b/.github/workflows/wheel-manylinux.yml
deleted file mode 100644
index 40ef7ad47..000000000
--- a/.github/workflows/wheel-manylinux.yml
+++ /dev/null
@@ -1,91 +0,0 @@
-name: Linux wheel build
-
-on:
- release:
- types: [created]
-
-jobs:
- python:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
-
- - name: Set up Python
- uses: actions/setup-python@v3
- with:
- python-version: "3.10"
-
- - name: Install build dependencies
- run: pip install -U "setuptools<60" pip wheel
-
- - name: Make sdist and Python wheel
- run: make sdist pywheel
-
- - name: Release
- uses: softprops/action-gh-release@v1
- if: startsWith(github.ref, 'refs/tags/')
- with:
- files: |
- dist/*.tar.gz
- dist/*-none-any.whl
-
- - name: Upload sdist
- uses: actions/upload-artifact@v3
- with:
- name: sdist
- path: dist/*.tar.gz
- if-no-files-found: ignore
-
- - name: Upload Python wheel
- uses: actions/upload-artifact@v3
- with:
- name: wheel-Python
- path: dist/*-none-any.whl
- if-no-files-found: ignore
-
- binary:
- strategy:
- # Allows for matrix sub-jobs to fail without canceling the rest
- fail-fast: false
-
- matrix:
- image:
- - manylinux1_x86_64
- - manylinux1_i686
- - musllinux_1_1_x86_64
- - musllinux_1_1_aarch64
- - manylinux_2_24_x86_64
- - manylinux_2_24_i686
- - manylinux_2_24_aarch64
-
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v3
-
- - name: Set up Python 3.10
- uses: actions/setup-python@v3
- with:
- python-version: "3.10"
-
- - name: Building wheel
- run: |
- make sdist wheel_${{ matrix.image }}
-
- - name: Copy wheels in dist
- run: cp wheelhouse*/*.whl dist/
-
- - name: Release
- uses: softprops/action-gh-release@v1
- if: startsWith(github.ref, 'refs/tags/')
- with:
- files: |
- dist/*manylinux*.whl
- dist/*musllinux*.whl
-
- - name: Archive Wheels
- uses: actions/upload-artifact@v3
- with:
- name: ${{ matrix.image }}
- path: dist/*m[au][ns][yl]linux*.whl
- if-no-files-found: ignore
diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml
new file mode 100644
index 000000000..910d86a4e
--- /dev/null
+++ b/.github/workflows/wheels.yml
@@ -0,0 +1,151 @@
+# Workflow to build wheels for upload to PyPI.
+#
+# In an attempt to save CI resources, wheel builds do
+# not run on each push but only weekly and for releases.
+# Wheel builds can be triggered from the Actions page
+# (if you have the perms) on a commit to master.
+#
+# Alternatively, if you would like to trigger wheel builds
+# on a pull request, the labels that trigger builds are:
+# - Build System
+
+name: Wheel Builder
+on:
+ release:
+ types: [created]
+ schedule:
+ # ┌───────────── minute (0 - 59)
+ # │ ┌───────────── hour (0 - 23)
+ # │ │ ┌───────────── day of the month (1 - 31)
+ # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
+ # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
+ # │ │ │ │ │
+ - cron: "42 1 * * 4"
+ pull_request:
+ types: [labeled, opened, synchronize, reopened]
+ paths:
+ #- Cython/Build/**
+ - .github/workflows/wheels.yml
+ - MANIFEST.in
+ - setup.*
+ workflow_dispatch:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
+permissions:
+ contents: write # to create GitHub release (softprops/action-gh-release)
+
+jobs:
+ build_wheels:
+ name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+ if: >-
+ github.event_name == 'release' ||
+ github.event_name == 'schedule' ||
+ github.event_name == 'workflow_dispatch' ||
+ (github.event_name == 'pull_request' &&
+ contains(github.event.pull_request.labels.*.name, 'Build System'))
+ runs-on: ${{ matrix.buildplat[0] }}
+ strategy:
+ # Ensure that a wheel builder finishes even if another fails
+ fail-fast: false
+ matrix:
+ # Github Actions doesn't support pairing matrix values together, let's improvise
+ # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026
+ buildplat:
+ - [ubuntu-20.04, manylinux_x86_64]
+ - [ubuntu-20.04, manylinux_aarch64]
+ - [ubuntu-20.04, manylinux_i686]
+ - [ubuntu-20.04, musllinux_x86_64]
+ - [ubuntu-20.04, musllinux_aarch64]
+ - [macos-11, macosx_*]
+ - [windows-2019, win_amd64]
+ - [windows-2019, win32]
+ python: ["cp36", "cp37", "cp38", "cp39", "cp310", "cp311"] # Note: Wheels not needed for PyPy
+ steps:
+ - name: Checkout Cython
+ uses: actions/checkout@v3
+
+ - name: Set up QEMU
+ if: contains(matrix.buildplat[1], '_aarch64')
+ uses: docker/setup-qemu-action@v1
+ with:
+ platforms: all
+
+ - name: Build wheels
+ uses: pypa/cibuildwheel@v2.11.2
+ env:
+ # TODO: Build Cython with the compile-all flag?
+ CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+ CIBW_PRERELEASE_PYTHONS: True
+ CIBW_ARCHS_LINUX: auto aarch64
+ CIBW_ENVIRONMENT: CFLAGS='-O3 -g0 -mtune=generic -pipe -fPIC' LDFLAGS='-fPIC'
+ # TODO: Cython tests take a long time to complete
+ # consider running a subset in the future?
+ #CIBW_TEST_COMMAND: python {project}/runtests.py -vv --no-refnanny
+
+ - name: Release
+ uses: softprops/action-gh-release@v1
+ if: startsWith(github.ref, 'refs/tags/')
+ with:
+ files: |
+ dist/*manylinux*.whl
+ dist/*musllinux*.whl
+ dist/*macos*.whl
+ dist/*win32*.whl
+ dist/*win_amd64*.whl
+ prerelease: >-
+ ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b')
+ || contains(github.ref_name, 'rc') || contains(github.ref_name, 'dev') }}
+
+ - uses: actions/upload-artifact@v3
+ with:
+ name: ${{ matrix.python }}-${{ startsWith(matrix.buildplat[1], 'macosx') && 'macosx' || matrix.buildplat[1] }}
+ path: ./wheelhouse/*.whl
+
+ build_sdist_pure_wheel:
+ name: Build sdist and pure wheel
+ if: >-
+ github.event_name == 'release' ||
+ github.event_name == 'schedule' ||
+ github.event_name == 'workflow_dispatch' ||
+ (github.event_name == 'pull_request' &&
+ contains(github.event.pull_request.labels.*.name, 'Build System'))
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Cython
+ uses: actions/checkout@v3
+
+ # Used to push the built wheels
+ - uses: actions/setup-python@v3
+ with:
+ # Build sdist on lowest supported Python
+ python-version: '3.8'
+
+ - name: Build sdist
+ run: |
+ pip install --upgrade wheel setuptools
+ python setup.py sdist
+ python setup.py bdist_wheel --no-cython-compile --universal
+
+ - uses: actions/upload-artifact@v3
+ with:
+ name: sdist
+ path: ./dist/*.tar.gz
+
+ - uses: actions/upload-artifact@v3
+ with:
+ name: pure-wheel
+ path: ./dist/*.whl
+
+ - name: Release
+ uses: softprops/action-gh-release@v1
+ if: startsWith(github.ref, 'refs/tags/')
+ with:
+ files: |
+ dist/*.tar.gz
+ dist/*-none-any.whl
+ prerelease: >-
+ ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b')
+ || contains(github.ref_name, 'rc') || contains(github.ref_name, 'dev') }}
diff --git a/.gitignore b/.gitignore
index deb4c6fce..18940cd9a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
*.pyc
+*.pyd
*.pyo
__pycache__
*.so
@@ -9,17 +10,21 @@ __pycache__
.*cache*/
*venv*/
-Cython/Compiler/*.c
-Cython/Plex/*.c
-Cython/Runtime/refnanny.c
-Cython/Tempita/*.c
-Cython/*.c
-Cython/*.html
-Cython/*/*.html
-
-Tools/*.elc
-Demos/*.html
-Demos/*/*.html
+/Cython/Build/*.c
+/Cython/Compiler/*.c
+/Cython/Debugger/*.c
+/Cython/Distutils/*.c
+/Cython/Parser/*.c
+/Cython/Plex/*.c
+/Cython/Runtime/refnanny.c
+/Cython/Tempita/*.c
+/Cython/*.c
+/Cython/*.html
+/Cython/*/*.html
+
+/Tools/*.elc
+/Demos/*.html
+/Demos/*/*.html
/TEST_TMP/
/build/
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 551a38cb7..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-os: linux
-language: python
-
-addons:
- apt:
- packages:
- - gdb
- - python-dbg
- - python3-dbg
- - libzmq-dev # needed by IPython/Tornado
- #- gcc-8
- #- g++-8
-
-cache:
- pip: true
- directories:
- - $HOME/.ccache
-
-env:
- global:
- - USE_CCACHE=1
- - CCACHE_SLOPPINESS=pch_defines,time_macros
- - CCACHE_COMPRESS=1
- - CCACHE_MAXSIZE=250M
- - PATH="/usr/lib/ccache:$PATH"
- - PYTHON_VERSION=3.8
- - OS_NAME=ubuntu
-
-python: 3.8
-
-matrix:
- include:
- - arch: arm64
- env: BACKEND=c
- - arch: arm64
- env: BACKEND=cpp
- - arch: ppc64le
- env: BACKEND=c
- - arch: ppc64le
- env: BACKEND=cpp
- # Disabled due to test errors
- # - arch: s390x
- # env: BACKEND=c
- # - arch: s390x
- # env: BACKEND=cpp
-
-script:
- - bash ./Tools/ci-run.sh
diff --git a/CHANGES.rst b/CHANGES.rst
index 24cb4e23b..41fdc1aac 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -2,7 +2,124 @@
Cython Changelog
================
-3.0.0 alpha 11 (2022-0?-??)
+3.0.0 alpha 12 (2022-??-??)
+===========================
+
+Features added
+--------------
+
+* Cython implemented C functions now propagate exceptions by default, rather than
+ swallowing them in non-object returning function if the user forgot to add an
+ ``except`` declaration to the signature. This was a long-standing source of bugs,
+ but can require adding the ``noexcept`` declaration to existing functions if
+ exception propagation is really undesired.
+ (Github issue :issue:`4280`)
+
+* `PEP-614 <https://peps.python.org/pep-0614/>`_:
+ decorators can now be arbitrary Python expressions.
+ (Github issue :issue:`4570`)
+
+* Bound C methods can now coerce to Python objects.
+ (Github issue :issue:`4890`)
+
+* ``reversed()`` can now be used together with C++ iteration.
+ Patch by Chia-Hsiang Cheng. (Github issue :issue:`5002`)
+
+* Standard C/C++ atomic operations are now used for memory views, if available.
+ (Github issue :issue:`4925`)
+
+* ``cythonize --help`` now also prints information about the supported environment variables.
+ Patch by Matus Valo. (Github issue :issue:`1711`)
+
+* Declarations were added for the bit operations in C++20.
+ Patch by Jonathan Helgert. (Github issue :issue:`4962`)
+
+Bugs fixed
+----------
+
+* Generator expressions and comprehensions now look up their outer-most iterable
+ on creation, as Python does, and not later on start, as they did previously.
+ (Github issue :issue:`1159`)
+
+* Iterating over memoryviews in generator expressions could leak a buffer reference.
+ (Github issue :issue:`4968`)
+
+* ``__del__`` finaliser methods were not always called if they were only inherited.
+ (Github issue :issue:`4995`)
+
+* ``cdef public`` functions declared in .pxd files could use an incorrectly mangled C name.
+ Patch by EpigeneMax. (Github issue :issue:`2940`)
+
+* C++ post-increment/-decrement operators were not correctly looked up on declared C++
+ classes, thus allowing Cython declarations to be missing for them and incorrect C++
+ code to be generated.
+ Patch by Max Bachmann. (Github issue :issue:`4536`)
+
+* C++ iteration more safely stores the iterable in temporary variables.
+ Patch by Xavier. (Github issue :issue:`3828`)
+
+* C++ references did not work on fused types.
+ (Github issue :issue:`4717`)
+
+* Nesting fused types in other fused types could fail to specialise the inner type.
+ (Github issue :issue:`4725`)
+
+* The special methods ``__matmul__``, ``__truediv__``, ``__floordiv__`` failed to type
+ their ``self`` argument.
+ (Github issue :issue:`5067`)
+
+* Coverage analysis failed in projects with a separate source subdirectory.
+ Patch by Sviatoslav Sydorenko and Ruben Vorderman. (Github issue :issue:`3636`)
+
+* The ``@dataclass`` directive was accidentally inherited by methods and subclasses.
+ (Github issue :issue:`4953`)
+
+* Some issues with Cython ``@dataclass`` arguments, hashing and ``repr()`` were resolved.
+ (Github issue :issue:`4956`)
+
+* Relative imports failed in compiled ``__init__.py`` package modules.
+ Patch by Matus Valo. (Github issue :issue:`3442`)
+
+* Some old usages of the deprecated Python ``imp`` module were replaced with ``importlib``.
+ Patch by Matus Valo. (Github issue :issue:`4640`)
+
+* Invalid and misspelled ``cython.*`` module names were not reported as errors.
+ (Github issue :issue:`4947`)
+
+* Extended glob paths with ``/**/`` and ``\**\`` failed on Windows.
+
+* Annotated HTML generation was missing newlines in 3.0.0a11.
+ (Github issue :issue:`4945`)
+
+* Some parser issues were resolved.
+ (Github issue :issue:`4992`)
+
+* Some C/C++ warnings were resolved.
+ Patches by Max Bachmann et al.
+ (Github issues :issue:`5004`, :issue:`5005`, :issue:`5019`, :issue:`5029`)
+
+* Intel C compilers could complain about unsupported gcc pragmas.
+ Patch by Ralf Gommers. (Github issue :issue:`5052`)
+
+* Includes all bug-fixes and features from the 0.29 maintenance branch
+ up to the :ref:`0.29.33` release.
+
+Other changes
+-------------
+
+* The undocumented, untested and apparently useless syntax
+ ``from somemodule cimport class/struct/union somename`` was removed. The type
+ modifier is not needed here and a plain ``cimport`` of the name will do.
+ (Github issue :issue:`4904`)
+
+* The wheel building process was migrated to use the ``cibuildwheel`` tool.
+ Patch by Thomas Li. (Github issue :issue:`4736`)
+
+* Wheels now include a compiled parser again, which increases their size a little
+ but gives about a 10% speed-up when running Cython.
+
+
+3.0.0 alpha 11 (2022-07-31)
===========================
Features added
@@ -10,24 +127,43 @@ Features added
* A new decorator ``@cython.dataclasses.dataclass`` was implemented that provides
compile time dataclass generation capabilities to ``cdef`` classes (extension types).
- Patch by David Woods. (Github issue :issue:`2903`)
+ Patch by David Woods. (Github issue :issue:`2903`). ``kw_only`` dataclasses
+ added by Yury Sokov. (Github issue :issue:`4794`)
* Named expressions (PEP 572) aka. assignment expressions (aka. the walrus operator
``:=``) were implemented.
Patch by David Woods. (Github issue :issue:`2636`)
-* Some C++ library declarations were extended and fixed.
- Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion.
- (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`, :issue:`4751`)
+* Context managers can be written in parentheses.
+ Patch by David Woods. (Github issue :issue:`4814`)
-* The ``cythonize`` command has a new option ``-M`` to generate ``.dep`` dependency
- files for the compilation unit. This can be used by external build tools to track
- these dependencies. Already available in Cython :ref:`0.29.27`.
- Patch by Evgeni Burovski. (Github issue :issue:`1214`)
+* Cython avoids raising ``StopIteration`` in ``__next__`` methods when possible.
+ Patch by David Woods. (Github issue :issue:`3447`)
+
+* Some C++ and CPython library declarations were extended and fixed.
+ Patches by Max Bachmann, Till Hoffmann, Julien Jerphanion, Wenjun Si.
+ (Github issues :issue:`4530`, :issue:`4528`, :issue:`4710`, :issue:`4746`,
+ :issue:`4751`, :issue:`4818`, :issue:`4762`, :issue:`4910`)
+
+* The ``cythonize`` and ``cython`` commands have a new option ``-M`` / ``--depfile``
+ to generate ``.dep`` dependency files for the compilation unit. This can be used
+ by external build tools to track these dependencies.
+ The ``cythonize`` option was already available in Cython :ref:`0.29.27`.
+ Patches by Evgeni Burovski and Eli Schwartz. (Github issue :issue:`1214`)
* ``cythonize()`` and the corresponding CLI command now regenerate the output files
also when they already exist but were generated by a different Cython version.
+* Memory views and the internal Cython array type now identify as ``collections.abc.Sequence``.
+ Patch by David Woods. (Github issue :issue:`4817`)
+
+* Cython generators and coroutines now identify as ``CO_ASYNC_GENERATOR``,
+ ``CO_COROUTINE`` and ``CO_GENERATOR`` accordingly.
+ (Github issue :issue:`4902`)
+
+* Memory views can use atomic CPU instructions instead of locks in more cases.
+ Patch by Sam Gross. (Github issue :issue:`4912`)
+
* The environment variable ``CYTHON_FORCE_REGEN=1`` can be used to force ``cythonize``
to regenerate the output files regardless of modification times and changes.
@@ -35,6 +171,10 @@ Features added
smaller set of Cython's own modules, which can be used to reduce the package
and install size.
+* Improvements to ``PyTypeObject`` definitions in pxd wrapping of libpython.
+ Patch by John Kirkham. (Github issue :issue:`4699`)
+
+
Bugs fixed
----------
@@ -44,16 +184,33 @@ Bugs fixed
* Exceptions within for-loops that run over memoryviews could lead to a ref-counting error.
Patch by David Woods. (Github issue :issue:`4662`)
+* Using memoryview arguments in closures of inner functions could lead to ref-counting errors.
+ Patch by David Woods. (Github issue :issue:`4798`)
+
* Several optimised string methods failed to accept ``None`` as arguments to their options.
Test patch by Kirill Smelkov. (Github issue :issue:`4737`)
+* A regression in 3.0.0a10 was resolved that prevented property setter methods from
+ having the same name as their value argument.
+ Patch by David Woods. (Github issue :issue:`4836`)
+
* Typedefs for the ``bint`` type did not always behave like ``bint``.
- Patch by 0dminnimda. (Github issue :issue:`4660`)
+ Patch by Nathan Manville and 0dminnimda. (Github issue :issue:`4660`)
* The return type of a fused function is no longer ignored for function pointers,
since it is relevant when passing them e.g. as argument into other fused functions.
Patch by David Woods. (Github issue :issue:`4644`)
+* The ``__self__`` attribute of fused functions reports its availability correctly
+ with ``hasattr()``. Patch by David Woods.
+ (Github issue :issue:`4808`)
+
+* ``pyximport`` no longer uses the deprecated ``imp`` module.
+ Patch by Matus Valo. (Github issue :issue:`4560`)
+
+* ``pyximport`` failed for long filenames on Windows.
+ Patch by Matti Picus. (Github issue :issue:`4630`)
+
* The generated C code failed to compile in CPython 3.11a4 and later.
(Github issue :issue:`4500`)
@@ -65,7 +222,31 @@ Bugs fixed
* A work-around for StacklessPython < 3.8 was disabled in Py3.8 and later.
(Github issue :issue:`4329`)
-* Includes all bug-fixes from the :ref:`0.29.30` release.
+* Improve conversion between function pointers with non-identical but
+ compatible exception specifications. Patches by David Woods.
+ (Github issues :issue:`4770`, :issue:`4689`)
+
+* The runtime size check for imported ``PyVarObject`` types was improved
+ to reduce false positives and adapt to Python 3.11.
+ Patch by David Woods. (Github issues :issue:`4827`, :issue:`4894`)
+
+* The generated modules no longer import NumPy internally when using
+ fused types but no memoryviews.
+ Patch by David Woods. (Github issue :issue:`4935`)
+
+* Improve compatibility with forthcoming CPython 3.12 release.
+
+* Limited API C preprocessor warning is compatible with MSVC. Patch by
+ Victor Molina Garcia. (Github issue :issue:`4826`)
+
+* Some C compiler warnings were fixed.
+ Patch by mwtian. (Github issue :issue:`4831`)
+
+* The parser allowed some invalid spellings of ``...``.
+ Patch by 0dminnimda. (Github issue :issue:`4868`)
+
+* Includes all bug-fixes and features from the 0.29 maintenance branch
+ up to the :ref:`0.29.32` release.
Other changes
-------------
@@ -76,9 +257,13 @@ Other changes
for users who did not expect ``None`` to be allowed as input. To allow ``None``, use
``typing.Optional`` as in ``func(x: Optional[list])``. ``None`` is also automatically
allowed when it is used as default argument, i.e. ``func(x: list = None)``.
- Note that, for backwards compatibility reasons, this does not apply when using Cython's
- C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always.
- (Github issues :issue:`3883`, :issue:`2696`)
+ ``int`` and ``float`` are now also recognised in type annotations and restrict the
+ value type at runtime. They were previously ignored.
+ Note that, for backwards compatibility reasons, the new behaviour does not apply when using
+ Cython's C notation, as in ``func(list x)``. Here, ``None`` is still allowed, as always.
+ Also, the ``annotation_typing`` directive can now be enabled and disabled more finely
+ within the module.
+ (Github issues :issue:`3883`, :issue:`2696`, :issue:`4669`, :issue:`4606`, :issue:`4886`)
* The compile-time ``DEF`` and ``IF`` statements are deprecated and generate a warning.
They should be replaced with normal constants, code generation or C macros.
@@ -87,6 +272,10 @@ Other changes
* Reusing an extension type attribute name as a method name is now an error.
Patch by 0dminnimda. (Github issue :issue:`4661`)
+* Improve compatibility between classes pickled in Cython 3.0 and 0.29.x
+ by accepting MD5, SHA-1 and SHA-256 checksums.
+ (Github issue :issue:`4680`)
+
3.0.0 alpha 10 (2022-01-06)
===========================
@@ -977,6 +1166,120 @@ Other changes
.. _`PEP-563`: https://www.python.org/dev/peps/pep-0563
.. _`PEP-479`: https://www.python.org/dev/peps/pep-0479
+.. _0.29.33:
+
+0.29.33 (????-??-??)
+====================
+
+Features added
+--------------
+
+* The ``cythonize`` and ``cython`` commands have a new option ``-M`` / ``--depfile``
+ to generate ``.dep`` dependency files for the compilation unit. This can be used
+ by external build tools to track these dependencies.
+ The ``cythonize`` option was already available in Cython :ref:`0.29.27`.
+ Patches by Evgeni Burovski and Eli Schwartz. (Github issue :issue:`1214`)
+
+Bugs fixed
+----------
+
+* Fixed various compiler warnings. One patch by Lisandro Dalcin.
+ (Github issues :issue:`4948`, :issue:`5086`)
+
+* Fixed error when calculating complex powers of negative numbers.
+ (Github issue :issue:`5014`)
+
+* Corrected a small mis-formatting of exception messages on Python 2.
+ (Github issue :issue:`5018`)
+
+Other changes
+-------------
+
+* The undocumented, untested and apparently useless syntax
+ ``from somemodule cimport class/struct/union somename`` was deprecated
+ in anticipation of its removal in Cython 3. The type
+ modifier is not needed here and a plain ``cimport`` of the name will do.
+ (Github issue :issue:`4905`)
+
+* Properly disable generation of descriptor docstrings on PyPy since
+ they cause crashes. It was previously disabled, but only accidentally
+ via a typo. Patch by Matti Picus.
+ (Github issue :issue:`5083`)
+
+
+
+.. _0.29.32:
+
+0.29.32 (2022-07-29)
+====================
+
+Bugs fixed
+----------
+
+* Revert "Using memoryview typed arguments in inner functions is now rejected as unsupported."
+ Patch by David Woods. (Github issue :issue:`4798`)
+
+* ``from module import *`` failed in 0.29.31 when using memoryviews.
+ Patch by David Woods. (Github issue :issue:`4927`)
+
+
+.. _0.29.31:
+
+0.29.31 (2022-07-27)
+====================
+
+Features added
+--------------
+
+* A new argument ``--module-name`` was added to the ``cython`` command to
+ provide the (one) exact target module name from the command line.
+ Patch by Matthew Brett and h-vetinari. (Github issue :issue:`4906`)
+
+Bugs fixed
+----------
+
+* Use ``importlib.util.find_spec()`` instead of the deprecated ``importlib.find_loader()``
+ function when setting up the package path at import-time.
+ Patch by Matti Picus. (Github issue :issue:`4764`)
+
+* Require the C compiler to support the two-arg form of ``va_start``
+ on Python 3.10 and higher.
+ Patch by Thomas Caswell. (Github issue :issue:`4820`)
+
+* Make ``fused_type`` subscriptable in Shadow.py.
+ Patch by Pfebrer. (Github issue :issue:`4842`)
+
+* Fix the incorrect code generation of the target type in ``bytearray`` loops.
+ Patch by Kenrick Everett. (Github issue :issue:`4108`)
+
+* Atomic refcounts for memoryviews were not used on some GCC versions by accident.
+ Patch by Sam Gross. (Github issue :issue:`4915`)
+
+* Silence some GCC ``-Wconversion`` warnings in C utility code.
+ Patch by Lisandro Dalcin. (Github issue :issue:`4854`)
+
+* Tuple multiplication was ignored in expressions such as ``[*(1,) * 2]``.
+ Patch by David Woods. (Github issue :issue:`4864`)
+
+* Calling ``append`` methods on extension types could fail to find the method
+ in some cases.
+ Patch by David Woods. (Github issue :issue:`4828`)
+
+* Ensure that object buffers (e.g. ``ndarray[object, ndim=1]``) containing
+ ``NULL`` pointers are safe to use, returning ``None`` instead of the ``NULL``
+ pointer.
+ Patch by Sebastian Berg. (Github issue :issue:`4859`)
+
+* Using memoryview typed arguments in inner functions is now rejected as unsupported.
+ Patch by David Woods. (Github issue :issue:`4798`)
+
+* Compilation could fail on systems (e.g. FIPS) that block MD5 checksums at runtime.
+ (Github issue :issue:`4909`)
+
+* Experimental adaptations for the CPython "nogil" fork were added.
+ Note that there is no official support for this in Cython 0.x.
+ Patch by Sam Gross. (Github issue :issue:`4912`)
+
.. _0.29.30:
@@ -988,7 +1291,7 @@ Bugs fixed
* The GIL handling changes in 0.29.29 introduced a regression where
objects could be deallocated without holding the GIL.
- (Github issue :issue`4796`)
+ (Github issue :issue:`4796`)
.. _0.29.29:
@@ -1002,7 +1305,7 @@ Features added
* Avoid acquiring the GIL at the end of nogil functions.
This change was backported in order to avoid generating wrong C code
that would trigger C compiler warnings with tracing support enabled.
- Backport by Oleksandr Pavlyk. (Github issue :issue`4637`)
+ Backport by Oleksandr Pavlyk. (Github issue :issue:`4637`)
Bugs fixed
----------
@@ -1018,15 +1321,15 @@ Bugs fixed
* Cython now correctly generates Python methods for both the provided regular and
reversed special numeric methods of extension types.
- Patch by David Woods. (Github issue :issue`4750`)
+ Patch by David Woods. (Github issue :issue:`4750`)
* Calling unbound extension type methods without arguments could raise an
``IndexError`` instead of a ``TypeError``.
- Patch by David Woods. (Github issue :issue`4779`)
+ Patch by David Woods. (Github issue :issue:`4779`)
* Calling unbound ``.__contains__()`` super class methods on some builtin base
types could trigger an infinite recursion.
- Patch by David Woods. (Github issue :issue`4785`)
+ Patch by David Woods. (Github issue :issue:`4785`)
* The C union type in pure Python mode mishandled some field names.
Patch by Jordan Brière. (Github issue :issue:`4727`)
diff --git a/Cython/Build/Cythonize.py b/Cython/Build/Cythonize.py
index 1f79589f8..179c04060 100644
--- a/Cython/Build/Cythonize.py
+++ b/Cython/Build/Cythonize.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import os
import shutil
@@ -45,10 +45,12 @@ def find_package_base(path):
package_path = '%s/%s' % (parent, package_path)
return base_dir, package_path
-
def cython_compile(path_pattern, options):
- pool = None
all_paths = map(os.path.abspath, extended_iglob(path_pattern))
+ _cython_compile_files(all_paths, options)
+
+def _cython_compile_files(all_paths, options):
+ pool = None
try:
for path in all_paths:
if options.build_inplace:
@@ -121,10 +123,18 @@ def run_distutils(args):
def create_args_parser():
- from argparse import ArgumentParser
+ from argparse import ArgumentParser, RawDescriptionHelpFormatter
from ..Compiler.CmdLine import ParseDirectivesAction, ParseOptionsAction, ParseCompileTimeEnvAction
- parser = ArgumentParser()
+ parser = ArgumentParser(
+ formatter_class=RawDescriptionHelpFormatter,
+ epilog="""\
+Environment variables:
+ CYTHON_FORCE_REGEN: if set to 1, forces cythonize to regenerate the output files regardless
+ of modification times and changes.
+ Environment variables accepted by setuptools are supported to configure the C compiler and build:
+ https://setuptools.pypa.io/en/latest/userguide/ext_modules.html#compiler-and-linker-options"""
+ )
parser.add_argument('-X', '--directive', metavar='NAME=VALUE,...',
dest='directives', default={}, type=str,
@@ -222,8 +232,15 @@ def parse_args(args):
def main(args=None):
options, paths = parse_args(args)
+ all_paths = []
for path in paths:
- cython_compile(path, options)
+ expanded_path = [os.path.abspath(p) for p in extended_iglob(path)]
+ if not expanded_path:
+ import sys
+ print("{}: No such file or directory: '{}'".format(sys.argv[0], path), file=sys.stderr)
+ sys.exit(1)
+ all_paths.extend(expanded_path)
+ _cython_compile_files(all_paths, options)
if __name__ == '__main__':
diff --git a/Cython/Build/Dependencies.py b/Cython/Build/Dependencies.py
index f14166f7a..c60cbf34a 100644
--- a/Cython/Build/Dependencies.py
+++ b/Cython/Build/Dependencies.py
@@ -43,10 +43,11 @@ except:
from .. import Utils
from ..Utils import (cached_function, cached_method, path_exists,
- safe_makedirs, copy_file_to_dir_if_newer, is_package_dir)
+ safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, write_depfile)
from ..Compiler import Errors
from ..Compiler.Main import Context
-from ..Compiler.Options import CompilationOptions, default_options
+from ..Compiler.Options import (CompilationOptions, default_options,
+ get_directive_defaults)
join_path = cached_function(os.path.join)
copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
@@ -85,11 +86,14 @@ def extended_iglob(pattern):
for path in extended_iglob(before + case + after):
yield path
return
- if '**/' in pattern:
+
+ # We always accept '/' and also '\' on Windows,
+ # because '/' is generally common for relative paths.
+ if '**/' in pattern or os.sep == '\\' and '**\\' in pattern:
seen = set()
- first, rest = pattern.split('**/', 1)
+ first, rest = re.split(r'\*\*[%s]' % ('/\\\\' if os.sep == '\\' else '/'), pattern, 1)
if first:
- first = iglob(first+'/')
+ first = iglob(first + os.sep)
else:
first = ['']
for root in first:
@@ -97,7 +101,7 @@ def extended_iglob(pattern):
if path not in seen:
seen.add(path)
yield path
- for path in extended_iglob(join_path(root, '*', '**/' + rest)):
+ for path in extended_iglob(join_path(root, '*', '**', rest)):
if path not in seen:
seen.add(path)
yield path
@@ -728,7 +732,8 @@ def create_dependency_tree(ctx=None, quiet=False):
global _dep_tree
if _dep_tree is None:
if ctx is None:
- ctx = Context(["."], CompilationOptions(default_options))
+ ctx = Context(["."], get_directive_defaults(),
+ options=CompilationOptions(default_options))
_dep_tree = DependencyTree(ctx, quiet=quiet)
return _dep_tree
@@ -1049,21 +1054,7 @@ def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False,
# write out the depfile, if requested
if depfile:
dependencies = deps.all_dependencies(source)
- src_base_dir, _ = os.path.split(source)
- if not src_base_dir.endswith(os.sep):
- src_base_dir += os.sep
- # paths below the base_dir are relative, otherwise absolute
- paths = []
- for fname in dependencies:
- if fname.startswith(src_base_dir):
- paths.append(os.path.relpath(fname, src_base_dir))
- else:
- paths.append(os.path.abspath(fname))
-
- depline = os.path.split(c_file)[1] + ": \\\n "
- depline += " \\\n ".join(paths) + "\n"
- with open(c_file+'.dep', 'w') as outfile:
- outfile.write(depline)
+ write_depfile(c_file, source, dependencies)
# Missing files and those generated by other Cython versions should always be recreated.
if Utils.file_generated_by_this_cython(c_file):
diff --git a/Cython/Build/Inline.py b/Cython/Build/Inline.py
index 15d26dbf8..abb891265 100644
--- a/Cython/Build/Inline.py
+++ b/Cython/Build/Inline.py
@@ -11,7 +11,8 @@ from distutils.command.build_ext import build_ext
import Cython
from ..Compiler.Main import Context
-from ..Compiler.Options import default_options
+from ..Compiler.Options import (default_options, CompilationOptions,
+ get_directive_defaults)
from ..Compiler.Visitor import CythonTransform, EnvTransform
from ..Compiler.ParseTreeTransforms import SkipDeclarations
@@ -41,18 +42,19 @@ if sys.version_info < (3, 5):
def load_dynamic(name, module_path):
return imp.load_dynamic(name, module_path)
else:
- import importlib.util as _importlib_util
- def load_dynamic(name, module_path):
- spec = _importlib_util.spec_from_file_location(name, module_path)
- module = _importlib_util.module_from_spec(spec)
- # sys.modules[name] = module
+ import importlib.util
+ from importlib.machinery import ExtensionFileLoader
+
+ def load_dynamic(name, path):
+ spec = importlib.util.spec_from_file_location(name, loader=ExtensionFileLoader(name, path))
+ module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
class UnboundSymbols(EnvTransform, SkipDeclarations):
def __init__(self):
- CythonTransform.__init__(self, None)
+ super(EnvTransform, self).__init__(context=None)
self.unbound = set()
def visit_NameNode(self, node):
if not self.current_env().lookup(node.name):
@@ -67,7 +69,8 @@ class UnboundSymbols(EnvTransform, SkipDeclarations):
def unbound_symbols(code, context=None):
code = to_unicode(code)
if context is None:
- context = Context([], default_options)
+ context = Context([], get_directive_defaults(),
+ options=CompilationOptions(default_options))
from ..Compiler.ParseTreeTransforms import AnalyseDeclarationsTransform
tree = parse_from_strings('(tree fragment)', code)
for phase in Pipeline.create_pipeline(context, 'pyx'):
@@ -128,7 +131,11 @@ def _get_build_extension():
@cached_function
def _create_context(cython_include_dirs):
- return Context(list(cython_include_dirs), default_options)
+ return Context(
+ list(cython_include_dirs),
+ get_directive_defaults(),
+ options=CompilationOptions(default_options)
+ )
_cython_inline_cache = {}
diff --git a/Cython/Build/IpythonMagic.py b/Cython/Build/IpythonMagic.py
index 36031a78c..3fa43c96d 100644
--- a/Cython/Build/IpythonMagic.py
+++ b/Cython/Build/IpythonMagic.py
@@ -46,7 +46,6 @@ Parts of this code were taken from Cython.inline.
from __future__ import absolute_import, print_function
-import imp
import io
import os
import re
@@ -75,7 +74,7 @@ from IPython.utils.text import dedent
from ..Shadow import __version__ as cython_version
from ..Compiler.Errors import CompileError
-from .Inline import cython_inline
+from .Inline import cython_inline, load_dynamic
from .Dependencies import cythonize
from ..Utils import captured_fd, print_captured
@@ -357,7 +356,7 @@ class CythonMagics(Magics):
# Build seems ok, but we might still want to show any warnings that occurred
print_compiler_output(get_stdout(), get_stderr(), sys.stdout)
- module = imp.load_dynamic(module_name, module_path)
+ module = load_dynamic(module_name, module_path)
self._import_all(module)
if args.annotate:
@@ -420,7 +419,7 @@ class CythonMagics(Magics):
# import and execute module code to generate profile
so_module_path = os.path.join(lib_dir, pgo_module_name + self.so_ext)
- imp.load_dynamic(pgo_module_name, so_module_path)
+ load_dynamic(pgo_module_name, so_module_path)
def _cythonize(self, module_name, code, lib_dir, args, quiet=True):
pyx_file = os.path.join(lib_dir, module_name + '.pyx')
diff --git a/Cython/Build/Tests/TestDependencies.py b/Cython/Build/Tests/TestDependencies.py
new file mode 100644
index 000000000..d3888117d
--- /dev/null
+++ b/Cython/Build/Tests/TestDependencies.py
@@ -0,0 +1,142 @@
+import contextlib
+import os.path
+import sys
+import tempfile
+import unittest
+from io import open
+from os.path import join as pjoin
+
+from ..Dependencies import extended_iglob
+
+
+@contextlib.contextmanager
+def writable_file(dir_path, filename):
+ with open(pjoin(dir_path, filename), "w", encoding="utf8") as f:
+ yield f
+
+
+class TestGlobbing(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls._orig_dir = os.getcwd()
+ if sys.version_info[0] < 3:
+ temp_path = cls._tmpdir = tempfile.mkdtemp()
+ else:
+ cls._tmpdir = tempfile.TemporaryDirectory()
+ temp_path = cls._tmpdir.name
+ os.chdir(temp_path)
+
+ for dir1 in "abcd":
+ for dir1x in [dir1, dir1 + 'x']:
+ for dir2 in "xyz":
+ dir_path = pjoin(dir1x, dir2)
+ os.makedirs(dir_path)
+ with writable_file(dir_path, "file2_pyx.pyx") as f:
+ f.write(u'""" PYX """')
+ with writable_file(dir_path, "file2_py.py") as f:
+ f.write(u'""" PY """')
+
+ with writable_file(dir1x, "file1_pyx.pyx") as f:
+ f.write(u'""" PYX """')
+ with writable_file(dir1x, "file1_py.py") as f:
+ f.write(u'""" PY """')
+
+ @classmethod
+ def tearDownClass(cls):
+ os.chdir(cls._orig_dir)
+ if sys.version_info[0] < 3:
+ import shutil
+ shutil.rmtree(cls._tmpdir)
+ else:
+ cls._tmpdir.cleanup()
+
+ def files_equal(self, pattern, expected_files):
+ expected_files = sorted(expected_files)
+ # It's the user's choice whether '/' will appear on Windows.
+ matched_files = sorted(path.replace('/', os.sep) for path in extended_iglob(pattern))
+ self.assertListEqual(matched_files, expected_files) # /
+
+ # Special case for Windows: also support '\' in patterns.
+ if os.sep == '\\' and '/' in pattern:
+ matched_files = sorted(extended_iglob(pattern.replace('/', '\\')))
+ self.assertListEqual(matched_files, expected_files) # \
+
+ def test_extended_iglob_simple(self):
+ ax_files = [pjoin("a", "x", "file2_pyx.pyx"), pjoin("a", "x", "file2_py.py")]
+ self.files_equal("a/x/*", ax_files)
+ self.files_equal("a/x/*.c12", [])
+ self.files_equal("a/x/*.{py,pyx,c12}", ax_files)
+ self.files_equal("a/x/*.{py,pyx}", ax_files)
+ self.files_equal("a/x/*.{pyx}", ax_files[:1])
+ self.files_equal("a/x/*.pyx", ax_files[:1])
+ self.files_equal("a/x/*.{py}", ax_files[1:])
+ self.files_equal("a/x/*.py", ax_files[1:])
+
+ def test_extended_iglob_simple_star(self):
+ for basedir in "ad":
+ files = [
+ pjoin(basedir, dirname, filename)
+ for dirname in "xyz"
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ self.files_equal(basedir + "/*/*", files)
+ self.files_equal(basedir + "/*/*.c12", [])
+ self.files_equal(basedir + "/*/*.{py,pyx,c12}", files)
+ self.files_equal(basedir + "/*/*.{py,pyx}", files)
+ self.files_equal(basedir + "/*/*.{pyx}", files[::2])
+ self.files_equal(basedir + "/*/*.pyx", files[::2])
+ self.files_equal(basedir + "/*/*.{py}", files[1::2])
+ self.files_equal(basedir + "/*/*.py", files[1::2])
+
+ for subdir in "xy*":
+ files = [
+ pjoin(basedir, dirname, filename)
+ for dirname in "xyz"
+ if subdir in ('*', dirname)
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ path = basedir + '/' + subdir + '/'
+ self.files_equal(path + "*", files)
+ self.files_equal(path + "*.{py,pyx}", files)
+ self.files_equal(path + "*.{pyx}", files[::2])
+ self.files_equal(path + "*.pyx", files[::2])
+ self.files_equal(path + "*.{py}", files[1::2])
+ self.files_equal(path + "*.py", files[1::2])
+
+ def test_extended_iglob_double_star(self):
+ basedirs = os.listdir(".")
+ files = [
+ pjoin(basedir, dirname, filename)
+ for basedir in basedirs
+ for dirname in "xyz"
+ for filename in ["file2_pyx.pyx", "file2_py.py"]
+ ]
+ all_files = [
+ pjoin(basedir, filename)
+ for basedir in basedirs
+ for filename in ["file1_pyx.pyx", "file1_py.py"]
+ ] + files
+ self.files_equal("*/*/*", files)
+ self.files_equal("*/*/**/*", files)
+ self.files_equal("*/**/*.*", all_files)
+ self.files_equal("**/*.*", all_files)
+ self.files_equal("*/**/*.c12", [])
+ self.files_equal("**/*.c12", [])
+ self.files_equal("*/*/*.{py,pyx,c12}", files)
+ self.files_equal("*/*/**/*.{py,pyx,c12}", files)
+ self.files_equal("*/**/*/*.{py,pyx,c12}", files)
+ self.files_equal("**/*/*/*.{py,pyx,c12}", files)
+ self.files_equal("**/*.{py,pyx,c12}", all_files)
+ self.files_equal("*/*/*.{py,pyx}", files)
+ self.files_equal("**/*/*/*.{py,pyx}", files)
+ self.files_equal("*/**/*/*.{py,pyx}", files)
+ self.files_equal("**/*.{py,pyx}", all_files)
+ self.files_equal("*/*/*.{pyx}", files[::2])
+ self.files_equal("**/*.{pyx}", all_files[::2])
+ self.files_equal("*/**/*/*.pyx", files[::2])
+ self.files_equal("*/*/*.pyx", files[::2])
+ self.files_equal("**/*.pyx", all_files[::2])
+ self.files_equal("*/*/*.{py}", files[1::2])
+ self.files_equal("**/*.{py}", all_files[1::2])
+ self.files_equal("*/*/*.py", files[1::2])
+ self.files_equal("**/*.py", all_files[1::2])
diff --git a/Cython/Compiler/Annotate.py b/Cython/Compiler/Annotate.py
index 48e73f853..8e8d2c4a8 100644
--- a/Cython/Compiler/Annotate.py
+++ b/Cython/Compiler/Annotate.py
@@ -49,8 +49,8 @@ class AnnotationCCodeWriter(CCodeWriter):
def create_new(self, create_from, buffer, copy_formatting):
return AnnotationCCodeWriter(create_from, buffer, copy_formatting)
- def write(self, s):
- CCodeWriter.write(self, s)
+ def _write_to_buffer(self, s):
+ self.buffer.write(s)
self.annotation_buffer.write(s)
def mark_pos(self, pos, trace=True):
@@ -73,7 +73,7 @@ class AnnotationCCodeWriter(CCodeWriter):
"""css template will later allow to choose a colormap"""
css = [self._css_template]
for i in range(255):
- color = u"FFFF%02x" % int(255/(1+i/10.0))
+ color = u"FFFF%02x" % int(255.0 // (1.0 + i/10.0))
css.append('.cython.score-%d {background-color: #%s;}' % (i, color))
try:
from pygments.formatters import HtmlFormatter
diff --git a/Cython/Compiler/Builtin.py b/Cython/Compiler/Builtin.py
index 577c20775..26fd68ff6 100644
--- a/Cython/Compiler/Builtin.py
+++ b/Cython/Compiler/Builtin.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import
from .StringEncoding import EncodedString
-from .Symtab import BuiltinScope, StructOrUnionScope, ModuleScope
+from .Symtab import BuiltinScope, StructOrUnionScope, ModuleScope, Entry
from .Code import UtilityCode
from .TypeSlots import Signature
from . import PyrexTypes
@@ -427,6 +427,7 @@ def init_builtins():
global list_type, tuple_type, dict_type, set_type, frozenset_type
global bytes_type, str_type, unicode_type, basestring_type, slice_type
global float_type, long_type, bool_type, type_type, complex_type, bytearray_type
+ global int_type
type_type = builtin_scope.lookup('type').type
list_type = builtin_scope.lookup('list').type
tuple_type = builtin_scope.lookup('tuple').type
@@ -443,6 +444,18 @@ def init_builtins():
long_type = builtin_scope.lookup('long').type
bool_type = builtin_scope.lookup('bool').type
complex_type = builtin_scope.lookup('complex').type
+ # Be careful with int type while Py2 is still supported
+ int_type = builtin_scope.lookup('int').type
+
+ # Set up type inference links between equivalent Python/C types
+ bool_type.equivalent_type = PyrexTypes.c_bint_type
+ PyrexTypes.c_bint_type.equivalent_type = bool_type
+
+ float_type.equivalent_type = PyrexTypes.c_double_type
+ PyrexTypes.c_double_type.equivalent_type = float_type
+
+ complex_type.equivalent_type = PyrexTypes.c_double_complex_type
+ PyrexTypes.c_double_complex_type.equivalent_type = complex_type
init_builtins()
@@ -466,21 +479,37 @@ def get_known_standard_library_module_scope(module_name):
('Set', set_type),
('FrozenSet', frozenset_type),
]:
- name = EncodedString(name)
if name == "Tuple":
indexed_type = PyrexTypes.PythonTupleTypeConstructor(EncodedString("typing."+name), tp)
else:
indexed_type = PyrexTypes.PythonTypeConstructor(EncodedString("typing."+name), tp)
+ name = EncodedString(name)
entry = mod.declare_type(name, indexed_type, pos = None)
+ var_entry = Entry(name, None, PyrexTypes.py_object_type)
+ var_entry.is_pyglobal = True
+ var_entry.is_variable = True
+ var_entry.scope = mod
+ entry.as_variable = var_entry
for name in ['ClassVar', 'Optional']:
+ name = EncodedString(name)
indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("typing."+name))
entry = mod.declare_type(name, indexed_type, pos = None)
+ var_entry = Entry(name, None, PyrexTypes.py_object_type)
+ var_entry.is_pyglobal = True
+ var_entry.is_variable = True
+ var_entry.scope = mod
+ entry.as_variable = var_entry
_known_module_scopes[module_name] = mod
elif module_name == "dataclasses":
mod = ModuleScope(module_name, None, None)
indexed_type = PyrexTypes.SpecialPythonTypeConstructor(EncodedString("dataclasses.InitVar"))
- entry = mod.declare_type(EncodedString("InitVar"), indexed_type, pos = None)
+ initvar_string = EncodedString("InitVar")
+ entry = mod.declare_type(initvar_string, indexed_type, pos = None)
+ var_entry = Entry(initvar_string, None, PyrexTypes.py_object_type)
+ var_entry.is_pyglobal = True
+ var_entry.scope = mod
+ entry.as_variable = var_entry
_known_module_scopes[module_name] = mod
return mod
diff --git a/Cython/Compiler/CmdLine.py b/Cython/Compiler/CmdLine.py
index ffff6a61c..c330fcc05 100644
--- a/Cython/Compiler/CmdLine.py
+++ b/Cython/Compiler/CmdLine.py
@@ -4,11 +4,17 @@
from __future__ import absolute_import
+import sys
import os
from argparse import ArgumentParser, Action, SUPPRESS
from . import Options
+if sys.version_info < (3, 3):
+ # TODO: This workaround can be removed in Cython 3.1
+ FileNotFoundError = IOError
+
+
class ParseDirectivesAction(Action):
def __call__(self, parser, namespace, values, option_string=None):
old_directives = dict(getattr(namespace, self.dest,
@@ -145,6 +151,12 @@ def create_cython_argparser():
dest='compile_time_env', type=str,
action=ParseCompileTimeEnvAction,
help='Provides compile time env like DEF would do.')
+ parser.add_argument("--module-name",
+ dest='module_name', type=str, action='store',
+ help='Fully qualified module name. If not given, is '
+ 'deduced from the import path if source file is in '
+ 'a package, or equals the filename otherwise.')
+ parser.add_argument('-M', '--depfile', action='store_true', help='produce depfiles for the sources')
parser.add_argument('sources', nargs='*', default=[])
# TODO: add help
@@ -203,6 +215,10 @@ def parse_command_line_raw(parser, args):
def parse_command_line(args):
parser = create_cython_argparser()
arguments, sources = parse_command_line_raw(parser, args)
+ for source in sources:
+ if not os.path.exists(source):
+ import errno
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), source)
options = Options.CompilationOptions(Options.default_options)
for name, value in vars(arguments).items():
@@ -222,5 +238,10 @@ def parse_command_line(args):
if len(sources) == 0 and not options.show_version:
parser.error("cython: Need at least one source file\n")
if Options.embed and len(sources) > 1:
- parser.error("cython: Only one source file allowed when using -embed\n")
+ parser.error("cython: Only one source file allowed when using --embed\n")
+ if options.module_name:
+ if options.timestamps:
+ parser.error("cython: Cannot use --module-name with --timestamps\n")
+ if len(sources) > 1:
+ parser.error("cython: Only one source file allowed when using --module-name\n")
return options, sources
diff --git a/Cython/Compiler/Code.pxd b/Cython/Compiler/Code.pxd
index e17e0fb1d..4601474b2 100644
--- a/Cython/Compiler/Code.pxd
+++ b/Cython/Compiler/Code.pxd
@@ -54,6 +54,7 @@ cdef class FunctionState:
cdef public object closure_temps
cdef public bint should_declare_error_indicator
cdef public bint uses_error_indicator
+ cdef public bint error_without_exception
@cython.locals(n=size_t)
cpdef new_label(self, name=*)
@@ -109,6 +110,9 @@ cdef class CCodeWriter(object):
cdef bint bol
cpdef write(self, s)
+ @cython.final
+ cdef _write_lines(self, s)
+ cpdef _write_to_buffer(self, s)
cpdef put(self, code)
cpdef put_safe(self, code)
cpdef putln(self, code=*, bint safe=*)
@@ -116,6 +120,8 @@ cdef class CCodeWriter(object):
cdef increase_indent(self)
@cython.final
cdef decrease_indent(self)
+ @cython.final
+ cdef indent(self)
cdef class PyrexCodeWriter:
diff --git a/Cython/Compiler/Code.py b/Cython/Compiler/Code.py
index 4695b240c..1f561da02 100644
--- a/Cython/Compiler/Code.py
+++ b/Cython/Compiler/Code.py
@@ -21,7 +21,7 @@ import shutil
import textwrap
from string import Template
from functools import partial
-from contextlib import closing
+from contextlib import closing, contextmanager
from collections import defaultdict
from . import Naming
@@ -691,6 +691,7 @@ class LazyUtilityCode(UtilityCodeBase):
class FunctionState(object):
# return_label string function return point label
# error_label string error catch point label
+ # error_without_exception boolean Can go to the error label without an exception (e.g. __next__ can return NULL)
# continue_label string loop continue point label
# break_label string loop break point label
# return_from_error_cleanup_label string
@@ -739,6 +740,8 @@ class FunctionState(object):
self.should_declare_error_indicator = False
self.uses_error_indicator = False
+ self.error_without_exception = False
+
# safety checks
def validate_exit(self):
@@ -828,14 +831,14 @@ class FunctionState(object):
allocated and released one of the same type). Type is simply registered
and handed back, but will usually be a PyrexType.
- If type.is_pyobject, manage_ref comes into play. If manage_ref is set to
+ If type.needs_refcounting, manage_ref comes into play. If manage_ref is set to
True, the temp will be decref-ed on return statements and in exception
handling clauses. Otherwise the caller has to deal with any reference
counting of the variable.
- If not type.is_pyobject, then manage_ref will be ignored, but it
+ If not type.needs_refcounting, then manage_ref will be ignored, but it
still has to be passed. It is recommended to pass False by convention
- if it is known that type will never be a Python object.
+ if it is known that type will never be a reference counted type.
static=True marks the temporary declaration with "static".
This is only used when allocating backing store for a module-level
@@ -854,7 +857,7 @@ class FunctionState(object):
type = PyrexTypes.c_ptr_type(type) # A function itself isn't an l-value
elif type.is_cpp_class and not type.is_fake_reference and self.scope.directives['cpp_locals']:
self.scope.use_utility_code(UtilityCode.load_cached("OptionalLocals", "CppSupport.cpp"))
- if not type.is_pyobject and not type.is_memoryviewslice:
+ if not type.needs_refcounting:
# Make manage_ref canonical, so that manage_ref will always mean
# a decref is needed.
manage_ref = False
@@ -907,17 +910,17 @@ class FunctionState(object):
for name, type, manage_ref, static in self.temps_allocated:
freelist = self.temps_free.get((type, manage_ref))
if freelist is None or name not in freelist[1]:
- used.append((name, type, manage_ref and type.is_pyobject))
+ used.append((name, type, manage_ref and type.needs_refcounting))
return used
def temps_holding_reference(self):
"""Return a list of (cname,type) tuples of temp names and their type
- that are currently in use. This includes only temps of a
- Python object type which owns its reference.
+ that are currently in use. This includes only temps
+ with a reference counted type which owns its reference.
"""
return [(name, type)
for name, type, manage_ref in self.temps_in_use()
- if manage_ref and type.is_pyobject]
+ if manage_ref and type.needs_refcounting]
def all_managed_temps(self):
"""Return a list of (cname, type) tuples of refcount-managed Python objects.
@@ -1647,7 +1650,7 @@ class GlobalState(object):
init_constants.putln("#if !CYTHON_USE_MODULE_STATE")
init_constants.putln(
- "if (__Pyx_InitStrings(%s) < 0) %s;" % (
+ "if (__Pyx_InitStrings(%s) < 0) %s" % (
Naming.stringtab_cname,
init_constants.error_goto(self.module_pos)))
init_constants.putln("#endif")
@@ -1857,13 +1860,21 @@ class CCodeWriter(object):
return self.buffer.getvalue()
def write(self, s):
+ if '\n' in s:
+ self._write_lines(s)
+ else:
+ self._write_to_buffer(s)
+
+ def _write_lines(self, s):
# Cygdb needs to know which Cython source line corresponds to which C line.
# Therefore, we write this information into "self.buffer.markers" and then write it from there
# into cython_debug/cython_debug_info_* (see ModuleNode._serialize_lineno_map).
-
filename_line = self.last_marked_pos[:2] if self.last_marked_pos else (None, 0)
self.buffer.markers.extend([filename_line] * s.count('\n'))
+ self._write_to_buffer(s)
+
+ def _write_to_buffer(self, s):
self.buffer.write(s)
def insertion_point(self):
@@ -1967,13 +1978,13 @@ class CCodeWriter(object):
self.emit_marker()
if self.code_config.emit_linenums and self.last_marked_pos:
source_desc, line, _ = self.last_marked_pos
- self.write('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description()))
+ self._write_lines('\n#line %s "%s"\n' % (line, source_desc.get_escaped_description()))
if code:
if safe:
self.put_safe(code)
else:
self.put(code)
- self.write("\n")
+ self._write_lines("\n")
self.bol = 1
def mark_pos(self, pos, trace=True):
@@ -1987,13 +1998,13 @@ class CCodeWriter(object):
pos, trace = self.last_pos
self.last_marked_pos = pos
self.last_pos = None
- self.write("\n")
+ self._write_lines("\n")
if self.code_config.emit_code_comments:
self.indent()
- self.write("/* %s */\n" % self._build_marker(pos))
+ self._write_lines("/* %s */\n" % self._build_marker(pos))
if trace and self.funcstate and self.funcstate.can_trace and self.globalstate.directives['linetrace']:
self.indent()
- self.write('__Pyx_TraceLine(%d,%d,%s)\n' % (
+ self._write_lines('__Pyx_TraceLine(%d,%d,%s)\n' % (
pos[1], not self.funcstate.gil_owned, self.error_goto(pos)))
def _build_marker(self, pos):
@@ -2070,7 +2081,7 @@ class CCodeWriter(object):
self.putln("}")
def indent(self):
- self.write(" " * self.level)
+ self._write_to_buffer(" " * self.level)
def get_py_version_hex(self, pyversion):
return "0x%02X%02X%02X%02X" % (tuple(pyversion) + (0,0,0,0))[:4]
@@ -2092,10 +2103,10 @@ class CCodeWriter(object):
if entry.visibility == "private" and not entry.used:
#print "...private and not used, skipping", entry.cname ###
return
- if storage_class:
- self.put("%s " % storage_class)
if not entry.cf_used:
self.put('CYTHON_UNUSED ')
+ if storage_class:
+ self.put("%s " % storage_class)
if entry.is_cpp_optional:
self.put(entry.type.cpp_optional_declaration_code(
entry.cname, dll_linkage=dll_linkage))
@@ -2332,8 +2343,16 @@ class CCodeWriter(object):
if method_noargs in method_flags:
# Special NOARGS methods really take no arguments besides 'self', but PyCFunction expects one.
func_cname = Naming.method_wrapper_prefix + func_cname
- self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {return %s(self);}" % (
- func_cname, entry.func_cname))
+ self.putln("static PyObject *%s(PyObject *self, CYTHON_UNUSED PyObject *arg) {" % func_cname)
+ func_call = "%s(self)" % entry.func_cname
+ if entry.name == "__next__":
+ self.putln("PyObject *res = %s;" % func_call)
+ # tp_iternext can return NULL without an exception
+ self.putln("if (!res && !PyErr_Occurred()) { PyErr_SetNone(PyExc_StopIteration); }")
+ self.putln("return res;")
+ else:
+ self.putln("return %s;" % func_call)
+ self.putln("}")
return func_cname
# GIL methods
@@ -2600,9 +2619,7 @@ class PyrexCodeWriter(object):
class PyxCodeWriter(object):
"""
- Can be used for writing out some Cython code. To use the indenter
- functionality, the Cython.Compiler.Importer module will have to be used
- to load the code to support python 2.4
+ Can be used for writing out some Cython code.
"""
def __init__(self, buffer=None, indent_level=0, context=None, encoding='ascii'):
@@ -2618,22 +2635,16 @@ class PyxCodeWriter(object):
def dedent(self, levels=1):
self.level -= levels
+ @contextmanager
def indenter(self, line):
"""
- Instead of
-
- with pyx_code.indenter("for i in range(10):"):
- pyx_code.putln("print i")
-
- write
-
- if pyx_code.indenter("for i in range(10);"):
- pyx_code.putln("print i")
- pyx_code.dedent()
+ with pyx_code.indenter("for i in range(10):"):
+ pyx_code.putln("print i")
"""
self.putln(line)
self.indent()
- return True
+ yield
+ self.dedent()
def getvalue(self):
result = self.buffer.getvalue()
diff --git a/Cython/Compiler/CythonScope.py b/Cython/Compiler/CythonScope.py
index 08f3da9eb..f73be0070 100644
--- a/Cython/Compiler/CythonScope.py
+++ b/Cython/Compiler/CythonScope.py
@@ -51,7 +51,7 @@ class CythonScope(ModuleScope):
def find_module(self, module_name, pos):
error("cython.%s is not available" % module_name, pos)
- def find_submodule(self, module_name):
+ def find_submodule(self, module_name, as_package=False):
entry = self.entries.get(module_name, None)
if not entry:
self.load_cythonscope()
diff --git a/Cython/Compiler/Dataclass.py b/Cython/Compiler/Dataclass.py
index 48c1888d6..7cbbab954 100644
--- a/Cython/Compiler/Dataclass.py
+++ b/Cython/Compiler/Dataclass.py
@@ -81,6 +81,59 @@ class RemoveAssignmentsToNames(VisitorTransform, SkipDeclarations):
return node
+class TemplateCode(object):
+ _placeholder_count = 0
+
+ def __init__(self):
+ self.code_lines = []
+ self.placeholders = {}
+ self.extra_stats = []
+
+ def insertion_point(self):
+ return len(self.code_lines)
+
+ def insert_code_line(self, insertion_point, code_line):
+ self.code_lines.insert(insertion_point, code_line)
+
+ def reset(self, insertion_point=0):
+ del self.code_lines[insertion_point:]
+
+ def add_code_line(self, code_line):
+ self.code_lines.append(code_line)
+
+ def add_code_lines(self, code_lines):
+ self.code_lines.extend(code_lines)
+
+ def new_placeholder(self, field_names, value):
+ name = self._new_placeholder_name(field_names)
+ self.placeholders[name] = value
+ return name
+
+ def add_extra_statements(self, statements):
+ self.extra_stats.extend(statements)
+
+ def _new_placeholder_name(self, field_names):
+ while True:
+ name = "INIT_PLACEHOLDER_%d" % self._placeholder_count
+ if (name not in self.placeholders
+ and name not in field_names):
+ # make sure name isn't already used and doesn't
+ # conflict with a variable name (which is unlikely but possible)
+ break
+ self._placeholder_count += 1
+ return name
+
+ def generate_tree(self, level='c_class'):
+ stat_list_node = TreeFragment(
+ "\n".join(self.code_lines),
+ level=level,
+ pipeline=[NormalizeTree(None)],
+ ).substitute(self.placeholders)
+
+ stat_list_node.stats += self.extra_stats
+ return stat_list_node
+
+
class _MISSING_TYPE(object):
pass
MISSING = _MISSING_TYPE()
@@ -147,33 +200,39 @@ def process_class_get_fields(node):
transform(node)
default_value_assignments = transform.removed_assignments
- if node.base_type and node.base_type.dataclass_fields:
- fields = node.base_type.dataclass_fields.copy()
- else:
- fields = OrderedDict()
+ base_type = node.base_type
+ fields = OrderedDict()
+ while base_type:
+ if base_type.is_external or not base_type.scope.implemented:
+ warning(node.pos, "Cannot reliably handle Cython dataclasses with base types "
+ "in external modules since it is not possible to tell what fields they have", 2)
+ if base_type.dataclass_fields:
+ fields = base_type.dataclass_fields.copy()
+ break
+ base_type = base_type.base_type
for entry in var_entries:
name = entry.name
- is_initvar = (entry.type.python_type_constructor_name == "dataclasses.InitVar")
+ is_initvar = entry.declared_with_pytyping_modifier("dataclasses.InitVar")
# TODO - classvars aren't included in "var_entries" so are missed here
# and thus this code is never triggered
- is_classvar = (entry.type.python_type_constructor_name == "typing.ClassVar")
- if is_initvar or is_classvar:
- entry.type = entry.type.resolve() # no longer need the special type
+ is_classvar = entry.declared_with_pytyping_modifier("typing.ClassVar")
if name in default_value_assignments:
assignment = default_value_assignments[name]
if (isinstance(assignment, ExprNodes.CallNode)
and assignment.function.as_cython_attribute() == "dataclasses.field"):
# I believe most of this is well-enforced when it's treated as a directive
# but it doesn't hurt to make sure
- if (not isinstance(assignment, ExprNodes.GeneralCallNode)
- or not isinstance(assignment.positional_args, ExprNodes.TupleNode)
- or assignment.positional_args.args
- or not isinstance(assignment.keyword_args, ExprNodes.DictNode)):
+ valid_general_call = (isinstance(assignment, ExprNodes.GeneralCallNode)
+ and isinstance(assignment.positional_args, ExprNodes.TupleNode)
+ and not assignment.positional_args.args
+ and (assignment.keyword_args is None or isinstance(assignment.keyword_args, ExprNodes.DictNode)))
+ valid_simple_call = (isinstance(assignment, ExprNodes.SimpleCallNode) and not assignment.args)
+ if not (valid_general_call or valid_simple_call):
error(assignment.pos, "Call to 'cython.dataclasses.field' must only consist "
"of compile-time keyword arguments")
continue
- keyword_args = assignment.keyword_args.as_python_dict()
+ keyword_args = assignment.keyword_args.as_python_dict() if valid_general_call and assignment.keyword_args else {}
if 'default' in keyword_args and 'default_factory' in keyword_args:
error(assignment.pos, "cannot specify both default and default_factory")
continue
@@ -218,7 +277,7 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
if not isinstance(v, ExprNodes.BoolNode):
error(node.pos,
"Arguments passed to cython.dataclasses.dataclass must be True or False")
- kwargs[k] = v
+ kwargs[k] = v.value
# remove everything that does not belong into _DataclassParams()
kw_only = kwargs.pop("kw_only")
@@ -251,23 +310,14 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
stats = Nodes.StatListNode(node.pos,
stats=[dataclass_params_assignment] + dataclass_fields_stats)
- code_lines = []
- placeholders = {}
- extra_stats = []
- for cl, ph, es in [ generate_init_code(kwargs['init'], node, fields, kw_only),
- generate_repr_code(kwargs['repr'], node, fields),
- generate_eq_code(kwargs['eq'], node, fields),
- generate_order_code(kwargs['order'], node, fields),
- generate_hash_code(kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields) ]:
- code_lines.append(cl)
- placeholders.update(ph)
- extra_stats.extend(extra_stats)
-
- code_lines = "\n".join(code_lines)
- code_tree = TreeFragment(code_lines, level='c_class', pipeline=[NormalizeTree(node.scope)]
- ).substitute(placeholders)
-
- stats.stats += (code_tree.stats + extra_stats)
+ code = TemplateCode()
+ generate_init_code(code, kwargs['init'], node, fields, kw_only)
+ generate_repr_code(code, kwargs['repr'], node, fields)
+ generate_eq_code(code, kwargs['eq'], node, fields)
+ generate_order_code(code, kwargs['order'], node, fields)
+ generate_hash_code(code, kwargs['unsafe_hash'], kwargs['eq'], kwargs['frozen'], node, fields)
+
+ stats.stats += code.generate_tree().stats
# turn off annotation typing, so all arguments to __init__ are accepted as
# generic objects and thus can accept _HAS_DEFAULT_FACTORY.
@@ -285,14 +335,8 @@ def handle_cclass_dataclass(node, dataclass_args, analyse_decs_transform):
node.body.stats.append(comp_directives)
-def generate_init_code(init, node, fields, kw_only):
+def generate_init_code(code, init, node, fields, kw_only):
"""
- All of these "generate_*_code" functions return a tuple of:
- - code string
- - placeholder dict (often empty)
- - stat list (often empty)
- which can then be combined later and processed once.
-
Notes on CPython generated "__init__":
* Implemented in `_init_fn`.
* The use of the `dataclasses._HAS_DEFAULT_FACTORY` sentinel value as
@@ -304,9 +348,15 @@ def generate_init_code(init, node, fields, kw_only):
* seen_default and the associated error message are copied directly from Python
* Call to user-defined __post_init__ function (if it exists) is copied from
CPython.
+
+ Cython behaviour deviates a little here (to be decided if this is right...)
+ Because the class variable from the assignment does not exist Cython fields will
+ return None (or whatever their type default is) if not initialized while Python
+ dataclasses will fall back to looking up the class variable.
"""
if not init or node.scope.lookup_here("__init__"):
- return "", {}, []
+ return
+
# selfname behaviour copied from the cpython module
selfname = "__dataclass_self__" if "self" in fields else "self"
args = [selfname]
@@ -314,8 +364,7 @@ def generate_init_code(init, node, fields, kw_only):
if kw_only:
args.append("*")
- placeholders = {}
- placeholder_count = [0]
+ function_start_point = code.insertion_point()
# create a temp to get _HAS_DEFAULT_FACTORY
dataclass_module = make_dataclasses_module_callnode(node.pos)
@@ -325,26 +374,10 @@ def generate_init_code(init, node, fields, kw_only):
attribute=EncodedString("_HAS_DEFAULT_FACTORY")
)
- def get_placeholder_name():
- while True:
- name = "INIT_PLACEHOLDER_%d" % placeholder_count[0]
- if (name not in placeholders
- and name not in fields):
- # make sure name isn't already used and doesn't
- # conflict with a variable name (which is unlikely but possible)
- break
- placeholder_count[0] += 1
- return name
-
- default_factory_placeholder = get_placeholder_name()
- placeholders[default_factory_placeholder] = has_default_factory
-
- function_body_code_lines = []
+ default_factory_placeholder = code.new_placeholder(fields, has_default_factory)
seen_default = False
for name, field in fields.items():
- if not field.init.value:
- continue
entry = node.scope.lookup(name)
if entry.annotation:
annotation = u": %s" % entry.annotation.string.value
@@ -356,50 +389,53 @@ def generate_init_code(init, node, fields, kw_only):
if field.default_factory is not MISSING:
ph_name = default_factory_placeholder
else:
- ph_name = get_placeholder_name()
- placeholders[ph_name] = field.default # should be a node
+ ph_name = code.new_placeholder(fields, field.default) # 'default' should be a node
assignment = u" = %s" % ph_name
- elif seen_default and not kw_only:
+ elif seen_default and not kw_only and field.init.value:
error(entry.pos, ("non-default argument '%s' follows default argument "
"in dataclass __init__") % name)
- return "", {}, []
+ code.reset(function_start_point)
+ return
- args.append(u"%s%s%s" % (name, annotation, assignment))
+ if field.init.value:
+ args.append(u"%s%s%s" % (name, annotation, assignment))
if field.is_initvar:
continue
elif field.default_factory is MISSING:
if field.init.value:
- function_body_code_lines.append(u" %s.%s = %s" % (selfname, name, name))
+ code.add_code_line(u" %s.%s = %s" % (selfname, name, name))
+ elif assignment:
+ # not an argument to the function, but is still initialized
+ code.add_code_line(u" %s.%s%s" % (selfname, name, assignment))
else:
- ph_name = get_placeholder_name()
- placeholders[ph_name] = field.default_factory
+ ph_name = code.new_placeholder(fields, field.default_factory)
if field.init.value:
# close to:
# def __init__(self, name=_PLACEHOLDER_VALUE):
# self.name = name_default_factory() if name is _PLACEHOLDER_VALUE else name
- function_body_code_lines.append(u" %s.%s = %s() if %s is %s else %s" % (
+ code.add_code_line(u" %s.%s = %s() if %s is %s else %s" % (
selfname, name, ph_name, name, default_factory_placeholder, name))
else:
# still need to use the default factory to initialize
- function_body_code_lines.append(u" %s.%s = %s()"
- % (selfname, name, ph_name))
-
- args = u", ".join(args)
- func_def = u"def __init__(%s):" % args
-
- code_lines = [func_def] + (function_body_code_lines or ["pass"])
+ code.add_code_line(u" %s.%s = %s()" % (
+ selfname, name, ph_name))
if node.scope.lookup("__post_init__"):
post_init_vars = ", ".join(name for name, field in fields.items()
if field.is_initvar)
- code_lines.append(" %s.__post_init__(%s)" % (selfname, post_init_vars))
- return u"\n".join(code_lines), placeholders, []
+ code.add_code_line(" %s.__post_init__(%s)" % (selfname, post_init_vars))
+ if function_start_point == code.insertion_point():
+ code.add_code_line(" pass")
-def generate_repr_code(repr, node, fields):
+ args = u", ".join(args)
+ code.insert_code_line(function_start_point, u"def __init__(%s):" % args)
+
+
+def generate_repr_code(code, repr, node, fields):
"""
- The CPython implementation is just:
+ The core of the CPython implementation is just:
['return self.__class__.__qualname__ + f"(' +
', '.join([f"{f.name}={{self.{f.name}!r}}"
for f in fields]) +
@@ -407,38 +443,65 @@ def generate_repr_code(repr, node, fields):
The only notable difference here is self.__class__.__qualname__ -> type(self).__name__
which is because Cython currently supports Python 2.
+
+ However, it also has some guards for recursive repr invocations. In the standard
+ library implementation they're done with a wrapper decorator that captures a set
+ (with the set keyed by id and thread). Here we create a set as a thread local
+ variable and key only by id.
"""
if not repr or node.scope.lookup("__repr__"):
- return "", {}, []
- code_lines = ["def __repr__(self):"]
+ return
+
+ # The recursive guard is likely a little costly, so skip it if possible.
+ # is_gc_simple defines whether it can contain recursive objects
+ needs_recursive_guard = False
+ for name in fields.keys():
+ entry = node.scope.lookup(name)
+ type_ = entry.type
+ if type_.is_memoryviewslice:
+ type_ = type_.dtype
+ if not type_.is_pyobject:
+ continue # no GC
+ if not type_.is_gc_simple:
+ needs_recursive_guard = True
+ break
+
+ if needs_recursive_guard:
+ code.add_code_line("__pyx_recursive_repr_guard = __import__('threading').local()")
+ code.add_code_line("__pyx_recursive_repr_guard.running = set()")
+ code.add_code_line("def __repr__(self):")
+ if needs_recursive_guard:
+ code.add_code_line(" key = id(self)")
+ code.add_code_line(" guard_set = self.__pyx_recursive_repr_guard.running")
+ code.add_code_line(" if key in guard_set: return '...'")
+ code.add_code_line(" guard_set.add(key)")
+ code.add_code_line(" try:")
strs = [u"%s={self.%s!r}" % (name, name)
for name, field in fields.items()
if field.repr.value and not field.is_initvar]
format_string = u", ".join(strs)
- code_lines.append(u' name = getattr(type(self), "__qualname__", type(self).__name__)')
- code_lines.append(u" return f'{name}(%s)'" % format_string)
- code_lines = u"\n".join(code_lines)
- return code_lines, {}, []
+ code.add_code_line(u' name = getattr(type(self), "__qualname__", type(self).__name__)')
+ code.add_code_line(u" return f'{name}(%s)'" % format_string)
+ if needs_recursive_guard:
+ code.add_code_line(" finally:")
+ code.add_code_line(" guard_set.remove(key)")
-def generate_cmp_code(op, funcname, node, fields):
+def generate_cmp_code(code, op, funcname, node, fields):
if node.scope.lookup_here(funcname):
- return "", {}, []
+ return
names = [name for name, field in fields.items() if (field.compare.value and not field.is_initvar)]
- if not names:
- return "", {}, [] # no comparable types
-
- code_lines = [
+ code.add_code_lines([
"def %s(self, other):" % funcname,
+ " if not isinstance(other, %s):" % node.class_name,
+ " return NotImplemented",
+ #
" cdef %s other_cast" % node.class_name,
- " if isinstance(other, %s):" % node.class_name,
- " other_cast = <%s>other" % node.class_name,
- " else:",
- " return NotImplemented"
- ]
+ " other_cast = <%s>other" % node.class_name,
+ ])
# The Python implementation of dataclasses.py does a tuple comparison
# (roughly):
@@ -456,42 +519,32 @@ def generate_cmp_code(op, funcname, node, fields):
name, op, name))
if checks:
- code_lines.append(" return " + " and ".join(checks))
+ code.add_code_line(" return " + " and ".join(checks))
else:
if "=" in op:
- code_lines.append(" return True") # "() == ()" is True
+ code.add_code_line(" return True") # "() == ()" is True
else:
- code_lines.append(" return False")
+ code.add_code_line(" return False")
- code_lines = u"\n".join(code_lines)
- return code_lines, {}, []
-
-
-def generate_eq_code(eq, node, fields):
+def generate_eq_code(code, eq, node, fields):
if not eq:
- return code_lines, {}, []
- return generate_cmp_code("==", "__eq__", node, fields)
+ return
+ generate_cmp_code(code, "==", "__eq__", node, fields)
-def generate_order_code(order, node, fields):
+def generate_order_code(code, order, node, fields):
if not order:
- return "", {}, []
- code_lines = []
- placeholders = {}
- stats = []
+ return
+
for op, name in [("<", "__lt__"),
("<=", "__le__"),
(">", "__gt__"),
(">=", "__ge__")]:
- res = generate_cmp_code(op, name, node, fields)
- code_lines.append(res[0])
- placeholders.update(res[1])
- stats.extend(res[2])
- return "\n".join(code_lines), placeholders, stats
+ generate_cmp_code(code, op, name, node, fields)
-def generate_hash_code(unsafe_hash, eq, frozen, node, fields):
+def generate_hash_code(code, unsafe_hash, eq, frozen, node, fields):
"""
Copied from CPython implementation - the intention is to follow this as far as
is possible:
@@ -536,35 +589,37 @@ def generate_hash_code(unsafe_hash, eq, frozen, node, fields):
if unsafe_hash:
# error message taken from CPython dataclasses module
error(node.pos, "Cannot overwrite attribute __hash__ in class %s" % node.class_name)
- return "", {}, []
+ return
+
if not unsafe_hash:
if not eq:
return
if not frozen:
- return "", {}, [Nodes.SingleAssignmentNode(
- node.pos,
- lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")),
- rhs=ExprNodes.NoneNode(node.pos),
- )]
+ code.add_extra_statements([
+ Nodes.SingleAssignmentNode(
+ node.pos,
+ lhs=ExprNodes.NameNode(node.pos, name=EncodedString("__hash__")),
+ rhs=ExprNodes.NoneNode(node.pos),
+ )
+ ])
+ return
names = [
name for name, field in fields.items()
- if (not field.is_initvar and
- (field.compare.value if field.hash.value is None else field.hash.value))
+ if not field.is_initvar and (
+ field.compare.value if field.hash.value is None else field.hash.value)
]
- if not names:
- return "", {}, [] # nothing to hash
# make a tuple of the hashes
- tpl = u", ".join(u"hash(self.%s)" % name for name in names )
+ hash_tuple_items = u", ".join(u"self.%s" % name for name in names)
+ if hash_tuple_items:
+ hash_tuple_items += u"," # ensure that one arg form is a tuple
# if we're here we want to generate a hash
- code_lines = dedent(u"""\
- def __hash__(self):
- return hash((%s))
- """) % tpl
-
- return code_lines, {}, []
+ code.add_code_lines([
+ "def __hash__(self):",
+ " return hash((%s))" % hash_tuple_items,
+ ])
def get_field_type(pos, entry):
@@ -666,8 +721,11 @@ def _set_up_dataclass_fields(node, fields, dataclass_module):
name)
# create an entry in the global scope for this variable to live
field_node = ExprNodes.NameNode(field_default.pos, name=EncodedString(module_field_name))
- field_node.entry = global_scope.declare_var(field_node.name, type=field_default.type or PyrexTypes.unspecified_type,
- pos=field_default.pos, cname=field_node.name, is_cdef=1)
+ field_node.entry = global_scope.declare_var(
+ field_node.name, type=field_default.type or PyrexTypes.unspecified_type,
+ pos=field_default.pos, cname=field_node.name, is_cdef=True,
+ # TODO: do we need to set 'pytyping_modifiers' here?
+ )
# replace the field so that future users just receive the namenode
setattr(field, attrname, field_node)
diff --git a/Cython/Compiler/ExprNodes.py b/Cython/Compiler/ExprNodes.py
index 312b37329..881851535 100644
--- a/Cython/Compiler/ExprNodes.py
+++ b/Cython/Compiler/ExprNodes.py
@@ -328,14 +328,13 @@ class ExprNode(Node):
# is_sequence_constructor
# boolean Is a list or tuple constructor expression
# is_starred boolean Is a starred expression (e.g. '*a')
- # saved_subexpr_nodes
- # [ExprNode or [ExprNode or None] or None]
- # Cached result of subexpr_nodes()
# use_managed_ref boolean use ref-counted temps/assignments/etc.
# result_is_used boolean indicates that the result will be dropped and the
- # is_numpy_attribute boolean Is a Numpy module attribute
# result_code/temp_result can safely be set to None
+ # is_numpy_attribute boolean Is a Numpy module attribute
# annotation ExprNode or None PEP526 annotation for names or expressions
+ # generator_arg_tag None or Node A tag to mark ExprNodes that potentially need to
+ # be changed to a generator argument
result_ctype = None
type = None
@@ -345,6 +344,7 @@ class ExprNode(Node):
use_managed_ref = True # can be set by optimisation transforms
result_is_used = True
is_numpy_attribute = False
+ generator_arg_tag = None
# The Analyse Expressions phase for expressions is split
# into two sub-phases:
@@ -473,7 +473,6 @@ class ExprNode(Node):
is_memview_broadcast = False
is_memview_copy_assignment = False
- saved_subexpr_nodes = None
is_temp = False
has_temp_moved = False # if True then attempting to do anything but free the temp is invalid
is_target = False
@@ -548,7 +547,7 @@ class ExprNode(Node):
if is_pythran_supported_node_or_none(self):
return to_pythran(self)
- assert(type_ is not None)
+ assert type_ is not None
return to_pythran(self, type_)
def is_c_result_required(self):
@@ -1103,6 +1102,8 @@ class ExprNode(Node):
type = self.type
if type.is_enum or type.is_error:
return self
+ elif type is PyrexTypes.c_bint_type:
+ return self
elif type.is_pyobject or type.is_int or type.is_ptr or type.is_float:
return CoerceToBooleanNode(self, env)
elif type.is_cpp_class and type.scope and type.scope.lookup("operator bool"):
@@ -1530,14 +1531,23 @@ class FloatNode(ConstNode):
def _analyse_name_as_type(name, pos, env):
- type = PyrexTypes.parse_basic_type(name)
- if type is not None:
- return type
+ ctype = PyrexTypes.parse_basic_type(name)
+ if ctype is not None and env.in_c_type_context:
+ return ctype
global_entry = env.global_scope().lookup(name)
- if global_entry and global_entry.is_type and global_entry.type:
- return global_entry.type
+ if global_entry and global_entry.is_type:
+ type = global_entry.type
+ if (not env.in_c_type_context and
+ name == 'int' and type is Builtin.int_type):
+ # While we still support Python2 this needs to be downgraded
+ # to a generic Python object to include both int and long
+ type = py_object_type
+ if type and (type.is_pyobject or env.in_c_type_context):
+ return type
+ ctype = ctype or type
+ # This is fairly heavy, so it's worth trying some easier things above.
from .TreeFragment import TreeFragment
with local_errors(ignore=True):
pos = (pos[0], pos[1], pos[2]-7)
@@ -1550,8 +1560,11 @@ def _analyse_name_as_type(name, pos, env):
if isinstance(sizeof_node, SizeofTypeNode):
sizeof_node = sizeof_node.analyse_types(env)
if isinstance(sizeof_node, SizeofTypeNode):
- return sizeof_node.arg_type
- return None
+ type = sizeof_node.arg_type
+ if type and (type.is_pyobject or env.in_c_type_context):
+ return type
+ ctype = ctype or type
+ return ctype
class BytesNode(ConstNode):
@@ -2025,6 +2038,8 @@ class NameNode(AtomicExprNode):
# annotations never create global cdef names
if env.is_module_scope:
return
+
+ modifiers = ()
if (
# name: "description" => not a type, but still a declared variable or attribute
annotation.expr.is_string_literal
@@ -2036,10 +2051,11 @@ class NameNode(AtomicExprNode):
# For Python class scopes every attribute is a Python object
atype = py_object_type
else:
- _, atype = annotation.analyse_type_annotation(env)
+ modifiers, atype = annotation.analyse_type_annotation(env)
+
if atype is None:
atype = unspecified_type if as_target and env.directives['infer_types'] != False else py_object_type
- if atype.is_fused and env.fused_to_specific:
+ elif atype.is_fused and env.fused_to_specific:
try:
atype = atype.specialize(env.fused_to_specific)
except CannotSpecialize:
@@ -2047,25 +2063,26 @@ class NameNode(AtomicExprNode):
"'%s' cannot be specialized since its type is not a fused argument to this function" %
self.name)
atype = error_type
+
visibility = 'private'
- if 'dataclasses.dataclass' in env.directives:
+ if env.is_c_dataclass_scope:
# handle "frozen" directive - full inspection of the dataclass directives happens
# in Dataclass.py
- frozen_directive = None
- dataclass_directive = env.directives['dataclasses.dataclass']
- if dataclass_directive:
- dataclass_directive_kwds = dataclass_directive[1]
- frozen_directive = dataclass_directive_kwds.get('frozen', None)
- is_frozen = frozen_directive and frozen_directive.is_literal and frozen_directive.value
+ is_frozen = env.is_c_dataclass_scope == "frozen"
if atype.is_pyobject or atype.can_coerce_to_pyobject(env):
visibility = 'readonly' if is_frozen else 'public'
# If the object can't be coerced that's fine - we just don't create a property
+
if as_target and env.is_c_class_scope and not (atype.is_pyobject or atype.is_error):
# TODO: this will need revising slightly if annotated cdef attributes are implemented
atype = py_object_type
warning(annotation.pos, "Annotation ignored since class-level attributes must be Python objects. "
"Were you trying to set up an instance attribute?", 2)
- entry = self.entry = env.declare_var(name, atype, self.pos, is_cdef=not as_target, visibility=visibility)
+
+ entry = self.entry = env.declare_var(
+ name, atype, self.pos, is_cdef=not as_target, visibility=visibility,
+ pytyping_modifiers=modifiers)
+
# Even if the entry already exists, make sure we're supplying an annotation if we can.
if annotation and not entry.annotation:
entry.annotation = annotation
@@ -2085,23 +2102,42 @@ class NameNode(AtomicExprNode):
return None
def analyse_as_type(self, env):
+ type = None
if self.cython_attribute:
type = PyrexTypes.parse_basic_type(self.cython_attribute)
- else:
+ elif env.in_c_type_context:
type = PyrexTypes.parse_basic_type(self.name)
if type:
return type
+
entry = self.entry
if not entry:
entry = env.lookup(self.name)
- if entry and entry.is_type:
- return entry.type
- elif entry and entry.known_standard_library_import:
+ if entry and not entry.is_type and entry.known_standard_library_import:
entry = Builtin.get_known_standard_library_entry(entry.known_standard_library_import)
- if entry and entry.is_type:
- return entry.type
- else:
- return None
+ if entry and entry.is_type:
+ # Infer equivalent C types instead of Python types when possible.
+ type = entry.type
+ if not env.in_c_type_context and type is Builtin.long_type:
+ # Try to give a helpful warning when users write plain C type names.
+ warning(self.pos, "Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?")
+ type = py_object_type
+ elif type.is_pyobject and type.equivalent_type:
+ type = type.equivalent_type
+ elif type is Builtin.int_type:
+ # while we still support Python 2 this must be an object
+ # so that it can be either int or long
+ type = py_object_type
+ return type
+ if self.name == 'object':
+ # This is normally parsed as "simple C type", but not if we don't parse C types.
+ return py_object_type
+
+ # Try to give a helpful warning when users write plain C type names.
+ if not env.in_c_type_context and PyrexTypes.parse_basic_type(self.name):
+ warning(self.pos, "Found C type '%s' in a Python annotation. Did you mean to use a Python type?" % self.name)
+
+ return None
def analyse_as_extension_type(self, env):
# Try to interpret this as a reference to an extension type.
@@ -2131,10 +2167,13 @@ class NameNode(AtomicExprNode):
self.entry.known_standard_library_import = "" # already exists somewhere and so is now ambiguous
if not self.entry and self.annotation is not None:
# name : type = ...
- is_dataclass = 'dataclasses.dataclass' in env.directives
+ is_dataclass = env.is_c_dataclass_scope
# In a dataclass, an assignment should not prevent a name from becoming an instance attribute.
# Hence, "as_target = not is_dataclass".
self.declare_from_annotation(env, as_target=not is_dataclass)
+ elif (self.entry and self.entry.is_inherited and
+ self.annotation and env.is_c_dataclass_scope):
+ error(self.pos, "Cannot redeclare inherited fields in Cython dataclasses")
if not self.entry:
if env.directives['warn.undeclared']:
warning(self.pos, "implicit declaration of '%s'" % self.name, 1)
@@ -2761,7 +2800,98 @@ class ImportNode(ExprNode):
return self.module_name.value
-class IteratorNode(ExprNode):
+class ScopedExprNode(ExprNode):
+ # Abstract base class for ExprNodes that have their own local
+ # scope, such as generator expressions.
+ #
+ # expr_scope Scope the inner scope of the expression
+
+ subexprs = []
+ expr_scope = None
+
+ # does this node really have a local scope, e.g. does it leak loop
+ # variables or not? non-leaking Py3 behaviour is default, except
+ # for list comprehensions where the behaviour differs in Py2 and
+ # Py3 (set in Parsing.py based on parser context)
+ has_local_scope = True
+
+ def init_scope(self, outer_scope, expr_scope=None):
+ if expr_scope is not None:
+ self.expr_scope = expr_scope
+ elif self.has_local_scope:
+ self.expr_scope = Symtab.ComprehensionScope(outer_scope)
+ elif not self.expr_scope: # don't unset if it's already been set
+ self.expr_scope = None
+
+ def analyse_declarations(self, env):
+ self.init_scope(env)
+
+ def analyse_scoped_declarations(self, env):
+ # this is called with the expr_scope as env
+ pass
+
+ def analyse_types(self, env):
+ # no recursion here, the children will be analysed separately below
+ return self
+
+ def analyse_scoped_expressions(self, env):
+ # this is called with the expr_scope as env
+ return self
+
+ def generate_evaluation_code(self, code):
+ # set up local variables and free their references on exit
+ generate_inner_evaluation_code = super(ScopedExprNode, self).generate_evaluation_code
+ if not self.has_local_scope or not self.expr_scope.var_entries:
+ # no local variables => delegate, done
+ generate_inner_evaluation_code(code)
+ return
+
+ code.putln('{ /* enter inner scope */')
+ py_entries = []
+ for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]):
+ if not entry.in_closure:
+ if entry.type.is_pyobject and entry.used:
+ py_entries.append(entry)
+ if not py_entries:
+ # no local Python references => no cleanup required
+ generate_inner_evaluation_code(code)
+ code.putln('} /* exit inner scope */')
+ return
+
+ # must free all local Python references at each exit point
+ old_loop_labels = code.new_loop_labels()
+ old_error_label = code.new_error_label()
+
+ generate_inner_evaluation_code(code)
+
+ # normal (non-error) exit
+ self._generate_vars_cleanup(code, py_entries)
+
+ # error/loop body exit points
+ exit_scope = code.new_label('exit_scope')
+ code.put_goto(exit_scope)
+ for label, old_label in ([(code.error_label, old_error_label)] +
+ list(zip(code.get_loop_labels(), old_loop_labels))):
+ if code.label_used(label):
+ code.put_label(label)
+ self._generate_vars_cleanup(code, py_entries)
+ code.put_goto(old_label)
+ code.put_label(exit_scope)
+ code.putln('} /* exit inner scope */')
+
+ code.set_loop_labels(old_loop_labels)
+ code.error_label = old_error_label
+
+ def _generate_vars_cleanup(self, code, py_entries):
+ for entry in py_entries:
+ if entry.is_cglobal:
+ code.put_var_gotref(entry)
+ code.put_var_decref_set(entry, "Py_None")
+ else:
+ code.put_var_xdecref_clear(entry)
+
+
+class IteratorNode(ScopedExprNode):
# Used as part of for statement implementation.
#
# Implements result = iter(sequence)
@@ -2773,10 +2903,13 @@ class IteratorNode(ExprNode):
counter_cname = None
reversed = False # currently only used for list/tuple types (see Optimize.py)
is_async = False
+ has_local_scope = False
subexprs = ['sequence']
def analyse_types(self, env):
+ if self.expr_scope:
+ env = self.expr_scope # actually evaluate sequence in this scope instead
self.sequence = self.sequence.analyse_types(env)
if (self.sequence.type.is_array or self.sequence.type.is_ptr) and \
not self.sequence.type.is_string:
@@ -2784,6 +2917,9 @@ class IteratorNode(ExprNode):
self.type = self.sequence.type
elif self.sequence.type.is_cpp_class:
return CppIteratorNode(self.pos, sequence=self.sequence).analyse_types(env)
+ elif self.is_reversed_cpp_iteration():
+ sequence = self.sequence.arg_tuple.args[0].arg
+ return CppIteratorNode(self.pos, sequence=sequence, reversed=True).analyse_types(env)
else:
self.sequence = self.sequence.coerce_to_pyobject(env)
if self.sequence.type in (list_type, tuple_type):
@@ -2798,8 +2934,27 @@ class IteratorNode(ExprNode):
PyrexTypes.CFuncTypeArg("it", PyrexTypes.py_object_type, None),
]))
+ def is_reversed_cpp_iteration(self):
+ """
+ Returns True if the 'reversed' function is applied to a C++ iterable.
+
+ This supports C++ classes with reverse_iterator implemented.
+ """
+ if not (isinstance(self.sequence, SimpleCallNode) and
+ self.sequence.arg_tuple and len(self.sequence.arg_tuple.args) == 1):
+ return False
+ func = self.sequence.function
+ if func.is_name and func.name == "reversed":
+ if not func.entry.is_builtin:
+ return False
+ arg = self.sequence.arg_tuple.args[0]
+ if isinstance(arg, CoercionNode) and arg.arg.is_name:
+ arg = arg.arg.entry
+ return arg.type.is_cpp_class
+ return False
+
def type_dependencies(self, env):
- return self.sequence.type_dependencies(env)
+ return self.sequence.type_dependencies(self.expr_scope or env)
def infer_type(self, env):
sequence_type = self.sequence.infer_type(env)
@@ -2961,25 +3116,30 @@ class CppIteratorNode(ExprNode):
cpp_attribute_op = "."
extra_dereference = ""
is_temp = True
+ reversed = False
subexprs = ['sequence']
+ def get_iterator_func_names(self):
+ return ("begin", "end") if not self.reversed else ("rbegin", "rend")
+
def analyse_types(self, env):
sequence_type = self.sequence.type
if sequence_type.is_ptr:
sequence_type = sequence_type.base_type
- begin = sequence_type.scope.lookup("begin")
- end = sequence_type.scope.lookup("end")
+ begin_name, end_name = self.get_iterator_func_names()
+ begin = sequence_type.scope.lookup(begin_name)
+ end = sequence_type.scope.lookup(end_name)
if (begin is None
or not begin.type.is_cfunction
or begin.type.args):
- error(self.pos, "missing begin() on %s" % self.sequence.type)
+ error(self.pos, "missing %s() on %s" % (begin_name, self.sequence.type))
self.type = error_type
return self
if (end is None
or not end.type.is_cfunction
or end.type.args):
- error(self.pos, "missing end() on %s" % self.sequence.type)
+ error(self.pos, "missing %s() on %s" % (end_name, self.sequence.type))
self.type = error_type
return self
iter_type = begin.type.return_type
@@ -2990,37 +3150,40 @@ class CppIteratorNode(ExprNode):
self.pos,
"!=",
[iter_type, end.type.return_type]) is None:
- error(self.pos, "missing operator!= on result of begin() on %s" % self.sequence.type)
+ error(self.pos, "missing operator!= on result of %s() on %s" % (begin_name, self.sequence.type))
self.type = error_type
return self
if env.lookup_operator_for_types(self.pos, '++', [iter_type]) is None:
- error(self.pos, "missing operator++ on result of begin() on %s" % self.sequence.type)
+ error(self.pos, "missing operator++ on result of %s() on %s" % (begin_name, self.sequence.type))
self.type = error_type
return self
if env.lookup_operator_for_types(self.pos, '*', [iter_type]) is None:
- error(self.pos, "missing operator* on result of begin() on %s" % self.sequence.type)
+ error(self.pos, "missing operator* on result of %s() on %s" % (begin_name, self.sequence.type))
self.type = error_type
return self
self.type = iter_type
elif iter_type.is_ptr:
if not (iter_type == end.type.return_type):
- error(self.pos, "incompatible types for begin() and end()")
+ error(self.pos, "incompatible types for %s() and %s()" % (begin_name, end_name))
self.type = iter_type
else:
- error(self.pos, "result type of begin() on %s must be a C++ class or pointer" % self.sequence.type)
+ error(self.pos, "result type of %s() on %s must be a C++ class or pointer" % (begin_name, self.sequence.type))
self.type = error_type
return self
def generate_result_code(self, code):
sequence_type = self.sequence.type
+ begin_name, _ = self.get_iterator_func_names()
# essentially 3 options:
- if self.sequence.is_name or self.sequence.is_attribute:
- # 1) is a name and can be accessed directly;
+ if self.sequence.is_simple():
+ # 1) Sequence can be accessed directly, like a name;
# assigning to it may break the container, but that's the responsibility
# of the user
- code.putln("%s = %s%sbegin();" % (self.result(),
- self.sequence.result(),
- self.cpp_attribute_op))
+ code.putln("%s = %s%s%s();" % (
+ self.result(),
+ self.sequence.result(),
+ self.cpp_attribute_op,
+ begin_name))
else:
# (while it'd be nice to limit the scope of the loop temp, it's essentially
# impossible to do while supporting generators)
@@ -3038,23 +3201,50 @@ class CppIteratorNode(ExprNode):
code.putln("%s = %s%s;" % (self.cpp_sequence_cname,
"&" if temp_type.is_ptr else "",
self.sequence.move_result_rhs()))
- code.putln("%s = %s%sbegin();" % (self.result(), self.cpp_sequence_cname,
- self.cpp_attribute_op))
+ code.putln("%s = %s%s%s();" % (
+ self.result(),
+ self.cpp_sequence_cname,
+ self.cpp_attribute_op,
+ begin_name))
def generate_iter_next_result_code(self, result_name, code):
# end call isn't cached to support containers that allow adding while iterating
# (much as this is usually a bad idea)
- code.putln("if (!(%s%s != %s%send())) break;" % (
+ _, end_name = self.get_iterator_func_names()
+ code.putln("if (!(%s%s != %s%s%s())) break;" % (
self.extra_dereference,
self.result(),
self.cpp_sequence_cname or self.sequence.result(),
- self.cpp_attribute_op))
+ self.cpp_attribute_op,
+ end_name))
code.putln("%s = *%s%s;" % (
result_name,
self.extra_dereference,
self.result()))
code.putln("++%s%s;" % (self.extra_dereference, self.result()))
+ def generate_subexpr_disposal_code(self, code):
+ if not self.cpp_sequence_cname:
+ # the sequence is accessed directly so any temporary result in its
+ # subexpressions must remain available until the iterator is not needed
+ return
+ ExprNode.generate_subexpr_disposal_code(self, code)
+
+ def free_subexpr_temps(self, code):
+ if not self.cpp_sequence_cname:
+ # the sequence is accessed directly so any temporary result in its
+ # subexpressions must remain available until the iterator is not needed
+ return
+ ExprNode.free_subexpr_temps(self, code)
+
+ def generate_disposal_code(self, code):
+ if not self.cpp_sequence_cname:
+ # postponed from CppIteratorNode.generate_subexpr_disposal_code
+ # and CppIteratorNode.free_subexpr_temps
+ ExprNode.generate_subexpr_disposal_code(self, code)
+ ExprNode.free_subexpr_temps(self, code)
+ ExprNode.generate_disposal_code(self, code)
+
def free_temps(self, code):
if self.cpp_sequence_cname:
code.funcstate.release_temp(self.cpp_sequence_cname)
@@ -3062,6 +3252,32 @@ class CppIteratorNode(ExprNode):
ExprNode.free_temps(self, code)
+def remove_const(item_type):
+ """
+ Removes the constness of a given type and its underlying templates
+ if any.
+
+ This is to solve the compilation error when the temporary variable used to
+ store the result of an iterator cannot be changed due to its constness.
+ For example, the value_type of std::map, which will also be the type of
+ the temporary variable, is std::pair<const Key, T>. This means the first
+ component of the variable cannot be reused to store the result of each
+ iteration, which leads to a compilation error.
+ """
+ if item_type.is_const:
+ item_type = item_type.cv_base_type
+ if item_type.is_typedef:
+ item_type = remove_const(item_type.typedef_base_type)
+ if item_type.is_cpp_class and item_type.templates:
+ templates = [remove_const(t) if t.is_const else t for t in item_type.templates]
+ template_type = item_type.template_type
+ item_type = PyrexTypes.CppClassType(
+ template_type.name, template_type.scope,
+ template_type.cname, template_type.base_classes,
+ templates, template_type)
+ return item_type
+
+
class NextNode(AtomicExprNode):
# Used as part of for statement implementation.
# Implements result = next(iterator)
@@ -3104,6 +3320,7 @@ class NextNode(AtomicExprNode):
def analyse_types(self, env):
self.type = self.infer_type(env, self.iterator.type)
+ self.type = remove_const(self.type)
self.is_temp = 1
return self
@@ -3111,7 +3328,7 @@ class NextNode(AtomicExprNode):
self.iterator.generate_iter_next_result_code(self.result(), code)
-class AsyncIteratorNode(ExprNode):
+class AsyncIteratorNode(ScopedExprNode):
# Used as part of 'async for' statement implementation.
#
# Implements result = sequence.__aiter__()
@@ -3123,11 +3340,14 @@ class AsyncIteratorNode(ExprNode):
is_async = True
type = py_object_type
is_temp = 1
+ has_local_scope = False
def infer_type(self, env):
return py_object_type
def analyse_types(self, env):
+ if self.expr_scope:
+ env = self.expr_scope
self.sequence = self.sequence.analyse_types(env)
if not self.sequence.type.is_pyobject:
error(self.pos, "async for loops not allowed on C/C++ types")
@@ -3702,6 +3922,18 @@ class IndexNode(_IndexingBaseNode):
error(self.pos, "Array size must be a compile time constant")
return None
+ def analyse_pytyping_modifiers(self, env):
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ # TODO: somehow bring this together with TemplatedTypeNode.analyse_pytyping_modifiers()
+ modifiers = []
+ modifier_node = self
+ while modifier_node.is_subscript:
+ modifier_type = modifier_node.base.analyse_as_type(env)
+ if modifier_type.python_type_constructor_name and modifier_type.modifier_name:
+ modifiers.append(modifier_type.modifier_name)
+ modifier_node = modifier_node.index
+ return modifiers
+
def type_dependencies(self, env):
return self.base.type_dependencies(env) + self.index.type_dependencies(env)
@@ -3932,12 +4164,16 @@ class IndexNode(_IndexingBaseNode):
if base_type in (list_type, tuple_type) and self.index.type.is_int:
item_type = infer_sequence_item_type(
env, self.base, self.index, seq_type=base_type)
- if item_type is None:
- item_type = py_object_type
- self.type = item_type
if base_type in (list_type, tuple_type, dict_type):
# do the None check explicitly (not in a helper) to allow optimising it away
self.base = self.base.as_none_safe_node("'NoneType' object is not subscriptable")
+ if item_type is None or not item_type.is_pyobject:
+ # Even if we inferred a C type as result, we will read a Python object, so trigger coercion if needed.
+ # We could potentially use "item_type.equivalent_type" here, but that may trigger assumptions
+ # about the actual runtime item types, rather than just their ability to coerce to the C "item_type".
+ self.type = py_object_type
+ else:
+ self.type = item_type
self.wrap_in_nonecheck_node(env, getting)
return self
@@ -4233,6 +4469,7 @@ class IndexNode(_IndexingBaseNode):
return
utility_code = None
+ error_value = None
if self.type.is_pyobject:
error_value = 'NULL'
if self.index.type.is_int:
@@ -4268,8 +4505,8 @@ class IndexNode(_IndexingBaseNode):
error_value = '-1'
utility_code = UtilityCode.load_cached("GetItemIntByteArray", "StringTools.c")
elif not (self.base.type.is_cpp_class and self.exception_check):
- assert False, "unexpected type %s and base type %s for indexing" % (
- self.type, self.base.type)
+ assert False, "unexpected type %s and base type %s for indexing (%s)" % (
+ self.type, self.base.type, self.pos)
if utility_code is not None:
code.globalstate.use_utility_code(utility_code)
@@ -4582,17 +4819,17 @@ class BufferIndexNode(_IndexingBaseNode):
buffer_entry, ptrexpr = self.buffer_lookup_code(code)
if self.buffer_type.dtype.is_pyobject:
- # Must manage refcounts. Decref what is already there
- # and incref what we put in.
+ # Must manage refcounts. XDecref what is already there
+ # and incref what we put in (NumPy allows there to be NULL)
ptr = code.funcstate.allocate_temp(buffer_entry.buf_ptr_type,
manage_ref=False)
rhs_code = rhs.result()
code.putln("%s = %s;" % (ptr, ptrexpr))
- code.put_gotref("*%s" % ptr, self.buffer_type.dtype)
- code.putln("__Pyx_INCREF(%s); __Pyx_DECREF(*%s);" % (
+ code.put_xgotref("*%s" % ptr, self.buffer_type.dtype)
+ code.putln("__Pyx_INCREF(%s); __Pyx_XDECREF(*%s);" % (
rhs_code, ptr))
code.putln("*%s %s= %s;" % (ptr, op, rhs_code))
- code.put_giveref("*%s" % ptr, self.buffer_type.dtype)
+ code.put_xgiveref("*%s" % ptr, self.buffer_type.dtype)
code.funcstate.release_temp(ptr)
else:
# Simple case
@@ -4613,8 +4850,11 @@ class BufferIndexNode(_IndexingBaseNode):
# is_temp is True, so must pull out value and incref it.
# NOTE: object temporary results for nodes are declared
# as PyObject *, so we need a cast
- code.putln("%s = (PyObject *) *%s;" % (self.result(), self.buffer_ptr_code))
- code.putln("__Pyx_INCREF((PyObject*)%s);" % self.result())
+ res = self.result()
+ code.putln("%s = (PyObject *) *%s;" % (res, self.buffer_ptr_code))
+ # NumPy does (occasionally) allow NULL to denote None.
+ code.putln("if (unlikely(%s == NULL)) %s = Py_None;" % (res, res))
+ code.putln("__Pyx_INCREF((PyObject*)%s);" % res)
def free_subexpr_temps(self, code):
for temp in self.index_temps:
@@ -6991,6 +7231,35 @@ class AttributeNode(ExprNode):
self.entry = entry.as_variable
self.analyse_as_python_attribute(env)
return self
+ elif entry and entry.is_cfunction and self.obj.type is not Builtin.type_type:
+ # "bound" cdef function.
+ # This implementation is likely a little inefficient and could be improved.
+ # Essentially it does:
+ # __import__("functools").partial(coerce_to_object(self), self.obj)
+ from .UtilNodes import EvalWithTempExprNode, ResultRefNode
+ # take self.obj out to a temp because it's used twice
+ obj_node = ResultRefNode(self.obj, type=self.obj.type)
+ obj_node.result_ctype = self.obj.result_ctype
+ self.obj = obj_node
+ unbound_node = ExprNode.coerce_to(self, dst_type, env)
+ utility_code=UtilityCode.load_cached(
+ "PyMethodNew2Arg", "ObjectHandling.c"
+ )
+ func_type = PyrexTypes.CFuncType(
+ PyrexTypes.py_object_type, [
+ PyrexTypes.CFuncTypeArg("func", PyrexTypes.py_object_type, None),
+ PyrexTypes.CFuncTypeArg("self", PyrexTypes.py_object_type, None)
+ ],
+ )
+ binding_call = PythonCapiCallNode(
+ self.pos,
+ function_name="__Pyx_PyMethod_New2Arg",
+ func_type=func_type,
+ args=[unbound_node, obj_node],
+ utility_code=utility_code,
+ )
+ complete_call = EvalWithTempExprNode(obj_node, binding_call)
+ return complete_call.analyse_types(env)
return ExprNode.coerce_to(self, dst_type, env)
def calculate_constant_result(self):
@@ -8104,7 +8373,7 @@ class SequenceNode(ExprNode):
code.put_decref(target_list, py_object_type)
code.putln('%s = %s; %s = NULL;' % (target_list, sublist_temp, sublist_temp))
code.putln('#else')
- code.putln('(void)%s;' % sublist_temp) # avoid warning about unused variable
+ code.putln('CYTHON_UNUSED_VAR(%s);' % sublist_temp)
code.funcstate.release_temp(sublist_temp)
code.putln('#endif')
@@ -8417,97 +8686,6 @@ class ListNode(SequenceNode):
raise InternalError("List type never specified")
-class ScopedExprNode(ExprNode):
- # Abstract base class for ExprNodes that have their own local
- # scope, such as generator expressions.
- #
- # expr_scope Scope the inner scope of the expression
-
- subexprs = []
- expr_scope = None
-
- # does this node really have a local scope, e.g. does it leak loop
- # variables or not? non-leaking Py3 behaviour is default, except
- # for list comprehensions where the behaviour differs in Py2 and
- # Py3 (set in Parsing.py based on parser context)
- has_local_scope = True
-
- def init_scope(self, outer_scope, expr_scope=None):
- if expr_scope is not None:
- self.expr_scope = expr_scope
- elif self.has_local_scope:
- self.expr_scope = Symtab.ComprehensionScope(outer_scope)
- else:
- self.expr_scope = None
-
- def analyse_declarations(self, env):
- self.init_scope(env)
-
- def analyse_scoped_declarations(self, env):
- # this is called with the expr_scope as env
- pass
-
- def analyse_types(self, env):
- # no recursion here, the children will be analysed separately below
- return self
-
- def analyse_scoped_expressions(self, env):
- # this is called with the expr_scope as env
- return self
-
- def generate_evaluation_code(self, code):
- # set up local variables and free their references on exit
- generate_inner_evaluation_code = super(ScopedExprNode, self).generate_evaluation_code
- if not self.has_local_scope or not self.expr_scope.var_entries:
- # no local variables => delegate, done
- generate_inner_evaluation_code(code)
- return
-
- code.putln('{ /* enter inner scope */')
- py_entries = []
- for _, entry in sorted(item for item in self.expr_scope.entries.items() if item[0]):
- if not entry.in_closure:
- if entry.type.is_pyobject and entry.used:
- py_entries.append(entry)
- if not py_entries:
- # no local Python references => no cleanup required
- generate_inner_evaluation_code(code)
- code.putln('} /* exit inner scope */')
- return
-
- # must free all local Python references at each exit point
- old_loop_labels = code.new_loop_labels()
- old_error_label = code.new_error_label()
-
- generate_inner_evaluation_code(code)
-
- # normal (non-error) exit
- self._generate_vars_cleanup(code, py_entries)
-
- # error/loop body exit points
- exit_scope = code.new_label('exit_scope')
- code.put_goto(exit_scope)
- for label, old_label in ([(code.error_label, old_error_label)] +
- list(zip(code.get_loop_labels(), old_loop_labels))):
- if code.label_used(label):
- code.put_label(label)
- self._generate_vars_cleanup(code, py_entries)
- code.put_goto(old_label)
- code.put_label(exit_scope)
- code.putln('} /* exit inner scope */')
-
- code.set_loop_labels(old_loop_labels)
- code.error_label = old_error_label
-
- def _generate_vars_cleanup(self, code, py_entries):
- for entry in py_entries:
- if entry.is_cglobal:
- code.put_var_gotref(entry)
- code.put_var_decref_set(entry, "Py_None")
- else:
- code.put_var_xdecref_clear(entry)
-
-
class ComprehensionNode(ScopedExprNode):
# A list/set/dict comprehension
@@ -8522,6 +8700,12 @@ class ComprehensionNode(ScopedExprNode):
def analyse_declarations(self, env):
self.append.target = self # this is used in the PyList_Append of the inner loop
self.init_scope(env)
+ # set up loop scope
+ if isinstance(self.loop, Nodes._ForInStatNode):
+ assert isinstance(self.loop.iterator, ScopedExprNode), self.loop.iterator
+ self.loop.iterator.init_scope(None, env)
+ else:
+ assert isinstance(self.loop, Nodes.ForFromStatNode), self.loop
def analyse_scoped_declarations(self, env):
self.loop.analyse_declarations(env)
@@ -8699,7 +8883,7 @@ class MergedSequenceNode(ExprNode):
if type in (list_type, tuple_type) and args and args[0].is_sequence_constructor:
# construct a list directly from the first argument that we can then extend
if args[0].type is not list_type:
- args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True)
+ args[0] = ListNode(args[0].pos, args=args[0].args, is_temp=True, mult_factor=args[0].mult_factor)
ExprNode.__init__(self, pos, args=args, type=type)
def calculate_constant_result(self):
@@ -9790,6 +9974,12 @@ class CodeObjectNode(ExprNode):
flags.append('CO_VARARGS')
if self.def_node.starstar_arg:
flags.append('CO_VARKEYWORDS')
+ if self.def_node.is_asyncgen:
+ flags.append('CO_ASYNC_GENERATOR')
+ elif self.def_node.is_coroutine:
+ flags.append('CO_COROUTINE')
+ elif self.def_node.is_generator:
+ flags.append('CO_GENERATOR')
code.putln("%s = (PyObject*)__Pyx_PyCode_New(%d, %d, %d, %d, 0, %s, %s, %s, %s, %s, %s, %s, %s, %s, %d, %s); %s" % (
self.result_code,
@@ -9940,10 +10130,18 @@ class GeneratorExpressionNode(LambdaNode):
#
# loop ForStatNode the for-loop, containing a YieldExprNode
# def_node DefNode the underlying generator 'def' node
+ # call_parameters [ExprNode] (Internal) parameters passed to the DefNode call
name = StringEncoding.EncodedString('genexpr')
binding = False
+ child_attrs = LambdaNode.child_attrs + ["call_parameters"]
+ subexprs = LambdaNode.subexprs + ["call_parameters"]
+
+ def __init__(self, pos, *args, **kwds):
+ super(GeneratorExpressionNode, self).__init__(pos, *args, **kwds)
+ self.call_parameters = []
+
def analyse_declarations(self, env):
if hasattr(self, "genexpr_name"):
# this if-statement makes it safe to run twice
@@ -9956,13 +10154,22 @@ class GeneratorExpressionNode(LambdaNode):
self.def_node.is_cyfunction = False
# Force genexpr signature
self.def_node.entry.signature = TypeSlots.pyfunction_noargs
+ # set up loop scope
+ if isinstance(self.loop, Nodes._ForInStatNode):
+ assert isinstance(self.loop.iterator, ScopedExprNode)
+ self.loop.iterator.init_scope(None, env)
+ else:
+ assert isinstance(self.loop, Nodes.ForFromStatNode)
def generate_result_code(self, code):
+ args_to_call = ([self.closure_result_code()] +
+ [ cp.result() for cp in self.call_parameters ])
+ args_to_call = ", ".join(args_to_call)
code.putln(
'%s = %s(%s); %s' % (
self.result(),
self.def_node.entry.pyfunc_cname,
- self.closure_result_code(),
+ args_to_call,
code.error_goto_if_null(self.result(), self.pos)))
self.generate_gotref(code)
@@ -10066,6 +10273,8 @@ class YieldExprNode(ExprNode):
if type.is_pyobject:
code.putln('%s = 0;' % save_cname)
code.put_xgotref(cname, type)
+ elif type.is_memoryviewslice:
+ code.putln('%s.memview = NULL; %s.data = NULL;' % (save_cname, save_cname))
self.generate_sent_value_handling_code(code, Naming.sent_value_cname)
if self.result_is_used:
self.allocate_temp_result(code)
@@ -10289,6 +10498,7 @@ class UnopNode(ExprNode):
subexprs = ['operand']
infix = True
+ is_inc_dec_op = False
def calculate_constant_result(self):
func = compile_time_unary_operators[self.operator]
@@ -10400,7 +10610,10 @@ class UnopNode(ExprNode):
self.type = PyrexTypes.error_type
def analyse_cpp_operation(self, env, overload_check=True):
- entry = env.lookup_operator(self.operator, [self.operand])
+ operand_types = [self.operand.type]
+ if self.is_inc_dec_op and not self.is_prefix:
+ operand_types.append(PyrexTypes.c_int_type)
+ entry = env.lookup_operator_for_types(self.pos, self.operator, operand_types)
if overload_check and not entry:
self.type_error()
return
@@ -10414,7 +10627,12 @@ class UnopNode(ExprNode):
else:
self.exception_check = ''
self.exception_value = ''
- cpp_type = self.operand.type.find_cpp_operation_type(self.operator)
+ if self.is_inc_dec_op and not self.is_prefix:
+ cpp_type = self.operand.type.find_cpp_operation_type(
+ self.operator, operand_type=PyrexTypes.c_int_type
+ )
+ else:
+ cpp_type = self.operand.type.find_cpp_operation_type(self.operator)
if overload_check and cpp_type is None:
error(self.pos, "'%s' operator not defined for %s" % (
self.operator, type))
@@ -10556,6 +10774,17 @@ class DereferenceNode(CUnopNode):
class DecrementIncrementNode(CUnopNode):
# unary ++/-- operator
+ is_inc_dec_op = True
+
+ def type_error(self):
+ if not self.operand.type.is_error:
+ if self.is_prefix:
+ error(self.pos, "No match for 'operator%s' (operand type is '%s')" %
+ (self.operator, self.operand.type))
+ else:
+ error(self.pos, "No 'operator%s(int)' declared for postfix '%s' (operand type is '%s')" %
+ (self.operator, self.operator, self.operand.type))
+ self.type = PyrexTypes.error_type
def analyse_c_operation(self, env):
if self.operand.type.is_numeric:
@@ -14020,10 +14249,8 @@ class AnnotationNode(ExprNode):
def analyse_type_annotation(self, env, assigned_value=None):
if self.untyped:
# Already applied as a fused type, not re-evaluating it here.
- return None, None
+ return [], None
annotation = self.expr
- base_type = None
- is_ambiguous = False
explicit_pytype = explicit_ctype = False
if annotation.is_dict_literal:
warning(annotation.pos,
@@ -14040,36 +14267,29 @@ class AnnotationNode(ExprNode):
annotation = value
if explicit_pytype and explicit_ctype:
warning(annotation.pos, "Duplicate type declarations found in signature annotation", level=1)
- arg_type = annotation.analyse_as_type(env)
- if annotation.is_name and not annotation.cython_attribute and annotation.name in ('int', 'long', 'float'):
- # Map builtin numeric Python types to C types in safe cases.
- if assigned_value is not None and arg_type is not None and not arg_type.is_pyobject:
- assigned_type = assigned_value.infer_type(env)
- if assigned_type and assigned_type.is_pyobject:
- # C type seems unsafe, e.g. due to 'None' default value => ignore annotation type
- is_ambiguous = True
- arg_type = None
- # ignore 'int' and require 'cython.int' to avoid unsafe integer declarations
- if arg_type in (PyrexTypes.c_long_type, PyrexTypes.c_int_type, PyrexTypes.c_float_type):
- arg_type = PyrexTypes.c_double_type if annotation.name == 'float' else py_object_type
- elif arg_type is not None and annotation.is_string_literal:
+
+ with env.new_c_type_context(in_c_type_context=explicit_ctype):
+ arg_type = annotation.analyse_as_type(env)
+
+ if arg_type is None:
+ warning(annotation.pos, "Unknown type declaration in annotation, ignoring")
+ return [], arg_type
+
+ if annotation.is_string_literal:
warning(annotation.pos,
"Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.",
level=1)
- elif arg_type is not None and arg_type.is_complex:
+ if explicit_pytype and not explicit_ctype and not (arg_type.is_pyobject or arg_type.equivalent_type):
+ warning(annotation.pos,
+ "Python type declaration in signature annotation does not refer to a Python type")
+ if arg_type.is_complex:
# creating utility code needs to be special-cased for complex types
arg_type.create_declaration_utility_code(env)
- if arg_type is not None:
- if explicit_pytype and not explicit_ctype and not arg_type.is_pyobject:
- warning(annotation.pos,
- "Python type declaration in signature annotation does not refer to a Python type")
- base_type = Nodes.CAnalysedBaseTypeNode(
- annotation.pos, type=arg_type, is_arg=True)
- elif is_ambiguous:
- warning(annotation.pos, "Ambiguous types in annotation, ignoring")
- else:
- warning(annotation.pos, "Unknown type declaration in annotation, ignoring")
- return base_type, arg_type
+
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ modifiers = annotation.analyse_pytyping_modifiers(env) if annotation.is_subscript else []
+
+ return modifiers, arg_type
class AssignmentExpressionNode(ExprNode):
diff --git a/Cython/Compiler/FlowControl.pxd b/Cython/Compiler/FlowControl.pxd
index c876ee3b1..a15f86cf6 100644
--- a/Cython/Compiler/FlowControl.pxd
+++ b/Cython/Compiler/FlowControl.pxd
@@ -36,6 +36,7 @@ cdef class NameAssignment:
cdef public set refs
cdef public object bit
cdef public object inferred_type
+ cdef public object rhs_scope
cdef class AssignmentList:
cdef public object bit
@@ -58,12 +59,14 @@ cdef class ControlFlow:
cdef public dict assmts
+ cdef public Py_ssize_t in_try_block
+
cpdef newblock(self, ControlBlock parent=*)
cpdef nextblock(self, ControlBlock parent=*)
cpdef bint is_tracked(self, entry)
cpdef bint is_statically_assigned(self, entry)
cpdef mark_position(self, node)
- cpdef mark_assignment(self, lhs, rhs, entry)
+ cpdef mark_assignment(self, lhs, rhs, entry, rhs_scope=*)
cpdef mark_argument(self, lhs, rhs, entry)
cpdef mark_deletion(self, node, entry)
cpdef mark_reference(self, node, entry)
@@ -101,12 +104,11 @@ cdef class ControlFlowAnalysis(CythonTransform):
cdef object gv_ctx
cdef object constant_folder
cdef set reductions
- cdef list env_stack
- cdef list stack
+ cdef list stack # a stack of (env, flow) tuples
cdef object env
cdef ControlFlow flow
cdef object object_expr
cdef bint in_inplace_assignment
- cpdef mark_assignment(self, lhs, rhs=*)
+ cpdef mark_assignment(self, lhs, rhs=*, rhs_scope=*)
cpdef mark_position(self, node)
diff --git a/Cython/Compiler/FlowControl.py b/Cython/Compiler/FlowControl.py
index 4e0160e41..294bce9ee 100644
--- a/Cython/Compiler/FlowControl.py
+++ b/Cython/Compiler/FlowControl.py
@@ -110,6 +110,7 @@ class ControlFlow(object):
entries set tracked entries
loops list stack for loop descriptors
exceptions list stack for exception descriptors
+ in_try_block int track if we're in a try...except or try...finally block
"""
def __init__(self):
@@ -122,6 +123,7 @@ class ControlFlow(object):
self.exit_point = ExitBlock()
self.blocks.add(self.exit_point)
self.block = self.entry_point
+ self.in_try_block = 0
def newblock(self, parent=None):
"""Create floating block linked to `parent` if given.
@@ -170,9 +172,9 @@ class ControlFlow(object):
if self.block:
self.block.positions.add(node.pos[:2])
- def mark_assignment(self, lhs, rhs, entry):
+ def mark_assignment(self, lhs, rhs, entry, rhs_scope=None):
if self.block and self.is_tracked(entry):
- assignment = NameAssignment(lhs, rhs, entry)
+ assignment = NameAssignment(lhs, rhs, entry, rhs_scope=rhs_scope)
self.block.stats.append(assignment)
self.block.gen[entry] = assignment
self.entries.add(entry)
@@ -313,7 +315,7 @@ class ExceptionDescr(object):
class NameAssignment(object):
- def __init__(self, lhs, rhs, entry):
+ def __init__(self, lhs, rhs, entry, rhs_scope=None):
if lhs.cf_state is None:
lhs.cf_state = set()
self.lhs = lhs
@@ -324,16 +326,18 @@ class NameAssignment(object):
self.is_arg = False
self.is_deletion = False
self.inferred_type = None
+ # For generator expression targets, the rhs can have a different scope than the lhs.
+ self.rhs_scope = rhs_scope
def __repr__(self):
return '%s(entry=%r)' % (self.__class__.__name__, self.entry)
def infer_type(self):
- self.inferred_type = self.rhs.infer_type(self.entry.scope)
+ self.inferred_type = self.rhs.infer_type(self.rhs_scope or self.entry.scope)
return self.inferred_type
def type_dependencies(self):
- return self.rhs.type_dependencies(self.entry.scope)
+ return self.rhs.type_dependencies(self.rhs_scope or self.entry.scope)
@property
def type(self):
@@ -675,6 +679,14 @@ class AssignmentCollector(TreeVisitor):
class ControlFlowAnalysis(CythonTransform):
+ def find_in_stack(self, env):
+ if env == self.env:
+ return self.flow
+ for e, flow in reversed(self.stack):
+ if e is env:
+ return flow
+ assert False
+
def visit_ModuleNode(self, node):
dot_output = self.current_directives['control_flow.dot_output']
self.gv_ctx = GVContext() if dot_output else None
@@ -686,10 +698,9 @@ class ControlFlowAnalysis(CythonTransform):
self.reductions = set()
self.in_inplace_assignment = False
- self.env_stack = []
self.env = node.scope
- self.stack = []
self.flow = ControlFlow()
+ self.stack = [] # a stack of (env, flow) tuples
self.object_expr = TypedExprNode(PyrexTypes.py_object_type, may_be_none=True)
self.visitchildren(node)
@@ -706,9 +717,8 @@ class ControlFlowAnalysis(CythonTransform):
if arg.default:
self.visitchildren(arg)
self.visitchildren(node, ('decorators',))
- self.env_stack.append(self.env)
+ self.stack.append((self.env, self.flow))
self.env = node.local_scope
- self.stack.append(self.flow)
self.flow = ControlFlow()
# Collect all entries
@@ -749,8 +759,7 @@ class ControlFlowAnalysis(CythonTransform):
if self.gv_ctx is not None:
self.gv_ctx.add(GV(node.local_scope.name, self.flow))
- self.flow = self.stack.pop()
- self.env = self.env_stack.pop()
+ self.env, self.flow = self.stack.pop()
return node
def visit_DefNode(self, node):
@@ -763,7 +772,7 @@ class ControlFlowAnalysis(CythonTransform):
def visit_CTypeDefNode(self, node):
return node
- def mark_assignment(self, lhs, rhs=None):
+ def mark_assignment(self, lhs, rhs=None, rhs_scope=None):
if not self.flow.block:
return
if self.flow.exceptions:
@@ -780,7 +789,7 @@ class ControlFlowAnalysis(CythonTransform):
entry = self.env.lookup(lhs.name)
if entry is None: # TODO: This shouldn't happen...
return
- self.flow.mark_assignment(lhs, rhs, entry)
+ self.flow.mark_assignment(lhs, rhs, entry, rhs_scope=rhs_scope)
elif lhs.is_sequence_constructor:
for i, arg in enumerate(lhs.args):
if arg.is_starred:
@@ -977,10 +986,11 @@ class ControlFlowAnalysis(CythonTransform):
is_special = False
sequence = node.iterator.sequence
target = node.target
+ env = node.iterator.expr_scope or self.env
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
- entry = self.env.lookup(function.name)
+ entry = env.lookup(function.name)
if not entry or entry.is_builtin:
if function.name == 'reversed' and len(sequence.args) == 1:
sequence = sequence.args[0]
@@ -988,30 +998,32 @@ class ControlFlowAnalysis(CythonTransform):
if target.is_sequence_constructor and len(target.args) == 2:
iterator = sequence.args[0]
if iterator.is_name:
- iterator_type = iterator.infer_type(self.env)
+ iterator_type = iterator.infer_type(env)
if iterator_type.is_builtin_type:
# assume that builtin types have a length within Py_ssize_t
self.mark_assignment(
target.args[0],
ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX',
- type=PyrexTypes.c_py_ssize_t_type))
+ type=PyrexTypes.c_py_ssize_t_type),
+ rhs_scope=node.iterator.expr_scope)
target = target.args[1]
sequence = sequence.args[0]
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
- entry = self.env.lookup(function.name)
+ entry = env.lookup(function.name)
if not entry or entry.is_builtin:
if function.name in ('range', 'xrange'):
is_special = True
for arg in sequence.args[:2]:
- self.mark_assignment(target, arg)
+ self.mark_assignment(target, arg, rhs_scope=node.iterator.expr_scope)
if len(sequence.args) > 2:
self.mark_assignment(target, self.constant_folder(
ExprNodes.binop_node(node.pos,
'+',
sequence.args[0],
- sequence.args[2])))
+ sequence.args[2])),
+ rhs_scope=node.iterator.expr_scope)
if not is_special:
# A for-loop basically translates to subsequent calls to
@@ -1020,7 +1032,7 @@ class ControlFlowAnalysis(CythonTransform):
# Python strings, etc., while correctly falling back to an
# object type when the base type cannot be handled.
- self.mark_assignment(target, node.item)
+ self.mark_assignment(target, node.item, rhs_scope=node.iterator.expr_scope)
def visit_AsyncForStatNode(self, node):
return self.visit_ForInStatNode(node)
@@ -1166,7 +1178,9 @@ class ControlFlowAnalysis(CythonTransform):
## XXX: children nodes
self.flow.block.add_child(entry_point)
self.flow.nextblock()
+ self.flow.in_try_block += 1
self._visit(node.body)
+ self.flow.in_try_block -= 1
self.flow.exceptions.pop()
# After exception
@@ -1226,7 +1240,9 @@ class ControlFlowAnalysis(CythonTransform):
self.flow.block = body_block
body_block.add_child(entry_point)
self.flow.nextblock()
+ self.flow.in_try_block += 1
self._visit(node.body)
+ self.flow.in_try_block -= 1
self.flow.exceptions.pop()
if self.flow.loops:
self.flow.loops[-1].exceptions.pop()
@@ -1245,6 +1261,8 @@ class ControlFlowAnalysis(CythonTransform):
if self.flow.exceptions:
self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
self.flow.block = None
+ if self.flow.in_try_block:
+ node.in_try_block = True
return node
def visit_ReraiseStatNode(self, node):
@@ -1313,21 +1331,25 @@ class ControlFlowAnalysis(CythonTransform):
def visit_ComprehensionNode(self, node):
if node.expr_scope:
- self.env_stack.append(self.env)
+ self.stack.append((self.env, self.flow))
self.env = node.expr_scope
# Skip append node here
self._visit(node.loop)
if node.expr_scope:
- self.env = self.env_stack.pop()
+ self.env, _ = self.stack.pop()
return node
def visit_ScopedExprNode(self, node):
+ # currently this is written to deal with these two types
+ # (with comprehensions covered in their own function)
+ assert isinstance(node, (ExprNodes.IteratorNode, ExprNodes.AsyncIteratorNode)), node
if node.expr_scope:
- self.env_stack.append(self.env)
+ self.stack.append((self.env, self.flow))
+ self.flow = self.find_in_stack(node.expr_scope)
self.env = node.expr_scope
self.visitchildren(node)
if node.expr_scope:
- self.env = self.env_stack.pop()
+ self.env, self.flow = self.stack.pop()
return node
def visit_PyClassDefNode(self, node):
@@ -1335,14 +1357,21 @@ class ControlFlowAnalysis(CythonTransform):
'mkw', 'bases', 'class_result'))
self.flow.mark_assignment(node.target, node.classobj,
self.env.lookup(node.target.name))
- self.env_stack.append(self.env)
+ self.stack.append((self.env, self.flow))
self.env = node.scope
self.flow.nextblock()
if node.doc_node:
self.flow.mark_assignment(node.doc_node, fake_rhs_expr, node.doc_node.entry)
self.visitchildren(node, ('body',))
self.flow.nextblock()
- self.env = self.env_stack.pop()
+ self.env, _ = self.stack.pop()
+ return node
+
+ def visit_CClassDefNode(self, node):
+ # just make sure the node's scope is findable in case there is a list comprehension in it
+ self.stack.append((node.scope, self.flow))
+ self.visitchildren(node)
+ self.stack.pop()
return node
def visit_AmpersandNode(self, node):
diff --git a/Cython/Compiler/FusedNode.py b/Cython/Compiler/FusedNode.py
index 5639cdf28..4643cfb65 100644
--- a/Cython/Compiler/FusedNode.py
+++ b/Cython/Compiler/FusedNode.py
@@ -321,25 +321,21 @@ class FusedCFuncDefNode(StatListNode):
def _buffer_check_numpy_dtype_setup_cases(self, pyx_code):
"Setup some common cases to match dtypes against specializations"
- if pyx_code.indenter("if kind in b'iu':"):
+ with pyx_code.indenter("if kind in b'iu':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_int")
- pyx_code.dedent()
- if pyx_code.indenter("elif kind == b'f':"):
+ with pyx_code.indenter("elif kind == b'f':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_float")
- pyx_code.dedent()
- if pyx_code.indenter("elif kind == b'c':"):
+ with pyx_code.indenter("elif kind == b'c':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_complex")
- pyx_code.dedent()
- if pyx_code.indenter("elif kind == b'O':"):
+ with pyx_code.indenter("elif kind == b'O':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_object")
- pyx_code.dedent()
match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'"
no_match = "dest_sig[{{dest_sig_idx}}] = None"
@@ -376,11 +372,10 @@ class FusedCFuncDefNode(StatListNode):
if final_type.is_pythran_expr:
cond += ' and arg_is_pythran_compatible'
- if codewriter.indenter("if %s:" % cond):
+ with codewriter.indenter("if %s:" % cond):
#codewriter.putln("print 'buffer match found based on numpy dtype'")
codewriter.putln(self.match)
codewriter.putln("break")
- codewriter.dedent()
def _buffer_parse_format_string_check(self, pyx_code, decl_code,
specialized_type, env):
@@ -697,7 +692,7 @@ class FusedCFuncDefNode(StatListNode):
self._unpack_argument(pyx_code)
# 'unrolled' loop, first match breaks out of it
- if pyx_code.indenter("while 1:"):
+ with pyx_code.indenter("while 1:"):
if normal_types:
self._fused_instance_checks(normal_types, pyx_code, env)
if buffer_types or pythran_types:
@@ -709,7 +704,6 @@ class FusedCFuncDefNode(StatListNode):
else:
pyx_code.putln(self.no_match)
pyx_code.putln("break")
- pyx_code.dedent()
fused_index += 1
all_buffer_types.update(buffer_types)
diff --git a/Cython/Compiler/Lexicon.py b/Cython/Compiler/Lexicon.py
index 654febbe7..c3ca05b56 100644
--- a/Cython/Compiler/Lexicon.py
+++ b/Cython/Compiler/Lexicon.py
@@ -74,6 +74,7 @@ def make_lexicon():
bra = Any("([{")
ket = Any(")]}")
+ ellipsis = Str("...")
punct = Any(":,;+-*/|&<>=.%`~^?!@")
diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**", "//",
"+=", "-=", "*=", "/=", "%=", "|=", "^=", "&=",
@@ -89,7 +90,7 @@ def make_lexicon():
(intliteral, Method('strip_underscores', symbol='INT')),
(fltconst, Method('strip_underscores', symbol='FLOAT')),
(imagconst, Method('strip_underscores', symbol='IMAG')),
- (punct | diphthong, TEXT),
+ (ellipsis | punct | diphthong, TEXT),
(bra, Method('open_bracket_action')),
(ket, Method('close_bracket_action')),
diff --git a/Cython/Compiler/Main.py b/Cython/Compiler/Main.py
index 764d9af21..d5985457d 100644
--- a/Cython/Compiler/Main.py
+++ b/Cython/Compiler/Main.py
@@ -2,7 +2,7 @@
# Cython Top Level
#
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function
import os
import re
@@ -143,6 +143,29 @@ class Context(object):
def nonfatal_error(self, exc):
return Errors.report_error(exc)
+ def _split_qualified_name(self, qualified_name):
+ # Splits qualified_name into parts in form of 2-tuples: (PART_NAME, IS_PACKAGE).
+ qualified_name_parts = qualified_name.split('.')
+ last_part = qualified_name_parts.pop()
+ qualified_name_parts = [(p, True) for p in qualified_name_parts]
+ if last_part != '__init__':
+ # If the last part is __init__, then it is omitted. Otherwise, we need to check whether we can find
+ # __init__.pyx/__init__.py file to determine if last part is package or not.
+ is_package = False
+ for suffix in ('.py', '.pyx'):
+ path = self.search_include_directories(
+ qualified_name, suffix=suffix, source_pos=None, source_file_path=None)
+ if path:
+ is_package = self._is_init_file(path)
+ break
+
+ qualified_name_parts.append((last_part, is_package))
+ return qualified_name_parts
+
+ @staticmethod
+ def _is_init_file(path):
+ return os.path.basename(path) in ('__init__.pyx', '__init__.py', '__init__.pxd') if path else False
+
def find_module(self, module_name, relative_to=None, pos=None, need_pxd=1,
absolute_fallback=True):
# Finds and returns the module scope corresponding to
@@ -182,16 +205,16 @@ class Context(object):
if not scope:
pxd_pathname = self.find_pxd_file(qualified_name, pos)
if pxd_pathname:
- scope = relative_to.find_submodule(module_name)
+ is_package = self._is_init_file(pxd_pathname)
+ scope = relative_to.find_submodule(module_name, as_package=is_package)
if not scope:
if debug_find_module:
print("...trying absolute import")
if absolute_fallback:
qualified_name = module_name
scope = self
- for name in qualified_name.split("."):
- scope = scope.find_submodule(name)
-
+ for name, is_package in self._split_qualified_name(qualified_name):
+ scope = scope.find_submodule(name, as_package=is_package)
if debug_find_module:
print("...scope = %s" % scope)
if not scope.pxd_file_loaded:
@@ -321,12 +344,12 @@ class Context(object):
# Look up a top-level module. Returns None if not found.
return self.modules.get(name, None)
- def find_submodule(self, name):
+ def find_submodule(self, name, as_package=False):
# Find a top-level module, creating a new one if needed.
scope = self.lookup_submodule(name)
if not scope:
scope = ModuleScope(name,
- parent_module = None, context = self)
+ parent_module = None, context = self, is_package=as_package)
self.modules[name] = scope
return scope
@@ -502,6 +525,10 @@ def run_pipeline(source, options, full_module_name=None, context=None):
err, enddata = Pipeline.run_pipeline(pipeline, source)
context.teardown_errors(err, options, result)
+ if options.depfile:
+ from ..Build.Dependencies import create_dependency_tree
+ dependencies = create_dependency_tree(context).all_dependencies(result.main_source_file)
+ Utils.write_depfile(result.c_file, result.main_source_file, dependencies)
return result
@@ -583,6 +610,9 @@ def compile_multiple(sources, options):
a CompilationResultSet. Performs timestamp checking and/or recursion
if these are specified in the options.
"""
+ if len(sources) > 1 and options.module_name:
+ raise RuntimeError('Full module name can only be set '
+ 'for single source compilation')
# run_pipeline creates the context
# context = Context.from_options(options)
sources = [os.path.abspath(source) for source in sources]
@@ -601,8 +631,9 @@ def compile_multiple(sources, options):
if (not timestamps) or out_of_date:
if verbose:
sys.stderr.write("Compiling %s\n" % source)
-
- result = run_pipeline(source, options, context=context)
+ result = run_pipeline(source, options,
+ full_module_name=options.module_name,
+ context=context)
results.add(source, result)
# Compiling multiple sources in one context doesn't quite
# work properly yet.
@@ -716,7 +747,16 @@ def main(command_line = 0):
args = sys.argv[1:]
any_failures = 0
if command_line:
- options, sources = parse_command_line(args)
+ try:
+ options, sources = parse_command_line(args)
+ except IOError as e:
+ # TODO: IOError can be replaced with FileNotFoundError in Cython 3.1
+ import errno
+ if errno.ENOENT != e.errno:
+ # Raised IOError is not caused by missing file.
+ raise
+ print("{}: No such file or directory: '{}'".format(sys.argv[0], e.filename), file=sys.stderr)
+ sys.exit(1)
else:
options = CompilationOptions(default_options)
sources = args
diff --git a/Cython/Compiler/MatchCaseNodes.py b/Cython/Compiler/MatchCaseNodes.py
index 1d2fdb339..e9007abac 100644
--- a/Cython/Compiler/MatchCaseNodes.py
+++ b/Cython/Compiler/MatchCaseNodes.py
@@ -1,9 +1,8 @@
# Nodes for structural pattern matching.
#
-# In a separate file because they're unlikely to be useful
-# for much else
+# In a separate file because they're unlikely to be useful for much else.
-from .Nodes import Node, StatNode
+from .Nodes import Node, StatNode, ErrorNode
from . import Nodes
from .Errors import error
from . import ExprNodes
@@ -21,7 +20,11 @@ class MatchNode(StatNode):
def validate_irrefutable(self):
found_irrefutable_case = None
- for c in self.cases:
+ for case in self.cases:
+ if isinstance(case, ErrorNode):
+ # This validation happens before error nodes have been
+ # transformed into actual errors, so we need to ignore them
+ continue
if found_irrefutable_case:
error(
found_irrefutable_case.pos,
@@ -31,9 +34,9 @@ class MatchNode(StatNode):
),
)
break
- if c.is_irrefutable():
- found_irrefutable_case = c
- c.validate_irrefutable()
+ if case.is_irrefutable():
+ found_irrefutable_case = case
+ case.validate_irrefutable()
def refactor_cases(self):
# An early transform - changes cases that can be represented as
@@ -118,6 +121,8 @@ class MatchCaseNode(Node):
child_attrs = ["pattern", "body", "guard"]
def is_irrefutable(self):
+ if isinstance(self.pattern, ErrorNode):
+ return True # value doesn't really matter
return self.pattern.is_irrefutable() and not self.guard
def is_simple_value_comparison(self):
@@ -126,9 +131,13 @@ class MatchCaseNode(Node):
return self.pattern.is_simple_value_comparison()
def validate_targets(self):
+ if isinstance(self.pattern, ErrorNode):
+ return
self.pattern.get_targets()
def validate_irrefutable(self):
+ if isinstance(self.pattern, ErrorNode):
+ return
self.pattern.validate_irrefutable()
def analyse_declarations(self, env):
@@ -165,7 +174,7 @@ class SubstitutedMatchCaseNode(MatchCaseBaseNode):
class PatternNode(Node):
"""
- DW decided that PatternNode shouldn't be an expression because
+ PatternNode is not an expression because
it does several things (evalutating a boolean expression,
assignment of targets), and they need to be done at different
times.
@@ -176,23 +185,22 @@ class PatternNode(Node):
child_attrs = ["as_targets"]
def __init__(self, pos, **kwds):
+ if "as_targets" not in kwds:
+ kwds["as_targets"] = []
super(PatternNode, self).__init__(pos, **kwds)
- if not hasattr(self, "as_targets"):
- self.as_targets = []
def is_irrefutable(self):
return False
def get_targets(self):
targets = self.get_main_pattern_targets()
- for t in self.as_targets:
- self.add_target_to_targets(targets, t.name)
+ for target in self.as_targets:
+ self.add_target_to_targets(targets, target.name)
return targets
def update_targets_with_targets(self, targets, other_targets):
- intersection = targets.intersection(other_targets)
- for i in intersection:
- error(self.pos, "multiple assignments to name '%s' in pattern" % i)
+ for name in targets.intersection(other_targets):
+ error(self.pos, "multiple assignments to name '%s' in pattern" % name)
targets.update(other_targets)
def add_target_to_targets(self, targets, target):
@@ -221,7 +229,7 @@ class PatternNode(Node):
def validate_irrefutable(self):
for attr in self.child_attrs:
child = getattr(self, attr)
- if isinstance(child, PatternNode):
+ if child is not None and isinstance(child, PatternNode):
child.validate_irrefutable()
@@ -289,9 +297,9 @@ class OrPatternNode(PatternNode):
child_attrs = PatternNode.child_attrs + ["alternatives"]
def get_first_irrefutable(self):
- for a in self.alternatives:
- if a.is_irrefutable():
- return a
+ for alternative in self.alternatives:
+ if alternative.is_irrefutable():
+ return alternative
return None
def is_irrefutable(self):
@@ -302,17 +310,17 @@ class OrPatternNode(PatternNode):
def get_main_pattern_targets(self):
child_targets = None
- for ch in self.alternatives:
- ch_targets = ch.get_targets()
- if child_targets is not None and child_targets != ch_targets:
+ for alternative in self.alternatives:
+ alternative_targets = alternative.get_targets()
+ if child_targets is not None and child_targets != alternative_targets:
error(self.pos, "alternative patterns bind different names")
- child_targets = ch_targets
+ child_targets = alternative_targets
return child_targets
def validate_irrefutable(self):
super(OrPatternNode, self).validate_irrefutable()
found_irrefutable_case = None
- for a in self.alternatives:
+ for alternative in self.alternatives:
if found_irrefutable_case:
error(
found_irrefutable_case.pos,
@@ -322,9 +330,9 @@ class OrPatternNode(PatternNode):
),
)
break
- if a.is_irrefutable():
- found_irrefutable_case = a
- a.validate_irrefutable()
+ if alternative.is_irrefutable():
+ found_irrefutable_case = alternative
+ alternative.validate_irrefutable()
def is_simple_value_comparison(self):
return all(
@@ -362,8 +370,8 @@ class MatchSequencePatternNode(PatternNode):
def get_main_pattern_targets(self):
targets = set()
- for p in self.patterns:
- self.update_targets_with_targets(targets, p.get_targets())
+ for pattern in self.patterns:
+ self.update_targets_with_targets(targets, pattern.get_targets())
return targets
@@ -378,7 +386,7 @@ class MatchMappingPatternNode(PatternNode):
value_patterns = []
double_star_capture_target = None
- child_atts = PatternNode.child_attrs + [
+ child_attrs = PatternNode.child_attrs + [
"keys",
"value_patterns",
"double_star_capture_target",
@@ -386,8 +394,8 @@ class MatchMappingPatternNode(PatternNode):
def get_main_pattern_targets(self):
targets = set()
- for p in self.value_patterns:
- self.update_targets_with_targets(targets, p.get_targets())
+ for pattern in self.value_patterns:
+ self.update_targets_with_targets(targets, pattern.get_targets())
if self.double_star_capture_target:
self.add_target_to_targets(targets, self.double_star_capture_target.name)
return targets
@@ -416,6 +424,6 @@ class ClassPatternNode(PatternNode):
def get_main_pattern_targets(self):
targets = set()
- for p in self.positional_patterns + self.keyword_pattern_patterns:
- self.update_targets_with_targets(targets, p.get_targets())
+ for pattern in self.positional_patterns + self.keyword_pattern_patterns:
+ self.update_targets_with_targets(targets, pattern.get_targets())
return targets
diff --git a/Cython/Compiler/MemoryView.py b/Cython/Compiler/MemoryView.py
index 6df53dcb6..5ebd396be 100644
--- a/Cython/Compiler/MemoryView.py
+++ b/Cython/Compiler/MemoryView.py
@@ -295,7 +295,7 @@ class MemoryViewSliceBufferEntry(Buffer.BufferEntry):
dim += 1
access, packing = self.type.axes[dim]
- if isinstance(index, ExprNodes.SliceNode):
+ if index.is_slice:
# slice, unspecified dimension, or part of ellipsis
d = dict(locals())
for s in "start stop step".split():
diff --git a/Cython/Compiler/ModuleNode.py b/Cython/Compiler/ModuleNode.py
index f83a51706..53aaf026e 100644
--- a/Cython/Compiler/ModuleNode.py
+++ b/Cython/Compiler/ModuleNode.py
@@ -262,7 +262,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
api_guard = self.api_name(Naming.api_guard_prefix, env)
h_code_start.putln("#ifndef %s" % api_guard)
h_code_start.putln("")
- self.generate_extern_c_macro_definition(h_code_start)
+ self.generate_extern_c_macro_definition(h_code_start, env.is_cpp())
h_code_start.putln("")
self.generate_dl_import_macro(h_code_start)
if h_extension_types:
@@ -804,7 +804,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.putln(" { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }")
code.putln("")
- self.generate_extern_c_macro_definition(code)
+ self.generate_extern_c_macro_definition(code, env.is_cpp())
code.putln("")
code.putln("#define %s" % self.api_name(Naming.h_guard_prefix, env))
@@ -876,14 +876,17 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if has_np_pythran(env):
env.use_utility_code(UtilityCode.load_cached("PythranConversion", "CppSupport.cpp"))
- def generate_extern_c_macro_definition(self, code):
+ def generate_extern_c_macro_definition(self, code, is_cpp):
name = Naming.extern_c_macro
code.putln("#ifndef %s" % name)
- code.putln(" #ifdef __cplusplus")
- code.putln(' #define %s extern "C"' % name)
- code.putln(" #else")
- code.putln(" #define %s extern" % name)
- code.putln(" #endif")
+ if is_cpp:
+ code.putln(' #define %s extern "C++"' % name)
+ else:
+ code.putln(" #ifdef __cplusplus")
+ code.putln(' #define %s extern "C"' % name)
+ code.putln(" #else")
+ code.putln(" #define %s extern" % name)
+ code.putln(" #endif")
code.putln("#endif")
def generate_dl_import_macro(self, code):
@@ -972,7 +975,8 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
def generate_typedef(self, entry, code):
base_type = entry.type.typedef_base_type
- if base_type.is_numeric:
+ enclosing_scope = entry.scope
+ if base_type.is_numeric and not enclosing_scope.is_cpp_class_scope:
try:
writer = code.globalstate['numeric_typedefs']
except KeyError:
@@ -1048,6 +1052,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
[base_class.empty_declaration_code() for base_class in type.base_classes])
code.put(" : public %s" % base_class_decl)
code.putln(" {")
+ self.generate_type_header_code(scope.type_entries, code)
py_attrs = [e for e in scope.entries.values()
if e.type.is_pyobject and not e.is_inherited]
has_virtual_methods = False
@@ -1632,7 +1637,6 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
entry = scope.lookup_here("__del__")
if entry is None or not entry.is_special:
return # nothing to wrap
- slot_func_cname = scope.mangle_internal("tp_finalize")
code.putln("")
if tp_slot.used_ifdef:
@@ -1677,7 +1681,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if py_attrs or cpp_destructable_attrs or memoryview_slices or weakref_slot or dict_slot:
self.generate_self_cast(scope, code)
- if not is_final_type:
+ if not is_final_type or scope.may_have_finalize():
# in Py3.4+, call tp_finalize() as early as possible
code.putln("#if CYTHON_USE_TP_FINALIZE")
if needs_gc:
@@ -3112,7 +3116,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if Options.generate_cleanup_code:
code.globalstate.use_utility_code(
UtilityCode.load_cached("RegisterModuleCleanup", "ModuleSetupCode.c"))
- code.putln("if (__Pyx_RegisterCleanup()) %s;" % code.error_goto(self.pos))
+ code.putln("if (__Pyx_RegisterCleanup()) %s" % code.error_goto(self.pos))
code.put_goto(code.return_label)
code.put_label(code.error_label)
@@ -3526,7 +3530,7 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
code.error_goto_if_null(Naming.cython_runtime_cname, self.pos)))
code.put_incref(Naming.cython_runtime_cname, py_object_type, nanny=False)
code.putln(
- 'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s;' % (
+ 'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s' % (
env.module_cname,
Naming.builtins_cname,
code.error_goto(self.pos)))
@@ -3772,14 +3776,14 @@ class ModuleNode(Nodes.Node, Nodes.BlockNode):
if not condition:
code.putln("") # start in new line
code.putln("#if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000")
- code.putln('sizeof(%s),' % objstruct)
+ code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct))
code.putln("#elif CYTHON_COMPILING_IN_LIMITED_API")
- code.putln('sizeof(%s),' % objstruct)
+ code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct))
code.putln("#else")
- code.putln('sizeof(%s),' % sizeof_objstruct)
+ code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (sizeof_objstruct, sizeof_objstruct))
code.putln("#endif")
else:
- code.put('sizeof(%s), ' % objstruct)
+ code.putln('sizeof(%s), __PYX_GET_STRUCT_ALIGNMENT(%s),' % (objstruct, objstruct))
# check_size
if type.check_size and type.check_size in ('error', 'warn', 'ignore'):
diff --git a/Cython/Compiler/Naming.py b/Cython/Compiler/Naming.py
index 7845e4aa1..1931e5976 100644
--- a/Cython/Compiler/Naming.py
+++ b/Cython/Compiler/Naming.py
@@ -17,6 +17,7 @@ pyunicode_identifier_prefix = pyrex_prefix + 'U'
builtin_prefix = pyrex_prefix + "builtin_"
arg_prefix = pyrex_prefix + "arg_"
+genexpr_arg_prefix = pyrex_prefix + "genexpr_arg_"
funcdoc_prefix = pyrex_prefix + "doc_"
enum_prefix = pyrex_prefix + "e_"
func_prefix = pyrex_prefix + "f_"
@@ -126,6 +127,7 @@ cur_scope_cname = pyrex_prefix + "cur_scope"
enc_scope_cname = pyrex_prefix + "enc_scope"
frame_cname = pyrex_prefix + "frame"
frame_code_cname = pyrex_prefix + "frame_code"
+error_without_exception_cname = pyrex_prefix + "error_without_exception"
binding_cfunc = pyrex_prefix + "binding_PyCFunctionType"
fused_func_prefix = pyrex_prefix + 'fuse_'
quick_temp_cname = pyrex_prefix + "temp" # temp variable for quick'n'dirty temping
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
index 4cad762ab..5c3321326 100644
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -158,6 +158,7 @@ class Node(object):
is_terminator = 0
is_wrapper = False # is a DefNode wrapper for a C function
is_cproperty = False
+ is_templated_type_node = False
temps = None
# All descendants should set child_attrs to a list of the attributes
@@ -729,13 +730,15 @@ class CFuncDeclaratorNode(CDeclaratorNode):
# Use an explicit exception return value to speed up exception checks.
# Even if it is not declared, we can use the default exception value of the return type,
# unless the function is some kind of external function that we do not control.
- if (return_type.exception_value is not None and (visibility != 'extern' and not in_pxd)
- # Ideally the function-pointer test would be better after self.base is analysed
- # however that is hard to do with the current implementation so it lives here
- # for now
- and not isinstance(self.base, CPtrDeclaratorNode)):
- # Extension types are more difficult because the signature must match the base type signature.
- if not env.is_c_class_scope:
+ if (return_type.exception_value is not None and (visibility != 'extern' and not in_pxd)):
+ # - We skip this optimization for extension types; they are more difficult because
+ # the signature must match the base type signature.
+ # - Same for function pointers, as we want them to be able to match functions
+ # with any exception value.
+ # - Ideally the function-pointer test would be better after self.base is analysed
+ # however that is hard to do with the current implementation so it lives here
+ # for now.
+ if not env.is_c_class_scope and not isinstance(self.base, CPtrDeclaratorNode):
from .ExprNodes import ConstNode
self.exception_value = ConstNode(
self.pos, value=return_type.exception_value, type=return_type)
@@ -966,27 +969,34 @@ class CArgDeclNode(Node):
annotation = self.annotation
if not annotation:
return None
- base_type, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default)
- if base_type is not None:
- self.base_type = base_type
-
- if arg_type and arg_type.python_type_constructor_name == "typing.Optional":
- # "x: Optional[...]" => explicitly allow 'None'
- arg_type = arg_type.resolve()
- if arg_type and not arg_type.is_pyobject:
- error(annotation.pos, "Only Python type arguments can use typing.Optional[...]")
- else:
- self.or_none = True
- elif arg_type is py_object_type:
- # exclude ": object" from the None check - None is a generic object.
- self.or_none = True
- elif arg_type and arg_type.is_pyobject and self.default and self.default.is_none:
- # "x: ... = None" => implicitly allow 'None', but warn about it.
- if not self.or_none:
- warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.")
+
+ modifiers, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default)
+ if arg_type is not None:
+ self.base_type = CAnalysedBaseTypeNode(
+ annotation.pos, type=arg_type, is_arg=True)
+
+ if arg_type:
+ if "typing.Optional" in modifiers:
+ # "x: Optional[...]" => explicitly allow 'None'
+ arg_type = arg_type.resolve()
+ if arg_type and not arg_type.is_pyobject:
+ # We probably already reported this as "cannot be applied to non-Python type".
+ # error(annotation.pos, "Only Python type arguments can use typing.Optional[...]")
+ pass
+ else:
+ self.or_none = True
+ elif arg_type is py_object_type:
+ # exclude ": object" from the None check - None is a generic object.
self.or_none = True
- elif arg_type and arg_type.is_pyobject and not self.or_none:
- self.not_none = True
+ elif self.default and self.default.is_none and (arg_type.is_pyobject or arg_type.equivalent_type):
+ # "x: ... = None" => implicitly allow 'None'
+ if not arg_type.is_pyobject:
+ arg_type = arg_type.equivalent_type
+ if not self.or_none:
+ warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.")
+ self.or_none = True
+ elif arg_type.is_pyobject and not self.or_none:
+ self.not_none = True
return arg_type
@@ -1076,9 +1086,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
else:
type = py_object_type
else:
+ scope = env
if self.module_path:
# Maybe it's a nested C++ class.
- scope = env
for item in self.module_path:
entry = scope.lookup(item)
if entry is not None and (
@@ -1099,8 +1109,6 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
if scope is None:
# Maybe it's a cimport.
scope = env.find_imported_module(self.module_path, self.pos)
- else:
- scope = env
if scope:
if scope.is_c_class_scope:
@@ -1139,10 +1147,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
type = PyrexTypes.c_double_complex_type
type.create_declaration_utility_code(env)
self.complex = True
- if type:
- return type
- else:
- return PyrexTypes.error_type
+ if not type:
+ type = PyrexTypes.error_type
+ return type
class MemoryViewSliceTypeNode(CBaseTypeNode):
@@ -1211,10 +1218,40 @@ class TemplatedTypeNode(CBaseTypeNode):
child_attrs = ["base_type_node", "positional_args",
"keyword_args", "dtype_node"]
+ is_templated_type_node = True
dtype_node = None
-
name = None
+ def _analyse_template_types(self, env, base_type):
+ require_python_types = base_type.python_type_constructor_name in (
+ 'typing.Optional',
+ 'dataclasses.ClassVar',
+ )
+ in_c_type_context = env.in_c_type_context and not require_python_types
+
+ template_types = []
+ for template_node in self.positional_args:
+ # CBaseTypeNode -> allow C type declarations in a 'cdef' context again
+ with env.new_c_type_context(in_c_type_context or isinstance(template_node, CBaseTypeNode)):
+ ttype = template_node.analyse_as_type(env)
+ if ttype is None:
+ if base_type.is_cpp_class:
+ error(template_node.pos, "unknown type in template argument")
+ ttype = error_type
+ # For Python generics we can be a bit more flexible and allow None.
+ elif require_python_types and not ttype.is_pyobject:
+ if ttype.equivalent_type and not template_node.as_cython_attribute():
+ ttype = ttype.equivalent_type
+ else:
+ error(template_node.pos, "%s[...] cannot be applied to non-Python type %s" % (
+ base_type.python_type_constructor_name,
+ ttype,
+ ))
+ ttype = error_type
+ template_types.append(ttype)
+
+ return template_types
+
def analyse(self, env, could_be_name=False, base_type=None):
if base_type is None:
base_type = self.base_type_node.analyse(env)
@@ -1222,21 +1259,15 @@ class TemplatedTypeNode(CBaseTypeNode):
if ((base_type.is_cpp_class and base_type.is_template_type()) or
base_type.python_type_constructor_name):
- # Templated class
+ # Templated class, Python generics, etc.
if self.keyword_args and self.keyword_args.key_value_pairs:
tp = "c++ templates" if base_type.is_cpp_class else "indexed types"
error(self.pos, "%s cannot take keyword arguments" % tp)
self.type = PyrexTypes.error_type
- else:
- template_types = []
- for template_node in self.positional_args:
- type = template_node.analyse_as_type(env)
- if type is None and base_type.is_cpp_class:
- error(template_node.pos, "unknown type in template argument")
- type = error_type
- # for indexed_pytype we can be a bit more flexible and pass None
- template_types.append(type)
- self.type = base_type.specialize_here(template_node.pos, env, template_types)
+ return self.type
+
+ template_types = self._analyse_template_types(env, base_type)
+ self.type = base_type.specialize_here(self.pos, env, template_types)
elif base_type.is_pyobject:
# Buffer
@@ -1277,7 +1308,7 @@ class TemplatedTypeNode(CBaseTypeNode):
dimension=dimension)
self.type = self.array_declarator.analyse(base_type, env)[1]
- if self.type.is_fused and env.fused_to_specific:
+ if self.type and self.type.is_fused and env.fused_to_specific:
try:
self.type = self.type.specialize(env.fused_to_specific)
except CannotSpecialize:
@@ -1287,6 +1318,19 @@ class TemplatedTypeNode(CBaseTypeNode):
return self.type
+ def analyse_pytyping_modifiers(self, env):
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ # TODO: somehow bring this together with IndexNode.analyse_pytyping_modifiers()
+ modifiers = []
+ modifier_node = self
+ while modifier_node.is_templated_type_node and modifier_node.base_type_node and len(modifier_node.positional_args) == 1:
+ modifier_type = self.base_type_node.analyse_as_type(env)
+ if modifier_type.python_type_constructor_name and modifier_type.modifier_name:
+ modifiers.append(modifier_type.modifier_name)
+ modifier_node = modifier_node.positional_args[0]
+
+ return modifiers
+
class CComplexBaseTypeNode(CBaseTypeNode):
# base_type CBaseTypeNode
@@ -1414,6 +1458,11 @@ class CVarDefNode(StatNode):
base_type = self.base_type.analyse(env)
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ modifiers = None
+ if self.base_type.is_templated_type_node:
+ modifiers = self.base_type.analyse_pytyping_modifiers(env)
+
if base_type.is_fused and not self.in_pxd and (env.is_c_class_scope or
env.is_module_scope):
error(self.pos, "Fused types not allowed here")
@@ -1477,7 +1526,7 @@ class CVarDefNode(StatNode):
self.entry = dest_scope.declare_var(
name, type, declarator.pos,
cname=cname, visibility=visibility, in_pxd=self.in_pxd,
- api=self.api, is_cdef=1)
+ api=self.api, is_cdef=True, pytyping_modifiers=modifiers)
if Options.docstrings:
self.entry.doc = embed_position(self.pos, self.doc)
@@ -1586,6 +1635,9 @@ class CppClassNode(CStructOrUnionDefNode, BlockNode):
elif isinstance(attr, CompilerDirectivesNode):
for sub_attr in func_attributes(attr.body.stats):
yield sub_attr
+ elif isinstance(attr, CppClassNode) and attr.attributes is not None:
+ for sub_attr in func_attributes(attr.attributes):
+ yield sub_attr
if self.attributes is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
@@ -2070,7 +2122,6 @@ class FuncDefNode(StatNode, BlockNode):
self.generate_argument_parsing_code(env, code)
# If an argument is assigned to in the body, we must
# incref it to properly keep track of refcounts.
- is_cdef = isinstance(self, CFuncDefNode)
for entry in lenv.arg_entries:
if not entry.type.is_memoryviewslice:
if (acquire_gil or entry.cf_is_reassigned) and not entry.in_closure:
@@ -2079,7 +2130,7 @@ class FuncDefNode(StatNode, BlockNode):
# we acquire arguments from object conversion, so we have
# new references. If we are a cdef function, we need to
# incref our arguments
- elif is_cdef and entry.cf_is_reassigned:
+ elif entry.cf_is_reassigned and not entry.in_closure:
code.put_var_incref_memoryviewslice(entry,
have_gil=code.funcstate.gil_owned)
for entry in lenv.var_entries:
@@ -2184,7 +2235,14 @@ class FuncDefNode(StatNode, BlockNode):
# code.put_trace_exception()
assure_gil('error')
+ if code.funcstate.error_without_exception:
+ tempvardecl_code.putln(
+ "int %s = 0; /* StopIteration */" % Naming.error_without_exception_cname
+ )
+ code.putln("if (!%s) {" % Naming.error_without_exception_cname)
code.put_add_traceback(self.entry.qualified_name)
+ if code.funcstate.error_without_exception:
+ code.putln("}")
else:
warning(self.entry.pos,
"Unraisable exception in function '%s'." %
@@ -2274,14 +2332,14 @@ class FuncDefNode(StatNode, BlockNode):
# Decref any increfed args
for entry in lenv.arg_entries:
+ if entry.in_closure:
+ continue
if entry.type.is_memoryviewslice:
# decref slices of def functions and acquired slices from cdef
# functions, but not borrowed slices from cdef functions.
- if is_cdef and not entry.cf_is_reassigned:
+ if not entry.cf_is_reassigned:
continue
else:
- if entry.in_closure:
- continue
if not acquire_gil and not entry.cf_is_reassigned:
continue
if entry.type.needs_refcounting:
@@ -2827,8 +2885,11 @@ class CFuncDefNode(FuncDefNode):
def put_into_closure(entry):
if entry.in_closure and not arg.default:
code.putln('%s = %s;' % (entry.cname, entry.original_cname))
- code.put_var_incref(entry)
- code.put_var_giveref(entry)
+ if entry.type.is_memoryviewslice:
+ entry.type.generate_incref_memoryviewslice(code, entry.cname, True)
+ else:
+ code.put_var_incref(entry)
+ code.put_var_giveref(entry)
for arg in self.args:
put_into_closure(scope.lookup_here(arg.name))
@@ -3164,7 +3225,7 @@ class DefNode(FuncDefNode):
else:
# probably just a plain 'object'
arg.accept_none = True
- else:
+ elif not arg.type.is_error:
arg.accept_none = True # won't be used, but must be there
if arg.not_none:
error(arg.pos, "Only Python type arguments can have 'not None'")
@@ -3457,8 +3518,20 @@ class DefNode(FuncDefNode):
# Move arguments into closure if required
def put_into_closure(entry):
if entry.in_closure:
- code.putln('%s = %s;' % (entry.cname, entry.original_cname))
- if entry.xdecref_cleanup:
+ if entry.type.is_array:
+ # This applies to generator expressions that iterate over C arrays (and need to
+ # capture them by value), under most other circumstances C array arguments are dropped to
+ # pointers so this copy isn't used
+ assert entry.type.size is not None
+ code.globalstate.use_utility_code(UtilityCode.load_cached("IncludeStringH", "StringTools.c"))
+ code.putln("memcpy({0}, {1}, sizeof({0}));".format(entry.cname, entry.original_cname))
+ else:
+ code.putln('%s = %s;' % (entry.cname, entry.original_cname))
+ if entry.type.is_memoryviewslice:
+ # TODO - at some point reference count of memoryviews should
+ # genuinely be unified with PyObjects
+ entry.type.generate_incref_memoryviewslice(code, entry.cname, True)
+ elif entry.xdecref_cleanup:
# mostly applies to the starstar arg - this can sometimes be NULL
# so must be xincrefed instead
code.put_var_xincref(entry)
@@ -3616,11 +3689,20 @@ class DefNodeWrapper(FuncDefNode):
# ----- Non-error return cleanup
code.put_label(code.return_label)
for entry in lenv.var_entries:
- if entry.is_arg and entry.type.is_pyobject:
+ if entry.is_arg:
+ # mainly captures the star/starstar args
if entry.xdecref_cleanup:
code.put_var_xdecref(entry)
else:
code.put_var_decref(entry)
+ for arg in self.args:
+ if not arg.type.is_pyobject:
+ # This captures anything that's been converted from a PyObject.
+ # Primarily memoryviews at the moment
+ if arg.entry.xdecref_cleanup:
+ code.put_var_xdecref(arg.entry)
+ else:
+ code.put_var_decref(arg.entry)
code.put_finish_refcount_context()
if not self.return_type.is_void:
@@ -3673,7 +3755,7 @@ class DefNodeWrapper(FuncDefNode):
with_pymethdef = False
dc = self.return_type.declaration_code(entry.func_cname)
- header = "static %s%s(%s)" % (mf, dc, arg_code)
+ header = "%sstatic %s(%s)" % (mf, dc, arg_code)
code.putln("%s; /*proto*/" % header)
if proto_only:
@@ -5100,7 +5182,6 @@ class CClassDefNode(ClassDefNode):
check_size = None
decorators = None
shadow = False
- is_dataclass = False
@property
def punycode_class_name(self):
@@ -5142,6 +5223,8 @@ class CClassDefNode(ClassDefNode):
api=self.api,
buffer_defaults=self.buffer_defaults(env),
shadow=self.shadow)
+ if self.bases and len(self.bases.args) > 1:
+ self.entry.type.multiple_bases = True
def analyse_declarations(self, env):
#print "CClassDefNode.analyse_declarations:", self.class_name
@@ -5150,8 +5233,6 @@ class CClassDefNode(ClassDefNode):
if env.in_cinclude and not self.objstruct_name:
error(self.pos, "Object struct name specification required for C class defined in 'extern from' block")
- if "dataclasses.dataclass" in env.directives:
- self.is_dataclass = True
if self.decorators:
error(self.pos, "Decorators not allowed on cdef classes (used on type '%s')" % self.class_name)
self.base_type = None
@@ -5188,7 +5269,8 @@ class CClassDefNode(ClassDefNode):
error(base.pos, "Base class '%s' of type '%s' is final" % (
base_type, self.class_name))
elif base_type.is_builtin_type and \
- base_type.name in ('tuple', 'str', 'bytes'):
+ base_type.name in ('tuple', 'bytes'):
+ # str in Py2 is also included in this, but now checked at run-time
error(base.pos, "inheritance from PyVarObject types like '%s' is not currently supported"
% base_type.name)
else:
@@ -5232,6 +5314,8 @@ class CClassDefNode(ClassDefNode):
api=self.api,
buffer_defaults=self.buffer_defaults(env),
shadow=self.shadow)
+ if self.bases and len(self.bases.args) > 1:
+ self.entry.type.multiple_bases = True
if self.shadow:
home_scope.lookup(self.class_name).as_variable = self.entry
@@ -5240,6 +5324,15 @@ class CClassDefNode(ClassDefNode):
self.scope = scope = self.entry.type.scope
if scope is not None:
scope.directives = env.directives
+ if "dataclasses.dataclass" in env.directives:
+ is_frozen = False
+ # Retrieve the @dataclass config (args, kwargs), as passed into the decorator.
+ dataclass_config = env.directives["dataclasses.dataclass"]
+ if dataclass_config:
+ decorator_kwargs = dataclass_config[1]
+ frozen_flag = decorator_kwargs.get('frozen')
+ is_frozen = frozen_flag and frozen_flag.is_literal and frozen_flag.value
+ scope.is_c_dataclass_scope = "frozen" if is_frozen else True
if self.doc and Options.docstrings:
scope.doc = embed_position(self.pos, self.doc)
@@ -5435,8 +5528,10 @@ class CClassDefNode(ClassDefNode):
typeptr_cname, buffer_slot.slot_name,
))
code.putln("}")
+ code.putln("#elif defined(_MSC_VER)")
+ code.putln("#pragma message (\"The buffer protocol is not supported in the Limited C-API.\")")
code.putln("#else")
- code.putln("#warning The buffer protocol is not supported in the Limited C-API.")
+ code.putln("#warning \"The buffer protocol is not supported in the Limited C-API.\"")
code.putln("#endif")
code.globalstate.use_utility_code(
@@ -5455,6 +5550,22 @@ class CClassDefNode(ClassDefNode):
))
code.putln("#endif") # if CYTHON_USE_TYPE_SPECS
+ base_type = type.base_type
+ while base_type:
+ if base_type.is_external and not base_type.objstruct_cname == "PyTypeObject":
+ # 'type' is special-cased because it is actually based on PyHeapTypeObject
+ # Variable length bases are allowed if the current class doesn't grow
+ code.putln("if (sizeof(%s%s) != sizeof(%s%s)) {" % (
+ "" if type.typedef_flag else "struct ", type.objstruct_cname,
+ "" if base_type.typedef_flag else "struct ", base_type.objstruct_cname))
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("ValidateExternBase", "ExtensionTypes.c"))
+ code.put_error_if_neg(entry.pos, "__Pyx_validate_extern_base(%s)" % (
+ type.base_type.typeptr_cname))
+ code.putln("}")
+ break
+ base_type = base_type.base_type
+
code.putln("#if !CYTHON_COMPILING_IN_LIMITED_API")
# FIXME: these still need to get initialised even with the limited-API
for slot in TypeSlots.get_slot_table(code.globalstate.directives):
@@ -6654,11 +6765,15 @@ class RaiseStatNode(StatNode):
# exc_value ExprNode or None
# exc_tb ExprNode or None
# cause ExprNode or None
+ #
+ # set in FlowControl
+ # in_try_block bool
child_attrs = ["exc_type", "exc_value", "exc_tb", "cause"]
is_terminator = True
builtin_exc_name = None
wrap_tuple_value = False
+ in_try_block = False
def analyse_expressions(self, env):
if self.exc_type:
@@ -6687,9 +6802,19 @@ class RaiseStatNode(StatNode):
not (exc.args or (exc.arg_tuple is not None and exc.arg_tuple.args))):
exc = exc.function # extract the exception type
if exc.is_name and exc.entry.is_builtin:
+ from . import Symtab
self.builtin_exc_name = exc.name
if self.builtin_exc_name == 'MemoryError':
self.exc_type = None # has a separate implementation
+ elif (self.builtin_exc_name == 'StopIteration' and
+ env.is_local_scope and env.name == "__next__" and
+ env.parent_scope and env.parent_scope.is_c_class_scope and
+ not self.in_try_block):
+ # tp_iternext is allowed to return NULL without raising StopIteration.
+ # For the sake of simplicity, only allow this to happen when not in
+ # a try block
+ self.exc_type = None
+
return self
nogil_check = Node.gil_error
@@ -6700,6 +6825,11 @@ class RaiseStatNode(StatNode):
if self.builtin_exc_name == 'MemoryError':
code.putln('PyErr_NoMemory(); %s' % code.error_goto(self.pos))
return
+ elif self.builtin_exc_name == 'StopIteration' and not self.exc_type:
+ code.putln('%s = 1;' % Naming.error_without_exception_cname)
+ code.putln('%s;' % code.error_goto(None))
+ code.funcstate.error_without_exception = True
+ return
if self.exc_type:
self.exc_type.generate_evaluation_code(code)
@@ -8610,7 +8740,7 @@ class FromCImportStatNode(StatNode):
#
# module_name string Qualified name of module
# relative_level int or None Relative import: number of dots before module_name
- # imported_names [(pos, name, as_name, kind)] Names to be imported
+ # imported_names [(pos, name, as_name)] Names to be imported
child_attrs = []
module_name = None
@@ -8621,35 +8751,34 @@ class FromCImportStatNode(StatNode):
if not env.is_module_scope:
error(self.pos, "cimport only allowed at module level")
return
- if self.relative_level and self.relative_level > env.qualified_name.count('.'):
- error(self.pos, "relative cimport beyond main package is not allowed")
- return
+ qualified_name_components = env.qualified_name.count('.') + 1
+ if self.relative_level:
+ if self.relative_level > qualified_name_components:
+ # 1. case: importing beyond package: from .. import pkg
+ error(self.pos, "relative cimport beyond main package is not allowed")
+ return
+ elif self.relative_level == qualified_name_components and not env.is_package:
+ # 2. case: importing from same level but current dir is not package: from . import module
+ error(self.pos, "relative cimport from non-package directory is not allowed")
+ return
module_scope = env.find_module(self.module_name, self.pos, relative_level=self.relative_level)
module_name = module_scope.qualified_name
env.add_imported_module(module_scope)
- for pos, name, as_name, kind in self.imported_names:
+ for pos, name, as_name in self.imported_names:
if name == "*":
for local_name, entry in list(module_scope.entries.items()):
env.add_imported_entry(local_name, entry, pos)
else:
entry = module_scope.lookup(name)
if entry:
- if kind and not self.declaration_matches(entry, kind):
- entry.redeclared(pos)
entry.used = 1
else:
- if kind == 'struct' or kind == 'union':
- entry = module_scope.declare_struct_or_union(
- name, kind=kind, scope=None, typedef_flag=0, pos=pos)
- elif kind == 'class':
- entry = module_scope.declare_c_class(name, pos=pos, module_name=module_name)
+ submodule_scope = env.context.find_module(
+ name, relative_to=module_scope, pos=self.pos, absolute_fallback=False)
+ if submodule_scope.parent_module is module_scope:
+ env.declare_module(as_name or name, submodule_scope, self.pos)
else:
- submodule_scope = env.context.find_module(
- name, relative_to=module_scope, pos=self.pos, absolute_fallback=False)
- if submodule_scope.parent_module is module_scope:
- env.declare_module(as_name or name, submodule_scope, self.pos)
- else:
- error(pos, "Name '%s' not declared in module '%s'" % (name, module_name))
+ error(pos, "Name '%s' not declared in module '%s'" % (name, module_name))
if entry:
local_name = as_name or name
@@ -8658,7 +8787,7 @@ class FromCImportStatNode(StatNode):
if module_name.startswith('cpython') or module_name.startswith('cython'): # enough for now
if module_name in utility_code_for_cimports:
env.use_utility_code(utility_code_for_cimports[module_name]())
- for _, name, _, _ in self.imported_names:
+ for _, name, _ in self.imported_names:
fqname = '%s.%s' % (module_name, name)
if fqname in utility_code_for_cimports:
env.use_utility_code(utility_code_for_cimports[fqname]())
@@ -10028,13 +10157,13 @@ class CnameDecoratorNode(StatNode):
class ErrorNode(Node):
"""
- Node type for things that we want to get throught the parser
+ Node type for things that we want to get through the parser
(especially for things that are being scanned in "tentative_scan"
blocks), but should immediately raise and error afterwards.
what str
"""
- pass
+ child_attrs = []
#------------------------------------------------------------------------------------
diff --git a/Cython/Compiler/Optimize.py b/Cython/Compiler/Optimize.py
index a601d18c9..231d23419 100644
--- a/Cython/Compiler/Optimize.py
+++ b/Cython/Compiler/Optimize.py
@@ -319,16 +319,6 @@ class IterationTransform(Visitor.EnvTransform):
return self._optimise_for_loop(node, arg, reversed=True)
- PyBytes_AS_STRING_func_type = PyrexTypes.CFuncType(
- PyrexTypes.c_char_ptr_type, [
- PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
- ])
-
- PyBytes_GET_SIZE_func_type = PyrexTypes.CFuncType(
- PyrexTypes.c_py_ssize_t_type, [
- PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
- ])
-
def _transform_indexable_iteration(self, node, slice_node, is_mutable, reversed=False):
"""In principle can handle any iterable that Cython has a len() for and knows how to index"""
unpack_temp_node = UtilNodes.LetRefNode(
@@ -415,6 +405,16 @@ class IterationTransform(Visitor.EnvTransform):
body.stats.insert(1, node.body)
return ret
+ PyBytes_AS_STRING_func_type = PyrexTypes.CFuncType(
+ PyrexTypes.c_char_ptr_type, [
+ PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
+ ])
+
+ PyBytes_GET_SIZE_func_type = PyrexTypes.CFuncType(
+ PyrexTypes.c_py_ssize_t_type, [
+ PyrexTypes.CFuncTypeArg("s", Builtin.bytes_type, None)
+ ])
+
def _transform_bytes_iteration(self, node, slice_node, reversed=False):
target_type = node.target.type
if not target_type.is_int and target_type is not Builtin.bytes_type:
@@ -2105,7 +2105,8 @@ class InlineDefNodeCalls(Visitor.NodeRefCleanupMixin, Visitor.EnvTransform):
return node
inlined = ExprNodes.InlinedDefNodeCallNode(
node.pos, function_name=function_name,
- function=function, args=node.args)
+ function=function, args=node.args,
+ generator_arg_tag=node.generator_arg_tag)
if inlined.can_be_inlined():
return self.replace(node, inlined)
return node
@@ -3026,7 +3027,7 @@ class OptimizeBuiltinCalls(Visitor.NodeRefCleanupMixin,
"""Optimistic optimisation as X.append() is almost always
referring to a list.
"""
- if len(args) != 2 or node.result_is_used:
+ if len(args) != 2 or node.result_is_used or node.function.entry:
return node
return ExprNodes.PythonCapiCallNode(
diff --git a/Cython/Compiler/Options.py b/Cython/Compiler/Options.py
index af28a7187..73778aaf9 100644
--- a/Cython/Compiler/Options.py
+++ b/Cython/Compiler/Options.py
@@ -171,7 +171,7 @@ def copy_inherited_directives(outer_directives, **new_directives):
# For example, test_assert_path_exists and test_fail_if_path_exists should not be inherited
# otherwise they can produce very misleading test failures
new_directives_out = dict(outer_directives)
- for name in ('test_assert_path_exists', 'test_fail_if_path_exists'):
+ for name in ('test_assert_path_exists', 'test_fail_if_path_exists', 'test_assert_c_code_has', 'test_fail_if_c_code_has'):
new_directives_out.pop(name, None)
new_directives_out.update(new_directives)
return new_directives_out
@@ -247,6 +247,8 @@ _directive_defaults = {
# test support
'test_assert_path_exists' : [],
'test_fail_if_path_exists' : [],
+ 'test_assert_c_code_has' : [],
+ 'test_fail_if_c_code_has' : [],
# experimental, subject to change
'formal_grammar': False,
@@ -364,9 +366,10 @@ directive_scopes = { # defaults to available everywhere
'set_initial_path' : ('module',),
'test_assert_path_exists' : ('function', 'class', 'cclass'),
'test_fail_if_path_exists' : ('function', 'class', 'cclass'),
+ 'test_assert_c_code_has' : ('module',),
+ 'test_fail_if_c_code_has' : ('module',),
'freelist': ('cclass',),
'emit_code_comments': ('module',),
- 'annotation_typing': ('module',), # FIXME: analysis currently lacks more specific function scope
# Avoid scope-specific to/from_py_functions for c_string.
'c_string_type': ('module',),
'c_string_encoding': ('module',),
@@ -388,7 +391,7 @@ directive_scopes = { # defaults to available everywhere
# a list of directives that (when used as a decorator) are only applied to
# the object they decorate and not to its children.
immediate_decorator_directives = {
- 'cfunc', 'ccall', 'cclass',
+ 'cfunc', 'ccall', 'cclass', 'dataclasses.dataclass',
# function signature directives
'inline', 'exceptval', 'returns',
# class directives
@@ -510,6 +513,11 @@ def parse_directive_list(s, relaxed_bool=False, ignore_unknown=False,
result[directive] = parsed_value
if not found and not ignore_unknown:
raise ValueError('Unknown option: "%s"' % name)
+ elif directive_types.get(name) is list:
+ if name in result:
+ result[name].append(value)
+ else:
+ result[name] = [value]
else:
parsed_value = parse_directive_value(name, value, relaxed_bool=relaxed_bool)
result[name] = parsed_value
@@ -662,6 +670,9 @@ class CompilationOptions(object):
elif key in ['output_file', 'output_dir']:
# ignore the exact name of the output file
continue
+ elif key in ['depfile']:
+ # external build system dependency tracking file does not influence outputs
+ continue
elif key in ['timestamps']:
# the cache cares about the content of files, not about the timestamps of sources
continue
@@ -740,6 +751,7 @@ default_options = dict(
errors_to_stderr=1,
cplus=0,
output_file=None,
+ depfile=None,
annotate=None,
annotate_coverage_xml=None,
generate_pxi=0,
@@ -758,6 +770,7 @@ default_options = dict(
formal_grammar=False,
gdb_debug=False,
compile_time_env=None,
+ module_name=None,
common_utility_include_dir=None,
output_dir=None,
build_dir=None,
diff --git a/Cython/Compiler/ParseTreeTransforms.pxd b/Cython/Compiler/ParseTreeTransforms.pxd
index 92f9b0601..2778be4ef 100644
--- a/Cython/Compiler/ParseTreeTransforms.pxd
+++ b/Cython/Compiler/ParseTreeTransforms.pxd
@@ -6,8 +6,8 @@ from .Visitor cimport (
CythonTransform, VisitorTransform, TreeVisitor,
ScopeTrackingTransform, EnvTransform)
-cdef class SkipDeclarations: # (object):
- pass
+# Don't include mixins, only the main classes.
+#cdef class SkipDeclarations:
cdef class NormalizeTree(CythonTransform):
cdef bint is_in_statlist
@@ -18,6 +18,7 @@ cdef class PostParse(ScopeTrackingTransform):
cdef dict specialattribute_handlers
cdef size_t lambda_counter
cdef size_t genexpr_counter
+ cdef bint in_pattern_node
cdef _visit_assignment_node(self, node, list expr_list)
diff --git a/Cython/Compiler/ParseTreeTransforms.py b/Cython/Compiler/ParseTreeTransforms.py
index 52a355e7f..981e4b174 100644
--- a/Cython/Compiler/ParseTreeTransforms.py
+++ b/Cython/Compiler/ParseTreeTransforms.py
@@ -6,10 +6,12 @@ import cython
cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object,
Options=object, UtilNodes=object, LetNode=object,
LetRefNode=object, TreeFragment=object, EncodedString=object,
- error=object, warning=object, copy=object, _unicode=object)
+ error=object, warning=object, copy=object, hashlib=object, sys=object,
+ _unicode=object)
import copy
import hashlib
+import sys
from . import PyrexTypes
from . import Naming
@@ -191,6 +193,7 @@ class PostParse(ScopeTrackingTransform):
self.specialattribute_handlers = {
'__cythonbufferdefaults__' : self.handle_bufferdefaults
}
+ self.in_pattern_node = False
def visit_LambdaNode(self, node):
# unpack a lambda expression into the corresponding DefNode
@@ -397,6 +400,18 @@ class PostParse(ScopeTrackingTransform):
self.visitchildren(node)
return node
+ def visit_PatternNode(self, node):
+ in_pattern_node, self.in_pattern_node = self.in_pattern_node, True
+ self.visitchildren(node)
+ self.in_pattern_node = in_pattern_node
+ return node
+
+ def visit_JoinedStrNode(self, node):
+ if self.in_pattern_node:
+ error(node.pos, "f-strings are not accepted for pattern matching")
+ self.visitchildren(node)
+ return node
+
class _AssignmentExpressionTargetNameFinder(TreeVisitor):
def __init__(self):
super(_AssignmentExpressionTargetNameFinder, self).__init__()
@@ -844,6 +859,14 @@ class InterpretCompilerDirectives(CythonTransform):
}
special_methods.update(unop_method_nodes)
+ valid_cython_submodules = {
+ 'cimports',
+ 'dataclasses',
+ 'operator',
+ 'parallel',
+ 'view',
+ }
+
valid_parallel_directives = {
"parallel",
"prange",
@@ -872,6 +895,34 @@ class InterpretCompilerDirectives(CythonTransform):
error(pos, "Invalid directive: '%s'." % (directive,))
return True
+ def _check_valid_cython_module(self, pos, module_name):
+ if not module_name.startswith("cython."):
+ return
+ if module_name.split('.', 2)[1] in self.valid_cython_submodules:
+ return
+
+ extra = ""
+ # This is very rarely used, so don't waste space on static tuples.
+ hints = [
+ line.split() for line in """\
+ imp cimports
+ cimp cimports
+ para parallel
+ parra parallel
+ dataclass dataclasses
+ """.splitlines()[:-1]
+ ]
+ for wrong, correct in hints:
+ if module_name.startswith("cython." + wrong):
+ extra = "Did you mean 'cython.%s' ?" % correct
+ break
+
+ error(pos, "'%s' is not a valid cython.* module%s%s" % (
+ module_name,
+ ". " if extra else "",
+ extra,
+ ))
+
# Set up processing and handle the cython: comments.
def visit_ModuleNode(self, node):
for key in sorted(node.directive_comments):
@@ -942,6 +993,9 @@ class InterpretCompilerDirectives(CythonTransform):
elif module_name.startswith(u"cython."):
if module_name.startswith(u"cython.parallel."):
error(node.pos, node.module_name + " is not a module")
+ else:
+ self._check_valid_cython_module(node.pos, module_name)
+
if module_name == u"cython.parallel":
if node.as_name and node.as_name != u"cython":
self.parallel_directives[node.as_name] = module_name
@@ -968,10 +1022,10 @@ class InterpretCompilerDirectives(CythonTransform):
node.pos, module_name, node.relative_level, node.imported_names)
elif not node.relative_level and (
module_name == u"cython" or module_name.startswith(u"cython.")):
+ self._check_valid_cython_module(node.pos, module_name)
submodule = (module_name + u".")[7:]
newimp = []
-
- for pos, name, as_name, kind in node.imported_names:
+ for pos, name, as_name in node.imported_names:
full_name = submodule + name
qualified_name = u"cython." + full_name
if self.is_parallel_directive(qualified_name, node.pos):
@@ -980,15 +1034,12 @@ class InterpretCompilerDirectives(CythonTransform):
self.parallel_directives[as_name or name] = qualified_name
elif self.is_cython_directive(full_name):
self.directive_names[as_name or name] = full_name
- if kind is not None:
- self.context.nonfatal_error(PostParseError(pos,
- "Compiler directive imports must be plain imports"))
elif full_name in ['dataclasses', 'typing']:
self.directive_names[as_name or name] = full_name
# unlike many directives, still treat it as a regular module
- newimp.append((pos, name, as_name, kind))
+ newimp.append((pos, name, as_name))
else:
- newimp.append((pos, name, as_name, kind))
+ newimp.append((pos, name, as_name))
if not newimp:
return None
@@ -1003,10 +1054,11 @@ class InterpretCompilerDirectives(CythonTransform):
imported_names = []
for name, name_node in node.items:
imported_names.append(
- (name_node.pos, name, None if name == name_node.name else name_node.name, None))
+ (name_node.pos, name, None if name == name_node.name else name_node.name))
return self._create_cimport_from_import(
node.pos, module_name, import_node.level, imported_names)
elif module_name == u"cython" or module_name.startswith(u"cython."):
+ self._check_valid_cython_module(import_node.module_name.pos, module_name)
submodule = (module_name + u".")[7:]
newimp = []
for name, name_node in node.items:
@@ -1041,14 +1093,13 @@ class InterpretCompilerDirectives(CythonTransform):
module_name=dotted_name,
as_name=as_name,
is_absolute=level == 0)
- for pos, dotted_name, as_name, _ in imported_names
+ for pos, dotted_name, as_name in imported_names
]
def visit_SingleAssignmentNode(self, node):
if isinstance(node.rhs, ExprNodes.ImportNode):
module_name = node.rhs.module_name.value
- is_special_module = (module_name + u".").startswith((u"cython.parallel.", u"cython.cimports."))
- if module_name != u"cython" and not is_special_module:
+ if module_name != u"cython" and not module_name.startswith("cython."):
return node
node = Nodes.CImportStatNode(node.pos, module_name=module_name, as_name=node.lhs.name)
@@ -1197,7 +1248,7 @@ class InterpretCompilerDirectives(CythonTransform):
return (optname, directivetype(optname, str(args[0].value)))
elif directivetype is Options.DEFER_ANALYSIS_OF_ARGUMENTS:
# signal to pass things on without processing
- return (optname, (args, kwds.as_python_dict()))
+ return (optname, (args, kwds.as_python_dict() if kwds else {}))
else:
assert False
@@ -1290,8 +1341,7 @@ class InterpretCompilerDirectives(CythonTransform):
name, value = directive
if self.directives.get(name, object()) != value:
directives.append(directive)
- if (directive[0] == 'staticmethod' or
- (directive[0] == 'dataclasses.dataclass' and scope_name == 'class')):
+ if directive[0] == 'staticmethod':
both.append(dec)
# Adapt scope type based on decorators that change it.
if directive[0] == 'cclass' and scope_name == 'class':
@@ -1301,10 +1351,11 @@ class InterpretCompilerDirectives(CythonTransform):
if realdecs and (scope_name == 'cclass' or
isinstance(node, (Nodes.CClassDefNode, Nodes.CVarDefNode))):
for realdec in realdecs:
+ dec_pos = realdec.pos
realdec = realdec.decorator
if ((realdec.is_name and realdec.name == "dataclass") or
(realdec.is_attribute and realdec.attribute == "dataclass")):
- error(realdec.pos,
+ error(dec_pos,
"Use '@cython.dataclasses.dataclass' on cdef classes to create a dataclass")
# Note - arbitrary C function decorators are caught later in DecoratorTransform
raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.")
@@ -1602,6 +1653,128 @@ class WithTransform(VisitorTransform, SkipDeclarations):
visit_Node = VisitorTransform.recurse_to_children
+class _GeneratorExpressionArgumentsMarker(TreeVisitor, SkipDeclarations):
+ # called from "MarkClosureVisitor"
+ def __init__(self, gen_expr):
+ super(_GeneratorExpressionArgumentsMarker, self).__init__()
+ self.gen_expr = gen_expr
+
+ def visit_ExprNode(self, node):
+ if not node.is_literal:
+ # Don't bother tagging literal nodes
+ assert (not node.generator_arg_tag) # nobody has tagged this first
+ node.generator_arg_tag = self.gen_expr
+ self.visitchildren(node)
+
+ def visit_Node(self, node):
+ # We're only interested in the expressions that make up the iterator sequence,
+ # so don't go beyond ExprNodes (e.g. into ForFromStatNode).
+ return
+
+ def visit_GeneratorExpressionNode(self, node):
+ node.generator_arg_tag = self.gen_expr
+ # don't visit children, can't handle overlapping tags
+ # (and assume generator expressions don't end up optimized out in a way
+ # that would require overlapping tags)
+
+
+class _HandleGeneratorArguments(VisitorTransform, SkipDeclarations):
+ # used from within CreateClosureClasses
+
+ def __call__(self, node):
+ from . import Visitor
+ assert isinstance(node, ExprNodes.GeneratorExpressionNode)
+ self.gen_node = node
+
+ self.args = list(node.def_node.args)
+ self.call_parameters = list(node.call_parameters)
+ self.tag_count = 0
+ self.substitutions = {}
+
+ self.visitchildren(node)
+
+ for k, v in self.substitutions.items():
+ # doing another search for replacements here (at the end) allows us to sweep up
+ # CloneNodes too (which are often generated by the optimizer)
+ # (it could arguably be done more efficiently with a single traversal though)
+ Visitor.recursively_replace_node(node, k, v)
+
+ node.def_node.args = self.args
+ node.call_parameters = self.call_parameters
+ return node
+
+ def visit_GeneratorExpressionNode(self, node):
+ # a generator can also be substituted itself, so handle that case
+ new_node = self._handle_ExprNode(node, do_visit_children=False)
+ # However do not traverse into it. A new _HandleGeneratorArguments visitor will be used
+ # elsewhere to do that.
+ return node
+
+ def _handle_ExprNode(self, node, do_visit_children):
+ if (node.generator_arg_tag is not None and self.gen_node is not None and
+ self.gen_node == node.generator_arg_tag):
+ pos = node.pos
+ # The reason for using ".x" as the name is that this is how CPython
+ # tracks internal variables in loops (e.g.
+ # { locals() for v in range(10) }
+ # will produce "v" and ".0"). We don't replicate this behaviour completely
+ # but use it as a starting point
+ name_source = self.tag_count
+ self.tag_count += 1
+ name = EncodedString(".{0}".format(name_source))
+ def_node = self.gen_node.def_node
+ if not def_node.local_scope.lookup_here(name):
+ from . import Symtab
+ cname = EncodedString(Naming.genexpr_arg_prefix + Symtab.punycodify_name(str(name_source)))
+ name_decl = Nodes.CNameDeclaratorNode(pos=pos, name=name)
+ type = node.type
+ if type.is_reference and not type.is_fake_reference:
+ # It isn't obvious whether the right thing to do would be to capture by reference or by
+ # value (C++ itself doesn't know either for lambda functions and forces a choice).
+ # However, capture by reference involves converting to FakeReference which would require
+ # re-analysing AttributeNodes. Therefore I've picked capture-by-value out of convenience
+ # TODO - could probably be optimized by making the arg a reference but the closure not
+ # (see https://github.com/cython/cython/issues/2468)
+ type = type.ref_base_type
+
+ name_decl.type = type
+ new_arg = Nodes.CArgDeclNode(pos=pos, declarator=name_decl,
+ base_type=None, default=None, annotation=None)
+ new_arg.name = name_decl.name
+ new_arg.type = type
+
+ self.args.append(new_arg)
+ node.generator_arg_tag = None # avoid the possibility of this being caught again
+ self.call_parameters.append(node)
+ new_arg.entry = def_node.declare_argument(def_node.local_scope, new_arg)
+ new_arg.entry.cname = cname
+ new_arg.entry.in_closure = True
+
+ if do_visit_children:
+ # now visit the Node's children (but clear self.gen_node to prevent further
+ # argument substitution)
+ gen_node, self.gen_node = self.gen_node, None
+ self.visitchildren(node)
+ self.gen_node = gen_node
+
+ # replace the node inside the generator with a looked-up name
+ # (initialized_check can safely be False because the source variable will be checked
+ # before it is captured if the check is required)
+ name_node = ExprNodes.NameNode(pos, name=name, initialized_check=False)
+ name_node.entry = self.gen_node.def_node.gbody.local_scope.lookup(name_node.name)
+ name_node.type = name_node.entry.type
+ self.substitutions[node] = name_node
+ return name_node
+ if do_visit_children:
+ self.visitchildren(node)
+ return node
+
+ def visit_ExprNode(self, node):
+ return self._handle_ExprNode(node, True)
+
+ visit_Node = VisitorTransform.recurse_to_children
+
+
class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
"""
Transforms method decorators in cdef classes into nested calls or properties.
@@ -2057,22 +2230,10 @@ if VALUE is not None:
if not e.type.is_pyobject:
e.type.create_to_py_utility_code(env)
e.type.create_from_py_utility_code(env)
- all_members_names = sorted([e.name for e in all_members])
-
- # Cython 0.x used MD5 for the checksum, which a few Python installations remove for security reasons.
- # SHA-256 should be ok for years to come, but early Cython 3.0 alpha releases used SHA-1,
- # which may not be.
- checksum_algos = [hashlib.sha256, hashlib.sha1]
- try:
- checksum_algos.append(hashlib.md5)
- except AttributeError:
- pass
- member_names_string = ' '.join(all_members_names).encode('utf-8')
- checksums = [
- '0x' + mkchecksum(member_names_string).hexdigest()[:7]
- for mkchecksum in checksum_algos
- ]
+ all_members_names = [e.name for e in all_members]
+ checksums = _calculate_pickle_checksums(all_members_names)
+
unpickle_func_name = '__pyx_unpickle_%s' % node.punycode_class_name
# TODO(robertwb): Move the state into the third argument
@@ -2315,11 +2476,17 @@ if VALUE is not None:
assmt.analyse_declarations(env)
return assmt
+ def visit_func_outer_attrs(self, node):
+ # any names in the outer attrs should not be looked up in the function "seen_vars_stack"
+ stack = self.seen_vars_stack.pop()
+ super(AnalyseDeclarationsTransform, self).visit_func_outer_attrs(node)
+ self.seen_vars_stack.append(stack)
+
def visit_ScopedExprNode(self, node):
env = self.current_env()
node.analyse_declarations(env)
# the node may or may not have a local scope
- if node.has_local_scope:
+ if node.expr_scope:
self.seen_vars_stack.append(set(self.seen_vars_stack[-1]))
self.enter_scope(node, node.expr_scope)
node.analyse_scoped_declarations(node.expr_scope)
@@ -2327,6 +2494,7 @@ if VALUE is not None:
self.exit_scope()
self.seen_vars_stack.pop()
else:
+
node.analyse_scoped_declarations(env)
self.visitchildren(node)
return node
@@ -2483,6 +2651,24 @@ if VALUE is not None:
return node
+def _calculate_pickle_checksums(member_names):
+ # Cython 0.x used MD5 for the checksum, which a few Python installations remove for security reasons.
+ # SHA-256 should be ok for years to come, but early Cython 3.0 alpha releases used SHA-1,
+ # which may not be.
+ member_names_string = ' '.join(member_names).encode('utf-8')
+ hash_kwargs = {'usedforsecurity': False} if sys.version_info >= (3, 9) else {}
+ checksums = []
+ for algo_name in ['sha256', 'sha1', 'md5']:
+ try:
+ mkchecksum = getattr(hashlib, algo_name)
+ checksum = mkchecksum(member_names_string, **hash_kwargs).hexdigest()
+ except (AttributeError, ValueError):
+ # The algorithm (i.e. MD5) might not be there at all, or might be blocked at runtime.
+ continue
+ checksums.append('0x' + checksum[:7])
+ return checksums
+
+
class CalculateQualifiedNamesTransform(EnvTransform):
"""
Calculate and store the '__qualname__' and the global
@@ -2874,8 +3060,7 @@ class RemoveUnreachableCode(CythonTransform):
if not self.current_directives['remove_unreachable']:
return node
self.visitchildren(node)
- for idx, stat in enumerate(node.stats):
- idx += 1
+ for idx, stat in enumerate(node.stats, 1):
if stat.is_terminator:
if idx < len(node.stats):
if self.current_directives['warn.unreachable']:
@@ -2974,6 +3159,8 @@ class YieldNodeCollector(TreeVisitor):
class MarkClosureVisitor(CythonTransform):
+ # In addition to marking closures, this is also responsible for finding parts of the
+ # generator iterable and marking them
def visit_ModuleNode(self, node):
self.needs_closure = False
@@ -3044,6 +3231,19 @@ class MarkClosureVisitor(CythonTransform):
self.needs_closure = True
return node
+ def visit_GeneratorExpressionNode(self, node):
+ node = self.visit_LambdaNode(node)
+ if not isinstance(node.loop, Nodes._ForInStatNode):
+ # Possibly should handle ForFromStatNode
+ # but for now do nothing
+ return node
+ itseq = node.loop.iterator.sequence
+ # literals do not need replacing with an argument
+ if itseq.is_literal:
+ return node
+ _GeneratorExpressionArgumentsMarker(node).visit(itseq)
+ return node
+
class CreateClosureClasses(CythonTransform):
# Output closure classes in module scope for all functions
@@ -3188,6 +3388,10 @@ class CreateClosureClasses(CythonTransform):
self.visitchildren(node)
return node
+ def visit_GeneratorExpressionNode(self, node):
+ node = _HandleGeneratorArguments()(node)
+ return self.visit_LambdaNode(node)
+
class InjectGilHandling(VisitorTransform, SkipDeclarations):
"""
diff --git a/Cython/Compiler/Parsing.pxd b/Cython/Compiler/Parsing.pxd
index 7f4a1c220..997cdf513 100644
--- a/Cython/Compiler/Parsing.pxd
+++ b/Cython/Compiler/Parsing.pxd
@@ -21,17 +21,17 @@ cdef p_ident_list(PyrexScanner s)
cdef tuple p_binop_operator(PyrexScanner s)
cdef p_binop_expr(PyrexScanner s, ops, p_sub_expr_func p_sub_expr)
-cdef p_lambdef(PyrexScanner s, bint allow_conditional=*)
-cdef p_lambdef_nocond(PyrexScanner s)
-cdef p_test(PyrexScanner s, bint allow_assignment_expression=*)
-cdef p_test_nocond(PyrexScanner s, bint allow_assignment_expression=*)
-cdef p_walrus_test(PyrexScanner s, bint allow_assignment_expression=*)
+cdef p_lambdef(PyrexScanner s)
+cdef p_test(PyrexScanner s)
+cdef p_test_allow_walrus_after(PyrexScanner s)
+cdef p_namedexpr_test(PyrexScanner s)
cdef p_or_test(PyrexScanner s)
cdef p_rassoc_binop_expr(PyrexScanner s, unicode op, p_sub_expr_func p_subexpr)
cdef p_and_test(PyrexScanner s)
cdef p_not_test(PyrexScanner s)
cdef p_comparison(PyrexScanner s)
-cdef p_test_or_starred_expr(PyrexScanner s, bint is_expression=*)
+cdef p_test_or_starred_expr(PyrexScanner s)
+cdef p_namedexpr_test_or_starred_expr(PyrexScanner s)
cdef p_starred_expr(PyrexScanner s)
cdef p_cascaded_cmp(PyrexScanner s)
cdef p_cmp_op(PyrexScanner s)
@@ -62,6 +62,8 @@ cdef expect_ellipsis(PyrexScanner s)
cdef make_slice_nodes(pos, subscripts)
cpdef make_slice_node(pos, start, stop = *, step = *)
cdef p_atom(PyrexScanner s)
+cdef p_atom_string(PyrexScanner s)
+cdef p_atom_ident_constants(PyrexScanner s)
@cython.locals(value=unicode)
cdef p_int_literal(PyrexScanner s)
cdef p_name(PyrexScanner s, name)
@@ -85,9 +87,10 @@ cdef p_dict_or_set_maker(PyrexScanner s)
cdef p_backquote_expr(PyrexScanner s)
cdef p_simple_expr_list(PyrexScanner s, expr=*)
cdef p_test_or_starred_expr_list(PyrexScanner s, expr=*)
+cdef p_namedexpr_test_or_starred_expr_list(s, expr=*)
cdef p_testlist(PyrexScanner s)
cdef p_testlist_star_expr(PyrexScanner s)
-cdef p_testlist_comp(PyrexScanner s, bint is_expression=*)
+cdef p_testlist_comp(PyrexScanner s)
cdef p_genexp(PyrexScanner s, expr)
#-------------------------------------------------------
@@ -109,7 +112,7 @@ cdef p_return_statement(PyrexScanner s)
cdef p_raise_statement(PyrexScanner s)
cdef p_import_statement(PyrexScanner s)
cdef p_from_import_statement(PyrexScanner s, bint first_statement = *)
-cdef p_imported_name(PyrexScanner s, bint is_cimport)
+cdef p_imported_name(PyrexScanner s)
cdef p_dotted_name(PyrexScanner s, bint as_allowed)
cdef p_as_name(PyrexScanner s)
cdef p_assert_statement(PyrexScanner s)
@@ -129,6 +132,8 @@ cdef p_except_clause(PyrexScanner s)
cdef p_include_statement(PyrexScanner s, ctx)
cdef p_with_statement(PyrexScanner s)
cdef p_with_items(PyrexScanner s, bint is_async=*)
+cdef p_with_items_list(PyrexScanner s, bint is_async)
+cdef tuple p_with_item(PyrexScanner s, bint is_async)
cdef p_with_template(PyrexScanner s)
cdef p_simple_statement(PyrexScanner s, bint first_statement = *)
cdef p_simple_statement_list(PyrexScanner s, ctx, bint first_statement = *)
@@ -154,7 +159,6 @@ cdef bint looking_at_name(PyrexScanner s) except -2
cdef object looking_at_expr(PyrexScanner s)# except -2
cdef bint looking_at_base_type(PyrexScanner s) except -2
cdef bint looking_at_dotted_name(PyrexScanner s) except -2
-cdef bint looking_at_call(PyrexScanner s) except -2
cdef p_sign_and_longness(PyrexScanner s)
cdef p_opt_cname(PyrexScanner s)
cpdef p_c_declarator(PyrexScanner s, ctx = *, bint empty = *, bint is_type = *, bint cmethod_flag = *,
@@ -166,7 +170,7 @@ cdef p_c_simple_declarator(PyrexScanner s, ctx, bint empty, bint is_type, bint c
bint assignable, bint nonempty)
cdef p_nogil(PyrexScanner s)
cdef p_with_gil(PyrexScanner s)
-cdef p_exception_value_clause(PyrexScanner s)
+cdef p_exception_value_clause(PyrexScanner s, ctx)
cpdef p_c_arg_list(PyrexScanner s, ctx = *, bint in_pyfunc = *, bint cmethod_flag = *,
bint nonempty_declarators = *, bint kw_only = *, bint annotated = *)
cdef p_optional_ellipsis(PyrexScanner s)
diff --git a/Cython/Compiler/Parsing.py b/Cython/Compiler/Parsing.py
index 0b80eb4b0..94fc2eca1 100644
--- a/Cython/Compiler/Parsing.py
+++ b/Cython/Compiler/Parsing.py
@@ -4,7 +4,6 @@
#
from __future__ import absolute_import
-from ast import Expression
# This should be done automatically
import cython
@@ -20,7 +19,7 @@ cython.declare(Nodes=object, ExprNodes=object, EncodedString=object,
from io import StringIO
import re
import sys
-from unicodedata import lookup as lookup_unicodechar, category as unicode_category, name
+from unicodedata import lookup as lookup_unicodechar, category as unicode_category
from functools import partial, reduce
from .Scanning import PyrexScanner, FileSourceDescriptor, tentatively_scan
@@ -110,7 +109,7 @@ def p_binop_expr(s, ops, p_sub_expr):
#lambdef: 'lambda' [varargslist] ':' test
-def p_lambdef(s, allow_conditional=True):
+def p_lambdef(s):
# s.sy == 'lambda'
pos = s.position()
s.next()
@@ -121,61 +120,59 @@ def p_lambdef(s, allow_conditional=True):
args, star_arg, starstar_arg = p_varargslist(
s, terminator=':', annotated=False)
s.expect(':')
- if allow_conditional:
- expr = p_test(s, allow_assignment_expression=False)
- else:
- expr = p_test_nocond(s, allow_assignment_expression=False)
+ expr = p_test(s)
return ExprNodes.LambdaNode(
pos, args = args,
star_arg = star_arg, starstar_arg = starstar_arg,
result_expr = expr)
-#lambdef_nocond: 'lambda' [varargslist] ':' test_nocond
-
-def p_lambdef_nocond(s):
- return p_lambdef(s, allow_conditional=False)
-
#test: or_test ['if' or_test 'else' test] | lambdef
-def p_test(s, allow_assignment_expression=True):
+def p_test(s):
+ # The check for a following ':=' is only for error reporting purposes.
+ # It simply changes a
+ # expected ')', found ':='
+ # message into something a bit more descriptive.
+ # It is close to what the PEG parser does in CPython, where an expression has
+ # a lookahead assertion that it isn't followed by ':='
+ expr = p_test_allow_walrus_after(s)
+ if s.sy == ':=':
+ s.error("invalid syntax: assignment expression not allowed in this context")
+ return expr
+
+def p_test_allow_walrus_after(s):
if s.sy == 'lambda':
return p_lambdef(s)
pos = s.position()
- expr = p_walrus_test(s, allow_assignment_expression)
+ expr = p_or_test(s)
if s.sy == 'if':
s.next()
- # Assignment expressions are always allowed here
- # even if they wouldn't be allowed in the expression as a whole.
- test = p_walrus_test(s)
+ test = p_or_test(s)
s.expect('else')
other = p_test(s)
return ExprNodes.CondExprNode(pos, test=test, true_val=expr, false_val=other)
else:
return expr
-#test_nocond: or_test | lambdef_nocond
-
-def p_test_nocond(s, allow_assignment_expression=True):
- if s.sy == 'lambda':
- return p_lambdef_nocond(s)
- else:
- return p_walrus_test(s, allow_assignment_expression)
-
-# walrurus_test: IDENT := test | or_test
-
-def p_walrus_test(s, allow_assignment_expression=True):
- lhs = p_or_test(s)
+def p_namedexpr_test(s):
+ # defined in the LL parser as
+ # namedexpr_test: test [':=' test]
+ # The requirement that the LHS is a name is not enforced in the grammar.
+ # For comparison the PEG parser does:
+ # 1. look for "name :=", if found it's definitely a named expression
+ # so look for expression
+ # 2. Otherwise, look for expression
+ lhs = p_test_allow_walrus_after(s)
if s.sy == ':=':
position = s.position()
- if not allow_assignment_expression:
- s.error("invalid syntax: assignment expression not allowed in this context")
- elif not lhs.is_name:
- s.error("Left-hand side of assignment expression must be an identifier")
+ if not lhs.is_name:
+ s.error("Left-hand side of assignment expression must be an identifier", fatal=False)
s.next()
rhs = p_test(s)
return ExprNodes.AssignmentExpressionNode(position, lhs=lhs, rhs=rhs)
return lhs
+
#or_test: and_test ('or' and_test)*
COMMON_BINOP_MISTAKES = {'||': 'or', '&&': 'and'}
@@ -229,11 +226,17 @@ def p_comparison(s):
n1.cascade = p_cascaded_cmp(s)
return n1
-def p_test_or_starred_expr(s, is_expression=False):
+def p_test_or_starred_expr(s):
+ if s.sy == '*':
+ return p_starred_expr(s)
+ else:
+ return p_test(s)
+
+def p_namedexpr_test_or_starred_expr(s):
if s.sy == '*':
return p_starred_expr(s)
else:
- return p_test(s, allow_assignment_expression=is_expression)
+ return p_namedexpr_test(s)
def p_starred_expr(s):
pos = s.position()
@@ -507,7 +510,7 @@ def p_call_parse_args(s, allow_genexp=True):
keyword_args.append(p_test(s))
starstar_seen = True
else:
- arg = p_test(s)
+ arg = p_namedexpr_test(s)
if s.sy == '=':
s.next()
if not arg.is_name:
@@ -516,7 +519,7 @@ def p_call_parse_args(s, allow_genexp=True):
encoded_name = s.context.intern_ustring(arg.name)
keyword = ExprNodes.IdentifierStringNode(
arg.pos, value=encoded_name)
- arg = p_test(s, allow_assignment_expression=False)
+ arg = p_test(s)
keyword_args.append((keyword, arg))
else:
if keyword_args:
@@ -655,9 +658,7 @@ def p_slice_element(s, follow_set):
return None
def expect_ellipsis(s):
- s.expect('.')
- s.expect('.')
- s.expect('.')
+ s.expect('...')
def make_slice_nodes(pos, subscripts):
# Convert a list of subscripts as returned
@@ -694,7 +695,7 @@ def p_atom(s):
elif s.sy == 'yield':
result = p_yield_expression(s)
else:
- result = p_testlist_comp(s, is_expression=True)
+ result = p_testlist_comp(s)
s.expect(')')
return result
elif sy == '[':
@@ -703,7 +704,7 @@ def p_atom(s):
return p_dict_or_set_maker(s)
elif sy == '`':
return p_backquote_expr(s)
- elif sy == '.':
+ elif sy == '...':
expect_ellipsis(s)
return ExprNodes.EllipsisNode(pos)
elif sy == 'INT':
@@ -717,36 +718,55 @@ def p_atom(s):
s.next()
return ExprNodes.ImagNode(pos, value = value)
elif sy == 'BEGIN_STRING':
- kind, bytes_value, unicode_value = p_cat_string_literal(s)
- if kind == 'c':
- return ExprNodes.CharNode(pos, value = bytes_value)
- elif kind == 'u':
- return ExprNodes.UnicodeNode(pos, value = unicode_value, bytes_value = bytes_value)
- elif kind == 'b':
- return ExprNodes.BytesNode(pos, value = bytes_value)
- elif kind == 'f':
- return ExprNodes.JoinedStrNode(pos, values = unicode_value)
- elif kind == '':
- return ExprNodes.StringNode(pos, value = bytes_value, unicode_value = unicode_value)
- else:
- s.error("invalid string kind '%s'" % kind)
+ return p_atom_string(s)
elif sy == 'IDENT':
- name = s.systring
- if name == "None":
- result = ExprNodes.NoneNode(pos)
- elif name == "True":
- result = ExprNodes.BoolNode(pos, value=True)
- elif name == "False":
- result = ExprNodes.BoolNode(pos, value=False)
- elif name == "NULL" and not s.in_python_file:
- result = ExprNodes.NullNode(pos)
- else:
- result = p_name(s, name)
- s.next()
+ result = p_atom_ident_constants(s)
+ if result is None:
+ result = p_name(s, s.systring)
+ s.next()
return result
else:
s.error("Expected an identifier or literal")
+
+def p_atom_string(s):
+ pos = s.position()
+ kind, bytes_value, unicode_value = p_cat_string_literal(s)
+ if kind == 'c':
+ return ExprNodes.CharNode(pos, value=bytes_value)
+ elif kind == 'u':
+ return ExprNodes.UnicodeNode(pos, value=unicode_value, bytes_value=bytes_value)
+ elif kind == 'b':
+ return ExprNodes.BytesNode(pos, value=bytes_value)
+ elif kind == 'f':
+ return ExprNodes.JoinedStrNode(pos, values=unicode_value)
+ elif kind == '':
+ return ExprNodes.StringNode(pos, value=bytes_value, unicode_value=unicode_value)
+ else:
+ s.error("invalid string kind '%s'" % kind)
+
+
+def p_atom_ident_constants(s):
+ """
+    Returns None if it isn't one of the special-cased named constants.
+    Only calls s.next() if it successfully matches one of them.
+ """
+ pos = s.position()
+ name = s.systring
+ result = None
+ if name == "None":
+ result = ExprNodes.NoneNode(pos)
+ elif name == "True":
+ result = ExprNodes.BoolNode(pos, value=True)
+ elif name == "False":
+ result = ExprNodes.BoolNode(pos, value=False)
+ elif name == "NULL" and not s.in_python_file:
+ result = ExprNodes.NullNode(pos)
+ if result:
+ s.next()
+ return result
+
+
def p_int_literal(s):
pos = s.position()
value = s.systring
@@ -1265,7 +1285,7 @@ def p_f_string_expr(s, unicode_value, pos, starting_index, is_raw):
# since PEP 448:
# list_display ::= "[" [listmaker] "]"
-# listmaker ::= (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
+# listmaker ::= (named_test|star_expr) ( comp_for | (',' (named_test|star_expr))* [','] )
# comp_iter ::= comp_for | comp_if
# comp_for ::= ["async"] "for" expression_list "in" testlist [comp_iter]
# comp_if ::= "if" test [comp_iter]
@@ -1278,7 +1298,7 @@ def p_list_maker(s):
s.expect(']')
return ExprNodes.ListNode(pos, args=[])
- expr = p_test_or_starred_expr(s, is_expression=True)
+ expr = p_namedexpr_test_or_starred_expr(s)
if s.sy in ('for', 'async'):
if expr.is_starred:
s.error("iterable unpacking cannot be used in comprehension")
@@ -1293,7 +1313,7 @@ def p_list_maker(s):
# (merged) list literal
if s.sy == ',':
s.next()
- exprs = p_test_or_starred_expr_list(s, expr)
+ exprs = p_namedexpr_test_or_starred_expr_list(s, expr)
else:
exprs = [expr]
s.expect(']')
@@ -1327,7 +1347,12 @@ def p_comp_if(s, body):
# s.sy == 'if'
pos = s.position()
s.next()
- test = p_test_nocond(s)
+ # Note that Python 3.9+ is actually more restrictive here and Cython now follows
+ # the Python 3.9+ behaviour: https://github.com/python/cpython/issues/86014
+ # On Python <3.9 `[i for i in range(10) if lambda: i if True else 1]` was disallowed
+ # but `[i for i in range(10) if lambda: i]` was allowed.
+ # On Python >=3.9 they're both disallowed.
+ test = p_or_test(s)
return Nodes.IfStatNode(pos,
if_clauses = [Nodes.IfClauseNode(pos, condition = test,
body = p_comp_iter(s, body))],
@@ -1478,7 +1503,16 @@ def p_simple_expr_list(s, expr=None):
def p_test_or_starred_expr_list(s, expr=None):
exprs = expr is not None and [expr] or []
while s.sy not in expr_terminators:
- exprs.append(p_test_or_starred_expr(s, is_expression=(expr is not None)))
+ exprs.append(p_test_or_starred_expr(s))
+ if s.sy != ',':
+ break
+ s.next()
+ return exprs
+
+def p_namedexpr_test_or_starred_expr_list(s, expr=None):
+ exprs = expr is not None and [expr] or []
+ while s.sy not in expr_terminators:
+ exprs.append(p_namedexpr_test_or_starred_expr(s))
if s.sy != ',':
break
s.next()
@@ -1511,12 +1545,12 @@ def p_testlist_star_expr(s):
# testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
-def p_testlist_comp(s, is_expression=False):
+def p_testlist_comp(s):
pos = s.position()
- expr = p_test_or_starred_expr(s, is_expression)
+ expr = p_namedexpr_test_or_starred_expr(s)
if s.sy == ',':
s.next()
- exprs = p_test_or_starred_expr_list(s, expr)
+ exprs = p_namedexpr_test_or_starred_expr_list(s, expr)
return ExprNodes.TupleNode(pos, args = exprs)
elif s.sy in ('for', 'async'):
return p_genexp(s, expr)
@@ -1762,11 +1796,11 @@ def p_from_import_statement(s, first_statement = 0):
# s.sy == 'from'
pos = s.position()
s.next()
- if s.sy == '.':
+ if s.sy in ('.', '...'):
# count relative import level
level = 0
- while s.sy == '.':
- level += 1
+ while s.sy in ('.', '...'):
+ level += len(s.sy)
s.next()
else:
level = None
@@ -1785,18 +1819,18 @@ def p_from_import_statement(s, first_statement = 0):
is_cimport = kind == 'cimport'
is_parenthesized = False
if s.sy == '*':
- imported_names = [(s.position(), s.context.intern_ustring("*"), None, None)]
+ imported_names = [(s.position(), s.context.intern_ustring("*"), None)]
s.next()
else:
if s.sy == '(':
is_parenthesized = True
s.next()
- imported_names = [p_imported_name(s, is_cimport)]
+ imported_names = [p_imported_name(s)]
while s.sy == ',':
s.next()
if is_parenthesized and s.sy == ')':
break
- imported_names.append(p_imported_name(s, is_cimport))
+ imported_names.append(p_imported_name(s))
if is_parenthesized:
s.expect(')')
if dotted_name == '__future__':
@@ -1805,7 +1839,7 @@ def p_from_import_statement(s, first_statement = 0):
elif level:
s.error("invalid syntax")
else:
- for (name_pos, name, as_name, kind) in imported_names:
+ for (name_pos, name, as_name) in imported_names:
if name == "braces":
s.error("not a chance", name_pos)
break
@@ -1816,7 +1850,7 @@ def p_from_import_statement(s, first_statement = 0):
break
s.context.future_directives.add(directive)
return Nodes.PassStatNode(pos)
- elif kind == 'cimport':
+ elif is_cimport:
return Nodes.FromCImportStatNode(
pos, module_name=dotted_name,
relative_level=level,
@@ -1824,7 +1858,7 @@ def p_from_import_statement(s, first_statement = 0):
else:
imported_name_strings = []
items = []
- for (name_pos, name, as_name, kind) in imported_names:
+ for (name_pos, name, as_name) in imported_names:
imported_name_strings.append(
ExprNodes.IdentifierStringNode(name_pos, value=name))
items.append(
@@ -1839,18 +1873,11 @@ def p_from_import_statement(s, first_statement = 0):
items = items)
-imported_name_kinds = cython.declare(frozenset, frozenset((
- 'class', 'struct', 'union')))
-
-def p_imported_name(s, is_cimport):
+def p_imported_name(s):
pos = s.position()
- kind = None
- if is_cimport and s.systring in imported_name_kinds:
- kind = s.systring
- s.next()
name = p_ident(s)
as_name = p_as_name(s)
- return (pos, name, as_name, kind)
+ return (pos, name, as_name)
def p_dotted_name(s, as_allowed):
@@ -1904,7 +1931,7 @@ def p_if_statement(s):
def p_if_clause(s):
pos = s.position()
- test = p_test(s)
+ test = p_namedexpr_test(s)
body = p_suite(s)
return Nodes.IfClauseNode(pos,
condition = test, body = body)
@@ -1920,7 +1947,7 @@ def p_while_statement(s):
# s.sy == 'while'
pos = s.position()
s.next()
- test = p_test(s)
+ test = p_namedexpr_test(s)
body = p_suite(s)
else_clause = p_else_clause(s)
return Nodes.WhileStatNode(pos,
@@ -2118,6 +2145,51 @@ def p_with_statement(s):
def p_with_items(s, is_async=False):
+ """
+ Copied from CPython:
+ | 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block {
+ _PyAST_With(a, b, NULL, EXTRA) }
+ | 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
+ _PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
+ Therefore the first thing to try is the bracket-enclosed
+ version and if that fails try the regular version
+ """
+ brackets_succeeded = False
+ items = () # unused, but static analysis fails to track that below
+ if s.sy == '(':
+ with tentatively_scan(s) as errors:
+ s.next()
+ items = p_with_items_list(s, is_async)
+ s.expect(")")
+ brackets_succeeded = not errors
+ if not brackets_succeeded:
+ # try the non-bracket version
+ items = p_with_items_list(s, is_async)
+ body = p_suite(s)
+ for cls, pos, kwds in reversed(items):
+ # construct the actual nodes now that we know what the body is
+ body = cls(pos, body=body, **kwds)
+ return body
+
+
+def p_with_items_list(s, is_async):
+ items = []
+ while True:
+ items.append(p_with_item(s, is_async))
+ if s.sy != ",":
+ break
+ s.next()
+ if s.sy == ")":
+ # trailing commas allowed
+ break
+ return items
+
+
+def p_with_item(s, is_async):
+ # In contrast to most parsing functions, this returns a tuple of
+ # class, pos, kwd_dict
+ # This is because GILStatNode does a reasonable amount of initialization in its
+ # constructor, and requires "body" to be set, which we don't currently have
pos = s.position()
if not s.in_python_file and s.sy == 'IDENT' and s.systring in ('nogil', 'gil'):
if is_async:
@@ -2132,24 +2204,14 @@ def p_with_items(s, is_async=False):
condition = p_test(s)
s.expect(')')
- if s.sy == ',':
- s.next()
- body = p_with_items(s)
- else:
- body = p_suite(s)
- return Nodes.GILStatNode(pos, state=state, body=body, condition=condition)
+ return Nodes.GILStatNode, pos, {"state": state, "condition": condition}
else:
manager = p_test(s)
target = None
if s.sy == 'IDENT' and s.systring == 'as':
s.next()
target = p_starred_expr(s)
- if s.sy == ',':
- s.next()
- body = p_with_items(s, is_async=is_async)
- else:
- body = p_suite(s)
- return Nodes.WithStatNode(pos, manager=manager, target=target, body=body, is_async=is_async)
+ return Nodes.WithStatNode, pos, {"manager": manager, "target": target, "is_async": is_async}
def p_with_template(s):
@@ -2797,16 +2859,6 @@ def looking_at_dotted_name(s):
else:
return 0
-def looking_at_call(s):
- "See if we're looking at a.b.c("
- # Don't mess up the original position, so save and restore it.
- # Unfortunately there's no good way to handle this, as a subsequent call
- # to next() will not advance the position until it reads a new token.
- position = s.start_line, s.start_col
- result = looking_at_expr(s) == u'('
- if not result:
- s.start_line, s.start_col = position
- return result
basic_c_type_names = cython.declare(frozenset, frozenset((
"void", "char", "int", "float", "double", "bint")))
@@ -2911,7 +2963,17 @@ def p_c_func_declarator(s, pos, ctx, base, cmethod_flag):
ellipsis = p_optional_ellipsis(s)
s.expect(')')
nogil = p_nogil(s)
- exc_val, exc_check = p_exception_value_clause(s)
+ exc_val, exc_check, exc_clause = p_exception_value_clause(s, ctx)
+ if nogil and exc_clause:
+ warning(
+ s.position(),
+ "The keyword 'nogil' should appear at the end of the "
+ "function signature line. Placing it before 'except' "
+ "or 'noexcept' will be disallowed in a future version "
+ "of Cython.",
+ level=2
+ )
+ nogil = nogil or p_nogil(s)
with_gil = p_with_gil(s)
return Nodes.CFuncDeclaratorNode(pos,
base = base, args = args, has_varargs = ellipsis,
@@ -3021,18 +3083,54 @@ def p_with_gil(s):
else:
return 0
-def p_exception_value_clause(s):
+def p_exception_value_clause(s, ctx):
+ """
+ Parse exception value clause.
+
+ Maps clauses to exc_check / exc_value / exc_clause as follows:
+ ______________________________________________________________________
+ | | | | |
+ | Clause | exc_check | exc_value | exc_clause |
+ | ___________________________ | ___________ | ___________ | __________ |
+ | | | | |
+ | <nothing> (default func.) | True | None | False |
+ | <nothing> (cdef extern) | False | None | False |
+ | noexcept | False | None | True |
+ | except <val> | False | <val> | True |
+ | except? <val> | True | <val> | True |
+ | except * | True | None | True |
+ | except + | '+' | None | True |
+ | except +* | '+' | '*' | True |
+ | except +<PyErr> | '+' | <PyErr> | True |
+ | ___________________________ | ___________ | ___________ | __________ |
+
+ Note that the only reason we need `exc_clause` is to raise a
+ warning when `'except'` or `'noexcept'` is placed after the
+ `'nogil'` keyword.
+ """
+ exc_clause = False
exc_val = None
- exc_check = 0
- if s.sy == 'except':
+ if ctx.visibility == 'extern':
+ exc_check = False
+ else:
+ exc_check = True
+
+ if s.sy == 'IDENT' and s.systring == 'noexcept':
+ exc_clause = True
+ s.next()
+ exc_check = False
+ elif s.sy == 'except':
+ exc_clause = True
s.next()
if s.sy == '*':
- exc_check = 1
+ exc_check = True
s.next()
elif s.sy == '+':
exc_check = '+'
s.next()
- if s.sy == 'IDENT':
+ if p_nogil(s):
+ ctx.nogil = True
+ elif s.sy == 'IDENT':
name = s.systring
s.next()
exc_val = p_name(s, name)
@@ -3041,13 +3139,16 @@ def p_exception_value_clause(s):
s.next()
else:
if s.sy == '?':
- exc_check = 1
+ exc_check = True
s.next()
+ else:
+ exc_check = False
+ # exc_val can be non-None even if exc_check is False, c.f. "except -1"
exc_val = p_test(s)
- return exc_val, exc_check
+ return exc_val, exc_check, exc_clause
c_arg_list_terminators = cython.declare(frozenset, frozenset((
- '*', '**', '.', ')', ':', '/')))
+ '*', '**', '...', ')', ':', '/')))
def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0,
nonempty_declarators = 0, kw_only = 0, annotated = 1):
@@ -3066,7 +3167,7 @@ def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0,
return args
def p_optional_ellipsis(s):
- if s.sy == '.':
+ if s.sy == '...':
expect_ellipsis(s)
return 1
else:
@@ -3110,11 +3211,11 @@ def p_c_arg_decl(s, ctx, in_pyfunc, cmethod_flag = 0, nonempty = 0,
default = ExprNodes.NoneNode(pos)
s.next()
elif 'inline' in ctx.modifiers:
- default = p_test(s, allow_assignment_expression=False)
+ default = p_test(s)
else:
error(pos, "default values cannot be specified in pxd files, use ? or *")
else:
- default = p_test(s, allow_assignment_expression=False)
+ default = p_test(s)
return Nodes.CArgDeclNode(pos,
base_type = base_type,
declarator = declarator,
@@ -3483,14 +3584,7 @@ def p_decorators(s):
while s.sy == '@':
pos = s.position()
s.next()
- decstring = p_dotted_name(s, as_allowed=0)[2]
- names = decstring.split('.')
- decorator = ExprNodes.NameNode(pos, name=s.context.intern_ustring(names[0]))
- for name in names[1:]:
- decorator = ExprNodes.AttributeNode(
- pos, attribute=s.context.intern_ustring(name), obj=decorator)
- if s.sy == '(':
- decorator = p_call(s, decorator)
+ decorator = p_namedexpr_test(s)
decorators.append(Nodes.DecoratorNode(pos, decorator=decorator))
s.expect_newline("Expected a newline after decorator")
return decorators
@@ -3807,6 +3901,9 @@ def p_compiler_directive_comments(s):
for name in new_directives:
if name not in result:
pass
+ elif Options.directive_types.get(name) is list:
+ result[name] += new_directives[name]
+ new_directives[name] = result[name]
elif new_directives[name] == result[name]:
warning(pos, "Duplicate directive found: %s" % (name,))
else:
@@ -3946,12 +4043,13 @@ def p_cpp_class_attribute(s, ctx):
node.decorators = decorators
return node
+
def p_match_statement(s, ctx):
assert s.sy == "IDENT" and s.systring == "match"
pos = s.position()
with tentatively_scan(s) as errors:
s.next()
- subject = p_test(s)
+ subject = p_namedexpr_test(s)
subjects = None
if s.sy == ",":
subjects = [subject]
@@ -3965,6 +4063,7 @@ def p_match_statement(s, ctx):
s.expect(":")
if errors:
return None
+
# at this stage were commited to it being a match block so continue
# outside "with tentatively_scan"
# (I think this deviates from the PEG parser slightly, and it'd
@@ -3975,10 +4074,11 @@ def p_match_statement(s, ctx):
while s.sy != "DEDENT":
cases.append(p_case_block(s, ctx))
s.expect_dedent()
- return MatchCaseNodes.MatchNode(pos, subject = subject, cases = cases)
+ return MatchCaseNodes.MatchNode(pos, subject=subject, cases=cases)
+
def p_case_block(s, ctx):
- if not (s.sy=="IDENT" and s.systring == "case"):
+ if not (s.sy == "IDENT" and s.systring == "case"):
s.error("Expected 'case'")
s.next()
pos = s.position()
@@ -3991,8 +4091,10 @@ def p_case_block(s, ctx):
return MatchCaseNodes.MatchCaseNode(pos, pattern=pattern, body=body, guard=guard)
+
def p_patterns(s):
- # note - in slight contrast to the name, returns a single pattern
+ # note - in slight contrast to the name (which comes from the Python grammar),
+ # returns a single pattern
patterns = []
seq = False
pos = s.position()
@@ -4004,9 +4106,9 @@ def p_patterns(s):
break # all is good provided we have at least 1 pattern
else:
e = errors[0]
- s.error(e.args[1], pos = e.args[0])
+ s.error(e.args[1], pos=e.args[0])
patterns.append(pattern)
-
+
if s.sy == ",":
seq = True
s.next()
@@ -4014,11 +4116,13 @@ def p_patterns(s):
break # common reasons to break
else:
break
+
if seq:
- return MatchCaseNodes.MatchSequencePatternNode(pos, patterns = patterns)
+ return MatchCaseNodes.MatchSequencePatternNode(pos, patterns=patterns)
else:
return patterns[0]
+
def p_maybe_star_pattern(s):
# For match case. Either star_pattern or pattern
if s.sy == "*":
@@ -4030,12 +4134,13 @@ def p_maybe_star_pattern(s):
else:
s.next()
pattern = MatchCaseNodes.MatchAndAssignPatternNode(
- s.position(), target = target, is_star = True
+ s.position(), target=target, is_star=True
)
return pattern
else:
- p = p_pattern(s)
- return p
+ pattern = p_pattern(s)
+ return pattern
+
def p_pattern(s):
# try "as_pattern" then "or_pattern"
@@ -4048,13 +4153,15 @@ def p_pattern(s):
s.next()
else:
break
+
if len(patterns) > 1:
pattern = MatchCaseNodes.OrPatternNode(
pos,
- alternatives = patterns
+ alternatives=patterns
)
else:
pattern = patterns[0]
+
if s.sy == 'IDENT' and s.systring == 'as':
s.next()
with tentatively_scan(s) as errors:
@@ -4062,17 +4169,18 @@ def p_pattern(s):
if errors and s.sy == "_":
s.next()
# make this a specific error
- return Nodes.ErrorNode(errors[0].args[0], what = errors[0].args[1])
+ return Nodes.ErrorNode(errors[0].args[0], what=errors[0].args[1])
elif errors:
with tentatively_scan(s):
expr = p_test(s)
- return Nodes.ErrorNode(expr.pos, what = "Invalid pattern target")
+ return Nodes.ErrorNode(expr.pos, what="Invalid pattern target")
s.error(errors[0])
return pattern
def p_closed_pattern(s):
"""
+ The PEG parser specifies it as
| literal_pattern
| capture_pattern
| wildcard_pattern
@@ -4081,42 +4189,49 @@ def p_closed_pattern(s):
| sequence_pattern
| mapping_pattern
| class_pattern
+
+    For the sake of avoiding too much backtracking, we know:
+    * starts with "{" is a mapping_pattern
+    * starts with "[" is a sequence_pattern
+ * starts with "(" is a group_pattern or sequence_pattern
+ * wildcard pattern is just identifier=='_'
+ The rest are then tried in order with backtracking
"""
+ if s.sy == 'IDENT' and s.systring == '_':
+ pos = s.position()
+ s.next()
+ return MatchCaseNodes.MatchAndAssignPatternNode(pos)
+ elif s.sy == '{':
+ return p_mapping_pattern(s)
+ elif s.sy == '[':
+ return p_sequence_pattern(s)
+ elif s.sy == '(':
+ with tentatively_scan(s) as errors:
+ result = p_group_pattern(s)
+ if not errors:
+ return result
+ return p_sequence_pattern(s)
+
with tentatively_scan(s) as errors:
result = p_literal_pattern(s)
- if not errors:
- return result
+ if not errors:
+ return result
with tentatively_scan(s) as errors:
result = p_capture_pattern(s)
- if not errors:
- return result
- with tentatively_scan(s) as errors:
- result = p_wildcard_pattern(s)
- if not errors:
- return result
+ if not errors:
+ return result
with tentatively_scan(s) as errors:
result = p_value_pattern(s)
- if not errors:
- return result
- with tentatively_scan(s) as errors:
- result = p_group_pattern(s)
- if not errors:
- return result
- with tentatively_scan(s) as errors:
- result = p_sequence_pattern(s)
- if not errors:
- return result
- with tentatively_scan(s) as errors:
- result = p_mapping_pattern(s)
- if not errors:
- return result
+ if not errors:
+ return result
return p_class_pattern(s)
+
def p_literal_pattern(s):
# a lot of duplication in this function with "p_atom"
next_must_be_a_number = False
sign = ''
- if s.sy in ['+', '-']:
+ if s.sy == '-':
sign = s.sy
sign_pos = s.position()
s.next()
@@ -4131,9 +4246,11 @@ def p_literal_pattern(s):
elif sy == 'FLOAT':
value = s.systring
s.next()
- res = ExprNodes.FloatNode(pos, value = value)
+ res = ExprNodes.FloatNode(pos, value=value)
+
if res and sign == "-":
res = ExprNodes.UnaryMinusNode(sign_pos, operand=res)
+
if res and s.sy in ['+', '-']:
sign = s.sy
s.next()
@@ -4146,61 +4263,43 @@ def p_literal_pattern(s):
res = ExprNodes.binop_node(
add_pos,
sign,
- operand1 = res,
- operand2 = ExprNodes.ImagNode(s.position(), value = value)
+ operand1=res,
+ operand2=ExprNodes.ImagNode(s.position(), value=value)
)
if not res and sy == 'IMAG':
value = s.systring[:-1]
s.next()
- res = ExprNodes.ImagNode(pos, value = sign+value)
+ res = ExprNodes.ImagNode(pos, value=sign+value)
if sign == "-":
res = ExprNodes.UnaryMinusNode(sign_pos, operand=res)
if res:
- return MatchCaseNodes.MatchValuePatternNode(pos, value = res)
+ return MatchCaseNodes.MatchValuePatternNode(pos, value=res)
+ if next_must_be_a_number:
+ s.error("Expected a number")
if sy == 'BEGIN_STRING':
- if next_must_be_a_number:
- s.error("Expected a number")
- kind, bytes_value, unicode_value = p_cat_string_literal(s)
- if kind == 'c':
- res = ExprNodes.CharNode(pos, value = bytes_value)
- elif kind == 'u':
- res = ExprNodes.UnicodeNode(pos, value = unicode_value, bytes_value = bytes_value)
- elif kind == 'b':
- res = ExprNodes.BytesNode(pos, value = bytes_value)
- elif kind == 'f':
- res = Nodes.ErrorNode(pos, what = "f-strings are not accepted for pattern matching")
- elif kind == '':
- res = ExprNodes.StringNode(pos, value = bytes_value, unicode_value = unicode_value)
- else:
- s.error("invalid string kind '%s'" % kind)
- return MatchCaseNodes.MatchValuePatternNode(pos, value = res)
+ res = p_atom_string(s)
+ # f-strings not being accepted is validated in PostParse
+ return MatchCaseNodes.MatchValuePatternNode(pos, value=res)
elif sy == 'IDENT':
- name = s.systring
- result = None
- if name == "None":
- result = ExprNodes.NoneNode(pos)
- elif name == "True":
- result = ExprNodes.BoolNode(pos, value=True, type=Builtin.bool_type)
- elif name == "False":
- result = ExprNodes.BoolNode(pos, value=False, type=Builtin.bool_type)
- elif name == "NULL" and not s.in_python_file:
- # Included Null as an exactly matched constant here
- result = ExprNodes.NullNode(pos)
+ # Note that p_atom_ident_constants includes NULL.
+ # This is a deliberate Cython addition to the pattern matching specification
+ result = p_atom_ident_constants(s)
if result:
- s.next()
- return MatchCaseNodes.MatchValuePatternNode(pos, value = result, is_is_check = True)
+ return MatchCaseNodes.MatchValuePatternNode(pos, value=result, is_is_check=True)
s.error("Failed to match literal")
+
def p_capture_pattern(s):
return MatchCaseNodes.MatchAndAssignPatternNode(
s.position(),
- target = p_pattern_capture_target(s)
+ target=p_pattern_capture_target(s)
)
+
def p_value_pattern(s):
if s.sy != "IDENT":
s.error("Expected identifier")
@@ -4213,10 +4312,11 @@ def p_value_pattern(s):
attr_pos = s.position()
s.next()
attr = p_ident(s)
- res = ExprNodes.AttributeNode(attr_pos, obj = res, attribute=attr)
+ res = ExprNodes.AttributeNode(attr_pos, obj=res, attribute=attr)
if s.sy in ['(', '=']:
s.error("Unexpected symbol '%s'" % s.sy)
- return MatchCaseNodes.MatchValuePatternNode(pos, value = res)
+ return MatchCaseNodes.MatchValuePatternNode(pos, value=res)
+
def p_group_pattern(s):
s.expect("(")
@@ -4224,12 +4324,6 @@ def p_group_pattern(s):
s.expect(")")
return pattern
-def p_wildcard_pattern(s):
- if s.sy != "IDENT" or s.systring != "_":
- s.error("Expected '_'")
- pos = s.position()
- s.next()
- return MatchCaseNodes.MatchAndAssignPatternNode(pos)
def p_sequence_pattern(s):
opener = s.sy
@@ -4249,27 +4343,30 @@ def p_sequence_pattern(s):
if s.sy == closer:
break
else:
- if opener == ')' and len(patterns)==1:
+ if opener == ')' and len(patterns) == 1:
s.error("tuple-like pattern of length 1 must finish with ','")
break
s.expect(closer)
return MatchCaseNodes.MatchSequencePatternNode(pos, patterns=patterns)
else:
- s.error("Expected '[' or '('")
+ s.error("Expected '[' or '('")
+
def p_mapping_pattern(s):
pos = s.position()
s.expect('{')
if s.sy == '}':
+ # trivial empty mapping
s.next()
return MatchCaseNodes.MatchMappingPatternNode(pos)
+
double_star_capture_target = None
items_patterns = []
- double_star_set_twice = None
+ star_star_arg_pos = None
while True:
+ if double_star_capture_target and not star_star_arg_pos:
+ star_star_arg_pos = s.position()
if s.sy == '**':
- if double_star_capture_target:
- double_star_set_twice = s.position()
s.next()
double_star_capture_target = p_pattern_capture_target(s)
else:
@@ -4283,17 +4380,18 @@ def p_mapping_pattern(s):
s.expect(':')
value = p_pattern(s)
items_patterns.append((key, value))
- if s.sy==',':
- s.next()
- else:
- break
- if s.sy=='}':
+ if s.sy != ',':
break
- if s.sy != '}':
- s.error("Expected '}'")
- s.next()
- if double_star_set_twice is not None:
- return Nodes.ErrorNode(double_star_set_twice, what = "Double star capture set twice")
+ s.next()
+ if s.sy == '}':
+ break # Allow trailing comma.
+ s.expect('}')
+
+ if star_star_arg_pos is not None:
+ return Nodes.ErrorNode(
+ star_star_arg_pos,
+ what = "** pattern must be the final part of a mapping pattern."
+ )
return MatchCaseNodes.MatchMappingPatternNode(
pos,
keys = [kv[0] for kv in items_patterns],
@@ -4301,8 +4399,9 @@ def p_mapping_pattern(s):
double_star_capture_target = double_star_capture_target
)
+
def p_class_pattern(s):
- # name_or_attr
+ # start by parsing the class as name_or_attr
pos = s.position()
res = p_name(s, s.systring)
s.next()
@@ -4310,12 +4409,16 @@ def p_class_pattern(s):
attr_pos = s.position()
s.next()
attr = p_ident(s)
- res = ExprNodes.AttributeNode(attr_pos, obj = res, attribute=attr)
+ res = ExprNodes.AttributeNode(attr_pos, obj=res, attribute=attr)
class_ = res
+
s.expect("(")
if s.sy == ")":
+ # trivial case with no arguments matched
s.next()
return MatchCaseNodes.ClassPatternNode(pos, class_=class_)
+
+ # parse the arguments
positional_patterns = []
keyword_patterns = []
keyword_patterns_error = None
@@ -4328,17 +4431,17 @@ def p_class_pattern(s):
else:
with tentatively_scan(s) as errors:
keyword_patterns.append(p_keyword_pattern(s))
- if s.sy == ",":
- s.next()
- if s.sy == ")":
- break
- else:
+ if s.sy != ",":
break
+ s.next()
+ if s.sy == ")":
+ break # Allow trailing comma.
s.expect(")")
+
if keyword_patterns_error is not None:
return Nodes.ErrorNode(
keyword_patterns_error,
- what = "Positional patterns follow keyword patterns"
+ what="Positional patterns follow keyword patterns"
)
return MatchCaseNodes.ClassPatternNode(
pos, class_ = class_,
@@ -4347,6 +4450,7 @@ def p_class_pattern(s):
keyword_pattern_patterns = [kv[1] for kv in keyword_patterns],
)
+
def p_keyword_pattern(s):
if s.sy != "IDENT":
s.error("Expected identifier")
@@ -4356,6 +4460,7 @@ def p_keyword_pattern(s):
value = p_pattern(s)
return arg, value
+
def p_pattern_capture_target(s):
# any name but '_', and with some constraints on what follows
if s.sy != 'IDENT':
@@ -4415,5 +4520,5 @@ def p_annotation(s):
then it is not a bug.
"""
pos = s.position()
- expr = p_test(s, allow_assignment_expression=False)
+ expr = p_test(s)
return ExprNodes.AnnotationNode(pos, expr=expr)
diff --git a/Cython/Compiler/Pipeline.py b/Cython/Compiler/Pipeline.py
index 3a5c42352..2fd3a1d3f 100644
--- a/Cython/Compiler/Pipeline.py
+++ b/Cython/Compiler/Pipeline.py
@@ -231,14 +231,15 @@ def create_pipeline(context, mode, exclude_classes=()):
return stages
def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
- if py:
- mode = 'py'
- else:
- mode = 'pyx'
+ mode = 'py' if py else 'pyx'
+
test_support = []
+ ctest_support = []
if options.evaluate_tree_assertions:
from ..TestUtils import TreeAssertVisitor
- test_support.append(TreeAssertVisitor())
+ test_validator = TreeAssertVisitor()
+ test_support.append(test_validator)
+ ctest_support.append(test_validator.create_c_file_validator())
if options.gdb_debug:
from ..Debugger import DebugWriter # requires Py2.5+
@@ -257,7 +258,9 @@ def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
inject_utility_code_stage_factory(context),
abort_on_errors],
debug_transform,
- [generate_pyx_code_stage_factory(options, result)]))
+ [generate_pyx_code_stage_factory(options, result)],
+ ctest_support,
+ ))
def create_pxd_pipeline(context, scope, module_name):
from .CodeGeneration import ExtractPxdCode
diff --git a/Cython/Compiler/PyrexTypes.py b/Cython/Compiler/PyrexTypes.py
index c773f5c5a..da30809a3 100644
--- a/Cython/Compiler/PyrexTypes.py
+++ b/Cython/Compiler/PyrexTypes.py
@@ -13,6 +13,7 @@ try:
except NameError:
from functools import reduce
from functools import partial
+from itertools import product
from Cython.Utils import cached_function
from .Code import UtilityCode, LazyUtilityCode, TempitaUtilityCode
@@ -205,6 +206,7 @@ class PyrexType(BaseType):
# needs_cpp_construction boolean Needs C++ constructor and destructor when used in a cdef class
# needs_refcounting boolean Needs code to be generated similar to incref/gotref/decref.
# Largely used internally.
+ # equivalent_type type A C or Python type that is equivalent to this Python or C type.
# default_value string Initial value that can be assigned before first user assignment.
# declaration_value string The value statically assigned on declaration (if any).
# entry Entry The Entry for this type
@@ -277,6 +279,7 @@ class PyrexType(BaseType):
has_attributes = 0
needs_cpp_construction = 0
needs_refcounting = 0
+ equivalent_type = None
default_value = ""
declaration_value = ""
@@ -1504,7 +1507,6 @@ class PyExtensionType(PyObjectType):
#
# name string
# scope CClassScope Attribute namespace
- # visibility string
# typedef_flag boolean
# base_type PyExtensionType or None
# module_name string or None Qualified name of defining module
@@ -1518,8 +1520,10 @@ class PyExtensionType(PyObjectType):
# vtable_cname string Name of C method table definition
# early_init boolean Whether to initialize early (as opposed to during module execution).
# defered_declarations [thunk] Used to declare class hierarchies in order
+ # is_external boolean Defined in a extern block
# check_size 'warn', 'error', 'ignore' What to do if tp_basicsize does not match
# dataclass_fields OrderedDict nor None Used for inheriting from dataclasses
+ # multiple_bases boolean Does this class have multiple bases
is_extension_type = 1
has_attributes = 1
@@ -1527,6 +1531,7 @@ class PyExtensionType(PyObjectType):
objtypedef_cname = None
dataclass_fields = None
+ multiple_bases = False
def __init__(self, name, typedef_flag, base_type, is_external=0, check_size=None):
self.name = name
@@ -1833,7 +1838,27 @@ class FusedType(CType):
for t in types:
if t.is_fused:
# recursively merge in subtypes
- for subtype in t.types:
+ if isinstance(t, FusedType):
+ t_types = t.types
+ else:
+ # handle types that aren't a fused type themselves but contain fused types
+ # for example a C++ template where the template type is fused.
+ t_fused_types = t.get_fused_types()
+ t_types = []
+ for substitution in product(
+ *[fused_type.types for fused_type in t_fused_types]
+ ):
+ t_types.append(
+ t.specialize(
+ {
+ fused_type: sub
+ for fused_type, sub in zip(
+ t_fused_types, substitution
+ )
+ }
+ )
+ )
+ for subtype in t_types:
if subtype not in flattened_types:
flattened_types.append(subtype)
elif t not in flattened_types:
@@ -2788,6 +2813,8 @@ class CReferenceBaseType(BaseType):
# Common base type for C reference and C++ rvalue reference types.
+ subtypes = ['ref_base_type']
+
def __init__(self, base_type):
self.ref_base_type = base_type
@@ -3044,6 +3071,9 @@ class CFuncType(CType):
# must catch C++ exceptions if we raise them
return 0
if not other_type.exception_check or other_type.exception_value is not None:
+ # There's no problem if this type doesn't emit exceptions but the other type checks
+ if other_type.exception_check and not (self.exception_check or self.exception_value):
+ return 1
# if other does not *always* check exceptions, self must comply
if not self._same_exception_value(other_type.exception_value):
return 0
@@ -3129,8 +3159,10 @@ class CFuncType(CType):
if (pyrex or for_display) and not self.return_type.is_pyobject:
if self.exception_value and self.exception_check:
trailer = " except? %s" % self.exception_value
- elif self.exception_value:
+ elif self.exception_value and not self.exception_check:
trailer = " except %s" % self.exception_value
+ elif not self.exception_value and not self.exception_check:
+ trailer = " noexcept"
elif self.exception_check == '+':
trailer = " except +"
elif self.exception_check and for_display:
@@ -4429,6 +4461,7 @@ class ErrorType(PyrexType):
class PythonTypeConstructor(PyObjectType):
"""Used to help Cython interpret indexed types from the typing module (or similar)
"""
+ modifier_name = None
def __init__(self, name, base_type=None):
self.python_type_constructor_name = name
@@ -4457,69 +4490,35 @@ class PythonTupleTypeConstructor(PythonTypeConstructor):
not any(v.is_pyobject for v in template_values)):
entry = env.declare_tuple_type(pos, template_values)
if entry:
+ entry.used = True
return entry.type
return super(PythonTupleTypeConstructor, self).specialize_here(pos, env, template_values)
class SpecialPythonTypeConstructor(PythonTypeConstructor):
"""
- For things like ClassVar, Optional, etc, which have extra features on top of being
- a "templated" type.
+ For things like ClassVar, Optional, etc, which are not types and disappear during type analysis.
"""
- def __init__(self, name, template_type=None):
- super(SpecialPythonTypeConstructor, self).__init__(name, None)
- if (name == "typing.ClassVar" and template_type
- and not template_type.is_pyobject):
- # because classvars end up essentially used as globals they have
- # to be PyObjects. Try to find the nearest suitable type (although
- # practically I doubt this matters).
- py_type_name = template_type.py_type_name()
- if py_type_name:
- from .Builtin import builtin_scope
- template_type = (builtin_scope.lookup_type(py_type_name)
- or py_object_type)
- else:
- template_type = py_object_types
- self.template_type = template_type
+ def __init__(self, name):
+ super(SpecialPythonTypeConstructor, self).__init__(name, base_type=None)
+ self.modifier_name = name
def __repr__(self):
- if self.template_type:
- return "%s[%r]" % (self.name, self.template_type)
- else:
- return self.name
-
- def is_template_type(self):
- return self.template_type is None
+ return self.name
def resolve(self):
- if self.template_type:
- return self.template_type.resolve()
- else:
- return self
+ return self
def specialize_here(self, pos, env, template_values=None):
if len(template_values) != 1:
error(pos, "'%s' takes exactly one template argument." % self.name)
- # return a copy of the template type with python_type_constructor_name as an attribute
- # so it can be identified, and a resolve function that gets back to
- # the original type (since types are usually tested with "is")
- new_type = template_values[0]
- if self.python_type_constructor_name == "typing.ClassVar":
- # classvar must remain a py_object_type
- new_type = py_object_type
- if (self.python_type_constructor_name == "typing.Optional" and
- not new_type.is_pyobject):
- # optional must be a py_object, but can be a specialized py_object
- new_type = py_object_type
- return SpecialPythonTypeConstructor(
- self.python_type_constructor_name,
- template_type = template_values[0])
-
- def __getattr__(self, name):
- if self.template_type:
- return getattr(self.template_type, name)
- return super(SpecialPythonTypeConstructor, self).__getattr__(name)
+ return error_type
+ if template_values[0] is None:
+ # FIXME: allowing unknown types for now since we don't recognise all Python types.
+ return None
+ # Replace this type with the actual 'template' argument.
+ return template_values[0].resolve()
rank_to_type_name = (
diff --git a/Cython/Compiler/Symtab.py b/Cython/Compiler/Symtab.py
index 6554008f0..984e10f05 100644
--- a/Cython/Compiler/Symtab.py
+++ b/Cython/Compiler/Symtab.py
@@ -13,6 +13,7 @@ try:
except ImportError: # Py3
import builtins
+from ..Utils import try_finally_contextmanager
from .Errors import warning, error, InternalError
from .StringEncoding import EncodedString
from . import Options, Naming
@@ -163,6 +164,7 @@ class Entry(object):
# known_standard_library_import Either None (default), an empty string (definitely can't be determined)
# or a string of "modulename.something.attribute"
# Used for identifying imports from typing/dataclasses etc
+ # pytyping_modifiers Python type modifiers like "typing.ClassVar" but also "dataclasses.InitVar"
# TODO: utility_code and utility_code_definition serves the same purpose...
@@ -237,6 +239,7 @@ class Entry(object):
is_cgetter = False
is_cpp_optional = False
known_standard_library_import = None
+ pytyping_modifiers = None
def __init__(self, name, cname, type, pos = None, init = None):
self.name = name
@@ -282,6 +285,9 @@ class Entry(object):
assert not self.utility_code # we're not overwriting anything?
self.utility_code_definition = Code.UtilityCode.load_cached("OptionalLocals", "CppSupport.cpp")
+ def declared_with_pytyping_modifier(self, modifier_name):
+ return modifier_name in self.pytyping_modifiers if self.pytyping_modifiers else False
+
class InnerEntry(Entry):
"""
@@ -336,11 +342,13 @@ class Scope(object):
# is_builtin_scope boolean Is the builtin scope of Python/Cython
# is_py_class_scope boolean Is a Python class scope
# is_c_class_scope boolean Is an extension type scope
+ # is_local_scope boolean Is a local (i.e. function/method/generator) scope
# is_closure_scope boolean Is a closure scope
# is_generator_expression_scope boolean A subset of closure scope used for generator expressions
# is_passthrough boolean Outer scope is passed directly
# is_cpp_class_scope boolean Is a C++ class scope
# is_property_scope boolean Is a extension type property scope
+ # is_c_dataclass_scope boolean or "frozen" is a cython.dataclasses.dataclass
# scope_prefix string Disambiguator for C names
# in_cinclude boolean Suppress C declaration code
# qualified_name string "modname" or "modname.classname"
@@ -354,18 +362,22 @@ class Scope(object):
is_py_class_scope = 0
is_c_class_scope = 0
is_closure_scope = 0
+ is_local_scope = False
is_generator_expression_scope = 0
is_comprehension_scope = 0
is_passthrough = 0
is_cpp_class_scope = 0
is_property_scope = 0
is_module_scope = 0
+ is_c_dataclass_scope = False
is_internal = 0
scope_prefix = ""
in_cinclude = 0
nogil = 0
fused_to_specific = None
return_type = None
+ # Do ambiguous type names like 'int' and 'float' refer to the C types? (Otherwise, Python types.)
+ in_c_type_context = True
def __init__(self, name, outer_scope, parent_scope):
# The outer_scope is the next scope in the lookup chain.
@@ -482,6 +494,14 @@ class Scope(object):
for scope in sorted(self.subscopes, key=operator.attrgetter('scope_prefix')):
yield scope
+ @try_finally_contextmanager
+ def new_c_type_context(self, in_c_type_context=None):
+ old_c_type_context = self.in_c_type_context
+ if in_c_type_context is not None:
+ self.in_c_type_context = in_c_type_context
+ yield
+ self.in_c_type_context = old_c_type_context
+
def declare(self, name, cname, type, pos, visibility, shadow = 0, is_type = 0, create_wrapper = 0):
# Create new entry, and add to dictionary if
# name is not None. Reports a warning if already
@@ -733,8 +753,8 @@ class Scope(object):
return self.outer_scope.declare_tuple_type(pos, components)
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
# Add an entry for a variable.
if not cname:
if visibility != 'private' or api:
@@ -754,8 +774,17 @@ class Scope(object):
if api:
entry.api = 1
entry.used = 1
+ if pytyping_modifiers:
+ entry.pytyping_modifiers = pytyping_modifiers
return entry
+ def _reject_pytyping_modifiers(self, pos, modifiers, allowed=()):
+ if not modifiers:
+ return
+ for modifier in modifiers:
+ if modifier not in allowed:
+ error(pos, "Modifier '%s' is not allowed here." % modifier)
+
def declare_assignment_expression_target(self, name, type, pos):
# In most cases declares the variable as normal.
# For generator expressions and comprehensions the variable is declared in their parent
@@ -1269,19 +1298,13 @@ class ModuleScope(Scope):
is_cython_builtin = 0
old_style_globals = 0
- def __init__(self, name, parent_module, context):
+ def __init__(self, name, parent_module, context, is_package=False):
from . import Builtin
self.parent_module = parent_module
outer_scope = Builtin.builtin_scope
Scope.__init__(self, name, outer_scope, parent_module)
- if name == "__init__":
- # Treat Spam/__init__.pyx specially, so that when Python loads
- # Spam/__init__.so, initSpam() is defined.
- self.module_name = parent_module.module_name
- self.is_package = True
- else:
- self.module_name = name
- self.is_package = False
+ self.is_package = is_package
+ self.module_name = name
self.module_name = EncodedString(self.module_name)
self.context = context
self.module_cname = Naming.module_cname
@@ -1394,9 +1417,16 @@ class ModuleScope(Scope):
# explicit relative cimport
# error of going beyond top-level is handled in cimport node
relative_to = self
- while relative_level > 0 and relative_to:
+
+ top_level = 1 if self.is_package else 0
+ # * top_level == 1 when file is __init__.pyx, current package (relative_to) is the current module
+ # i.e. dot in `from . import ...` points to the current package
+ # * top_level == 0 when file is regular module, current package (relative_to) is parent module
+ # i.e. dot in `from . import ...` points to the package where module is placed
+ while relative_level > top_level and relative_to:
relative_to = relative_to.parent_module
relative_level -= 1
+
elif relative_level != 0:
# -1 or None: try relative cimport first, then absolute
relative_to = self.parent_module
@@ -1406,7 +1436,7 @@ class ModuleScope(Scope):
return module_scope.context.find_module(
module_name, relative_to=relative_to, pos=pos, absolute_fallback=absolute_fallback)
- def find_submodule(self, name):
+ def find_submodule(self, name, as_package=False):
# Find and return scope for a submodule of this module,
# creating a new empty one if necessary. Doesn't parse .pxd.
if '.' in name:
@@ -1415,10 +1445,10 @@ class ModuleScope(Scope):
submodule = None
scope = self.lookup_submodule(name)
if not scope:
- scope = ModuleScope(name, parent_module=self, context=self.context)
+ scope = ModuleScope(name, parent_module=self, context=self.context, is_package=True if submodule else as_package)
self.module_entries[name] = scope
if submodule:
- scope = scope.find_submodule(submodule)
+ scope = scope.find_submodule(submodule, as_package=as_package)
return scope
def lookup_submodule(self, name):
@@ -1515,14 +1545,15 @@ class ModuleScope(Scope):
return entry
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
# Add an entry for a global variable. If it is a Python
# object type, and not declared with cdef, it will live
# in the module dictionary, otherwise it will be a C
# global variable.
if visibility not in ('private', 'public', 'extern'):
error(pos, "Module-level variable cannot be declared %s" % visibility)
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers, ('typing.Optional',)) # let's allow at least this one
if not is_cdef:
if type is unspecified_type:
type = py_object_type
@@ -1558,7 +1589,7 @@ class ModuleScope(Scope):
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
- api=api, in_pxd=in_pxd, is_cdef=is_cdef)
+ api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers)
if is_cdef:
entry.is_cglobal = 1
if entry.type.declaration_value:
@@ -1587,7 +1618,7 @@ class ModuleScope(Scope):
entry = self.lookup_here(name)
if entry and entry.defined_in_pxd:
if entry.visibility != "private":
- mangled_cname = self.mangle(Naming.var_prefix, name)
+ mangled_cname = self.mangle(Naming.func_prefix, name)
if entry.cname == mangled_cname:
cname = name
entry.cname = cname
@@ -1860,6 +1891,7 @@ class ModuleScope(Scope):
class LocalScope(Scope):
+ is_local_scope = True
# Does the function have a 'with gil:' block?
has_with_gil_block = False
@@ -1889,15 +1921,15 @@ class LocalScope(Scope):
return entry
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
name = self.mangle_class_private_name(name)
# Add an entry for a local variable.
if visibility in ('public', 'readonly'):
error(pos, "Local variable cannot be declared %s" % visibility)
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
- api=api, in_pxd=in_pxd, is_cdef=is_cdef)
+ api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers)
if entry.type.declaration_value:
entry.init = entry.type.declaration_value
entry.is_local = 1
@@ -1995,13 +2027,14 @@ class ComprehensionScope(Scope):
return '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(prefix, name))
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = True):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=True, pytyping_modifiers=None):
if type is unspecified_type:
# if the outer scope defines a type for this variable, inherit it
outer_entry = self.outer_scope.lookup(name)
if outer_entry and outer_entry.is_variable:
type = outer_entry.type # may still be 'unspecified_type' !
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers)
# the parent scope needs to generate code for the variable, but
# this scope must hold its name exclusively
cname = '%s%s' % (self.genexp_prefix, self.parent_scope.mangle(Naming.var_prefix, name or self.next_id()))
@@ -2084,8 +2117,8 @@ class StructOrUnionScope(Scope):
Scope.__init__(self, name, None, None)
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0,
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None,
allow_pyobject=False, allow_memoryview=False, allow_refcounted=False):
# Add an entry for an attribute.
if not cname:
@@ -2094,6 +2127,7 @@ class StructOrUnionScope(Scope):
cname = c_safe_identifier(cname)
if type.is_cfunction:
type = PyrexTypes.CPtrType(type)
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers)
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = 1
self.var_entries.append(entry)
@@ -2171,15 +2205,15 @@ class PyClassScope(ClassScope):
is_py_class_scope = 1
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
name = self.mangle_class_private_name(name)
if type is unspecified_type:
type = py_object_type
# Add an entry for a class attribute.
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
- api=api, in_pxd=in_pxd, is_cdef=is_cdef)
+ api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers)
entry.is_pyglobal = 1
entry.is_pyclass_attr = 1
return entry
@@ -2281,6 +2315,25 @@ class CClassScope(ClassScope):
"""
return self.needs_gc() and not self.directives.get('no_gc_clear', False)
+ def may_have_finalize(self):
+ """
+ This covers cases where we definitely have a __del__ function
+ and also cases where one of the base classes could have a __del__
+ function but we don't know.
+ """
+ current_type_scope = self
+ while current_type_scope:
+ del_entry = current_type_scope.lookup_here("__del__")
+ if del_entry and del_entry.is_special:
+ return True
+ if (current_type_scope.parent_type.is_extern or not current_type_scope.implemented or
+ current_type_scope.parent_type.multiple_bases):
+ # we don't know if we have __del__, so assume we do and call it
+ return True
+ current_base_type = current_type_scope.parent_type.base_type
+ current_type_scope = current_base_type.scope if current_base_type else None
+ return False
+
def get_refcounted_entries(self, include_weakref=False,
include_gc_simple=True):
py_attrs = []
@@ -2301,17 +2354,21 @@ class CClassScope(ClassScope):
return have_entries, (py_attrs, py_buffers, memoryview_slices)
def declare_var(self, name, type, pos,
- cname = None, visibility = 'private',
- api = 0, in_pxd = 0, is_cdef = 0):
+ cname=None, visibility='private',
+ api=False, in_pxd=False, is_cdef=False, pytyping_modifiers=None):
name = self.mangle_class_private_name(name)
- if type.python_type_constructor_name == "typing.ClassVar":
- is_cdef = 0
- type = type.resolve()
-
- if (type.python_type_constructor_name == "dataclasses.InitVar" and
- 'dataclasses.dataclass' not in self.directives):
- error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass")
+ if pytyping_modifiers:
+ if "typing.ClassVar" in pytyping_modifiers:
+ is_cdef = 0
+ if not type.is_pyobject:
+ if not type.equivalent_type:
+ warning(pos, "ClassVar[] requires the type to be a Python object type. Found '%s', using object instead." % type)
+ type = py_object_type
+ else:
+ type = type.equivalent_type
+ if "dataclasses.InitVar" in pytyping_modifiers and not self.is_c_dataclass_scope:
+ error(pos, "Use of cython.dataclasses.InitVar does not make sense outside a dataclass")
if is_cdef:
# Add an entry for an attribute.
@@ -2332,6 +2389,7 @@ class CClassScope(ClassScope):
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = 1
self.var_entries.append(entry)
+ entry.pytyping_modifiers = pytyping_modifiers
if type.is_cpp_class and visibility != 'extern':
if self.directives['cpp_locals']:
entry.make_cpp_optional()
@@ -2369,7 +2427,7 @@ class CClassScope(ClassScope):
# Add an entry for a class attribute.
entry = Scope.declare_var(self, name, type, pos,
cname=cname, visibility=visibility,
- api=api, in_pxd=in_pxd, is_cdef=is_cdef)
+ api=api, in_pxd=in_pxd, is_cdef=is_cdef, pytyping_modifiers=pytyping_modifiers)
entry.is_member = 1
# xxx: is_pyglobal changes behaviour in so many places that I keep it in for now.
# is_member should be enough later on
@@ -2560,6 +2618,7 @@ class CClassScope(ClassScope):
base_entry.name, adapt(base_entry.cname),
base_entry.type, None, 'private')
entry.is_variable = 1
+ entry.is_inherited = True
entry.annotation = base_entry.annotation
self.inherited_var_entries.append(entry)
@@ -2612,11 +2671,12 @@ class CppClassScope(Scope):
template_entry.is_type = 1
def declare_var(self, name, type, pos,
- cname = None, visibility = 'extern',
- api = 0, in_pxd = 0, is_cdef = 0, defining = 0):
+ cname=None, visibility='extern',
+ api=False, in_pxd=False, is_cdef=False, defining=False, pytyping_modifiers=None):
# Add an entry for an attribute.
if not cname:
cname = name
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers)
entry = self.lookup_here(name)
if defining and entry is not None:
if entry.type.same_as(type):
@@ -2709,7 +2769,7 @@ class CppClassScope(Scope):
if base_entry.name not in base_templates:
entry = self.declare_type(base_entry.name, base_entry.type,
base_entry.pos, base_entry.cname,
- base_entry.visibility)
+ base_entry.visibility, defining=False)
entry.is_inherited = 1
def specialize(self, values, type_entry):
@@ -2746,10 +2806,11 @@ class CppScopedEnumScope(Scope):
Scope.__init__(self, name, outer_scope, None)
def declare_var(self, name, type, pos,
- cname=None, visibility='extern'):
+ cname=None, visibility='extern', pytyping_modifiers=None):
# Add an entry for an attribute.
if not cname:
cname = name
+ self._reject_pytyping_modifiers(pos, pytyping_modifiers)
entry = self.declare(name, cname, type, pos, visibility)
entry.is_variable = True
return entry
diff --git a/Cython/Compiler/Tests/TestCmdLine.py b/Cython/Compiler/Tests/TestCmdLine.py
index 5953112dc..0961dfa03 100644
--- a/Cython/Compiler/Tests/TestCmdLine.py
+++ b/Cython/Compiler/Tests/TestCmdLine.py
@@ -1,7 +1,12 @@
import os
import sys
+import re
from unittest import TestCase
try:
+ from unittest.mock import patch, Mock
+except ImportError: # Py2
+ from mock import patch, Mock
+try:
from StringIO import StringIO
except ImportError:
from io import StringIO # doesn't accept 'str' in Py2
@@ -11,7 +16,15 @@ from ..CmdLine import parse_command_line
from .Utils import backup_Options, restore_Options, check_global_options
+unpatched_exists = os.path.exists
+
+def patched_exists(path):
+ # avoid the Cython command raising a file not found error
+ if path in ('source.pyx', 'file.pyx', 'file1.pyx', 'file2.pyx', 'file3.pyx', 'foo.pyx', 'bar.pyx'):
+ return True
+ return unpatched_exists(path)
+@patch('os.path.exists', new=Mock(side_effect=patched_exists))
class CmdLineParserTest(TestCase):
def setUp(self):
self._options_backup = backup_Options()
@@ -495,22 +508,62 @@ class CmdLineParserTest(TestCase):
self.check_default_global_options()
self.check_default_options(options, ['compiler_directives'])
+ def test_module_name(self):
+ options, sources = parse_command_line([
+ 'source.pyx'
+ ])
+ self.assertEqual(options.module_name, None)
+ self.check_default_global_options()
+ self.check_default_options(options)
+ options, sources = parse_command_line([
+ '--module-name', 'foo.bar',
+ 'source.pyx'
+ ])
+ self.assertEqual(options.module_name, 'foo.bar')
+ self.check_default_global_options()
+ self.check_default_options(options, ['module_name'])
+
def test_errors(self):
- def error(*args):
+ def error(args, regex=None):
old_stderr = sys.stderr
stderr = sys.stderr = StringIO()
try:
self.assertRaises(SystemExit, parse_command_line, list(args))
finally:
sys.stderr = old_stderr
- self.assertTrue(stderr.getvalue())
-
- error('-1')
- error('-I')
- error('--version=-a')
- error('--version=--annotate=true')
- error('--working')
- error('--verbose=1')
- error('--verbose=1')
- error('--cleanup')
- error('--debug-disposal-code-wrong-name', 'file3.pyx')
+ msg = stderr.getvalue()
+ err_msg = 'Message "{}"'.format(msg.strip())
+ self.assertTrue(msg.startswith('usage: '),
+ '%s does not start with "usage :"' % err_msg)
+ self.assertTrue(': error: ' in msg,
+ '%s does not contain ": error :"' % err_msg)
+ if regex:
+ self.assertTrue(re.search(regex, msg),
+ '%s does not match search "%s"' %
+ (err_msg, regex))
+
+ error(['-1'],
+ 'unknown option -1')
+ error(['-I'],
+ 'argument -I/--include-dir: expected one argument')
+ error(['--version=-a'],
+ "argument -V/--version: ignored explicit argument '-a'")
+ error(['--version=--annotate=true'],
+ "argument -V/--version: ignored explicit argument "
+ "'--annotate=true'")
+ error(['--working'],
+ "argument -w/--working: expected one argument")
+ error(['--verbose=1'],
+ "argument -v/--verbose: ignored explicit argument '1'")
+ error(['--cleanup'],
+ "argument --cleanup: expected one argument")
+ error(['--debug-disposal-code-wrong-name', 'file3.pyx'],
+ "unknown option --debug-disposal-code-wrong-name")
+ error(['--module-name', 'foo.pyx'],
+ "Need at least one source file")
+ error(['--module-name', 'foo.bar'],
+ "Need at least one source file")
+ error(['--module-name', 'foo.bar', 'foo.pyx', 'bar.pyx'],
+ "Only one source file allowed when using --module-name")
+ error(['--module-name', 'foo.bar', '--timestamps', 'foo.pyx'],
+ "Cannot use --module-name with --timestamps")
diff --git a/Cython/Compiler/Tests/TestGrammar.py b/Cython/Compiler/Tests/TestGrammar.py
index f80ec22d3..852b48c33 100644
--- a/Cython/Compiler/Tests/TestGrammar.py
+++ b/Cython/Compiler/Tests/TestGrammar.py
@@ -7,9 +7,12 @@ Uses TreeFragment to test invalid syntax.
from __future__ import absolute_import
+import ast
+import textwrap
+
from ...TestUtils import CythonTest
-from ..Errors import CompileError
from .. import ExprNodes
+from ..Errors import CompileError
# Copied from CPython's test_grammar.py
VALID_UNDERSCORE_LITERALS = [
@@ -103,6 +106,39 @@ INVALID_UNDERSCORE_LITERALS = [
]
+INVALID_ELLIPSIS = [
+ (". . .", 2, 0),
+ (". ..", 2, 0),
+ (".. .", 2, 0),
+ (". ...", 2, 0),
+ (". ... .", 2, 0),
+ (".. ... .", 2, 0),
+ (". ... ..", 2, 0),
+ ("""
+ (
+ .
+ ..
+ )
+ """, 3, 4),
+ ("""
+ [
+ ..
+ .,
+ None
+ ]
+ """, 3, 4),
+ ("""
+ {
+ None,
+ .
+ .
+
+ .
+ }
+ """, 4, 4)
+]
+
+
class TestGrammar(CythonTest):
def test_invalid_number_literals(self):
@@ -142,6 +178,25 @@ class TestGrammar(CythonTest):
else:
assert isinstance(literal_node, ExprNodes.IntNode), (literal, literal_node)
+ def test_invalid_ellipsis(self):
+ ERR = ":{0}:{1}: Expected an identifier or literal"
+ for code, line, col in INVALID_ELLIPSIS:
+ try:
+ ast.parse(textwrap.dedent(code))
+ except SyntaxError as exc:
+ assert True
+ else:
+ assert False, "Invalid Python code '%s' failed to raise an exception" % code
+
+ try:
+ self.fragment(u'''\
+ # cython: language_level=3
+ ''' + code)
+ except CompileError as exc:
+ assert ERR.format(line, col) in str(exc), str(exc)
+ else:
+ assert False, "Invalid Cython code '%s' failed to raise an exception" % code
+
if __name__ == "__main__":
import unittest
diff --git a/Cython/Compiler/Tests/TestParseTreeTransforms.py b/Cython/Compiler/Tests/TestParseTreeTransforms.py
index e6889f8f2..6e29263e5 100644
--- a/Cython/Compiler/Tests/TestParseTreeTransforms.py
+++ b/Cython/Compiler/Tests/TestParseTreeTransforms.py
@@ -1,7 +1,9 @@
-import os
+import os.path
+import unittest
from Cython.TestUtils import TransformTest
from Cython.Compiler.ParseTreeTransforms import *
+from Cython.Compiler.ParseTreeTransforms import _calculate_pickle_checksums
from Cython.Compiler.Nodes import *
from Cython.Compiler import Main, Symtab, Options
@@ -276,6 +278,11 @@ class TestDebugTransform(DebuggerTestCase):
raise
+class TestAnalyseDeclarationsTransform(unittest.TestCase):
+ def test_calculate_pickle_checksums(self):
+ checksums = _calculate_pickle_checksums(['member1', 'member2', 'member3'])
+ assert 2 <= len(checksums) <= 3, checksums # expecting ['0xc0af380' (MD5), '0x0c75bd4', '0xa7a7b94']
+
if __name__ == "__main__":
import unittest
diff --git a/Cython/Compiler/TypeInference.py b/Cython/Compiler/TypeInference.py
index 4ae3ab155..0ef651d24 100644
--- a/Cython/Compiler/TypeInference.py
+++ b/Cython/Compiler/TypeInference.py
@@ -104,10 +104,11 @@ class MarkParallelAssignments(EnvTransform):
is_special = False
sequence = node.iterator.sequence
target = node.target
+ iterator_scope = node.iterator.expr_scope or self.current_env()
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
- entry = self.current_env().lookup(function.name)
+ entry = iterator_scope.lookup(function.name)
if not entry or entry.is_builtin:
if function.name == 'reversed' and len(sequence.args) == 1:
sequence = sequence.args[0]
@@ -115,7 +116,7 @@ class MarkParallelAssignments(EnvTransform):
if target.is_sequence_constructor and len(target.args) == 2:
iterator = sequence.args[0]
if iterator.is_name:
- iterator_type = iterator.infer_type(self.current_env())
+ iterator_type = iterator.infer_type(iterator_scope)
if iterator_type.is_builtin_type:
# assume that builtin types have a length within Py_ssize_t
self.mark_assignment(
@@ -127,7 +128,7 @@ class MarkParallelAssignments(EnvTransform):
if isinstance(sequence, ExprNodes.SimpleCallNode):
function = sequence.function
if sequence.self is None and function.is_name:
- entry = self.current_env().lookup(function.name)
+ entry = iterator_scope.lookup(function.name)
if not entry or entry.is_builtin:
if function.name in ('range', 'xrange'):
is_special = True
diff --git a/Cython/Compiler/TypeSlots.py b/Cython/Compiler/TypeSlots.py
index ea310a6d3..0bd550d8c 100644
--- a/Cython/Compiler/TypeSlots.py
+++ b/Cython/Compiler/TypeSlots.py
@@ -556,8 +556,7 @@ class TypeFlagsSlot(SlotDescriptor):
value += "|Py_TPFLAGS_BASETYPE"
if scope.needs_gc():
value += "|Py_TPFLAGS_HAVE_GC"
- entry = scope.lookup("__del__")
- if entry and entry.is_special:
+ if scope.may_have_finalize():
value += "|Py_TPFLAGS_HAVE_FINALIZE"
return value
@@ -966,8 +965,8 @@ class SlotTable(object):
# Added in release 2.2
# The following require the Py_TPFLAGS_HAVE_CLASS flag
- BinopSlot(binaryfunc, "nb_floor_divide", "__floordiv__", method_name_to_slot),
- BinopSlot(binaryfunc, "nb_true_divide", "__truediv__", method_name_to_slot),
+ BinopSlot(bf, "nb_floor_divide", "__floordiv__", method_name_to_slot),
+ BinopSlot(bf, "nb_true_divide", "__truediv__", method_name_to_slot),
MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__", method_name_to_slot),
MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__", method_name_to_slot),
@@ -975,7 +974,7 @@ class SlotTable(object):
MethodSlot(unaryfunc, "nb_index", "__index__", method_name_to_slot),
# Added in release 3.5
- BinopSlot(binaryfunc, "nb_matrix_multiply", "__matmul__", method_name_to_slot,
+ BinopSlot(bf, "nb_matrix_multiply", "__matmul__", method_name_to_slot,
ifdef="PY_VERSION_HEX >= 0x03050000"),
MethodSlot(ibinaryfunc, "nb_inplace_matrix_multiply", "__imatmul__", method_name_to_slot,
ifdef="PY_VERSION_HEX >= 0x03050000"),
diff --git a/Cython/Compiler/Visitor.py b/Cython/Compiler/Visitor.py
index 4eabd6b83..92e2eb9c0 100644
--- a/Cython/Compiler/Visitor.py
+++ b/Cython/Compiler/Visitor.py
@@ -1,5 +1,5 @@
# cython: infer_types=True
-# cython: language_level=3
+# cython: language_level=3str
# cython: auto_pickle=False
#
@@ -80,7 +80,7 @@ class TreeVisitor(object):
def dump_node(self, node):
ignored = list(node.child_attrs or []) + [
- u'child_attrs', u'pos', u'gil_message', u'cpp_message', u'subexprs']
+ 'child_attrs', 'pos', 'gil_message', 'cpp_message', 'subexprs']
values = []
pos = getattr(node, 'pos', None)
if pos:
@@ -116,7 +116,7 @@ class TreeVisitor(object):
nodes = []
while hasattr(stacktrace, 'tb_frame'):
frame = stacktrace.tb_frame
- node = frame.f_locals.get(u'self')
+ node = frame.f_locals.get('self')
if isinstance(node, Nodes.Node):
code = frame.f_code
method_name = code.co_name
@@ -153,12 +153,12 @@ class TreeVisitor(object):
def find_handler(self, obj):
# to resolve, try entire hierarchy
cls = type(obj)
- pattern = "visit_%s"
mro = inspect.getmro(cls)
for mro_cls in mro:
- handler_method = getattr(self, pattern % mro_cls.__name__, None)
+ handler_method = getattr(self, "visit_" + mro_cls.__name__, None)
if handler_method is not None:
return handler_method
+
print(type(self), cls)
if self.access_path:
print(self.access_path)
@@ -306,8 +306,8 @@ class CythonTransform(VisitorTransform):
self.context = context
def __call__(self, node):
- from . import ModuleNode
- if isinstance(node, ModuleNode.ModuleNode):
+ from .ModuleNode import ModuleNode
+ if isinstance(node, ModuleNode):
self.current_directives = node.directives
return super(CythonTransform, self).__call__(node)
@@ -380,13 +380,15 @@ class EnvTransform(CythonTransform):
self.env_stack.pop()
def visit_FuncDefNode(self, node):
- outer_attrs = node.outer_attrs
- self.visitchildren(node, attrs=outer_attrs)
+ self.visit_func_outer_attrs(node)
self.enter_scope(node, node.local_scope)
- self.visitchildren(node, attrs=None, exclude=outer_attrs)
+ self.visitchildren(node, attrs=None, exclude=node.outer_attrs)
self.exit_scope()
return node
+ def visit_func_outer_attrs(self, node):
+ self.visitchildren(node, attrs=node.outer_attrs)
+
def visit_GeneratorBodyDefNode(self, node):
self._process_children(node)
return node
@@ -592,7 +594,7 @@ class MethodDispatcherTransform(EnvTransform):
# Python 2 and 3
return None
- call_type = has_kwargs and 'general' or 'simple'
+ call_type = 'general' if has_kwargs else 'simple'
handler = getattr(self, '_handle_%s_%s' % (call_type, match_name), None)
if handler is None:
handler = getattr(self, '_handle_any_%s' % match_name, None)
diff --git a/Cython/Coverage.py b/Cython/Coverage.py
index 7acd54c1f..147df8050 100644
--- a/Cython/Coverage.py
+++ b/Cython/Coverage.py
@@ -6,6 +6,44 @@ Requires the coverage package at least in version 4.0 (which added the plugin AP
This plugin requires the generated C sources to be available, next to the extension module.
It parses the C file and reads the original source files from it, which are stored in C comments.
It then reports a source file to coverage.py when it hits one of its lines during line tracing.
+
+Basically, Cython can (on request) emit explicit trace calls into the C code that it generates,
+and as a general human debugging helper, it always copies the current source code line
+(and its surrounding context) into the C files before it generates code for that line, e.g.
+
+::
+
+ /* "line_trace.pyx":147
+ * def cy_add_with_nogil(a,b):
+ * cdef int z, x=a, y=b # 1
+ * with nogil: # 2 # <<<<<<<<<<<<<<
+ * z = 0 # 3
+ * z += cy_add_nogil(x, y) # 4
+ */
+ __Pyx_TraceLine(147,1,__PYX_ERR(0, 147, __pyx_L4_error))
+ [C code generated for file line_trace.pyx, line 147, follows here]
+
+The crux is that multiple source files can contribute code to a single C (or C++) file
+(and thus, to a single extension module) besides the main module source file (.py/.pyx),
+usually shared declaration files (.pxd) but also literally included files (.pxi).
+
+Therefore, the coverage plugin doesn't actually try to look at the file that happened
+to contribute the current source line for the trace call, but simply looks up the single
+.c file from which the extension was compiled (which usually lies right next to it after
+the build, having the same name), and parses the code copy comments from that .c file
+to recover the original source files and their code as a line-to-file mapping.
+
+That mapping is then used to report the ``__Pyx_TraceLine()`` calls to the coverage tool.
+The plugin also reports the line of source code that it found in the C file to the coverage
+tool to support annotated source representations. For this, again, it does not look at the
+actual source files but only reports the source code that it found in the C code comments.
+
+Apart from simplicity (read one file instead of finding and parsing many), part of the
+reasoning here is that any line in the original sources for which there is no comment line
+(and trace call) in the generated C code cannot count as executed, really, so the C code
+comments are a very good source for coverage reporting. They already filter out purely
+declarative code lines that do not contribute executable code, and such (missing) lines
+can then be marked as excluded from coverage analysis.
"""
from __future__ import absolute_import
@@ -45,6 +83,23 @@ def _find_dep_file_path(main_file, file_path, relative_path_search=False):
rel_file_path = os.path.join(os.path.dirname(main_file), file_path)
if os.path.exists(rel_file_path):
abs_path = os.path.abspath(rel_file_path)
+
+ abs_no_ext = os.path.splitext(abs_path)[0]
+ file_no_ext, extension = os.path.splitext(file_path)
+ # We check if the paths match by matching the directories in reverse order.
+ # pkg/module.pyx /long/absolute_path/bla/bla/site-packages/pkg/module.c should match.
+    # This will match the pairs module-module and pkg-pkg, after which there is nothing left to zip.
+ abs_no_ext = os.path.normpath(abs_no_ext)
+ file_no_ext = os.path.normpath(file_no_ext)
+ matching_paths = zip(reversed(abs_no_ext.split(os.sep)), reversed(file_no_ext.split(os.sep)))
+ for one, other in matching_paths:
+ if one != other:
+ break
+ else: # No mismatches detected
+ matching_abs_path = os.path.splitext(main_file)[0] + extension
+ if os.path.exists(matching_abs_path):
+ return canonical_filename(matching_abs_path)
+
# search sys.path for external locations if a valid file hasn't been found
if not os.path.exists(abs_path):
for sys_path in sys.path:
diff --git a/Cython/Distutils/old_build_ext.py b/Cython/Distutils/old_build_ext.py
index 3595d80e0..cec54d93d 100644
--- a/Cython/Distutils/old_build_ext.py
+++ b/Cython/Distutils/old_build_ext.py
@@ -321,8 +321,8 @@ class old_build_ext(_build_ext.build_ext):
for source in cython_sources:
target = cython_targets[source]
depends = [source] + list(extension.depends or ())
- if(source[-4:].lower()==".pyx" and os.path.isfile(source[:-3]+"pxd")):
- depends += [source[:-3]+"pxd"]
+ if source[-4:].lower() == ".pyx" and os.path.isfile(source[:-3] + "pxd"):
+ depends += [source[:-3] + "pxd"]
rebuild = self.force or newer_group(depends, target, 'newer')
if not rebuild and newest_dependency is not None:
rebuild = newer(newest_dependency, target)
diff --git a/Cython/Includes/cpython/object.pxd b/Cython/Includes/cpython/object.pxd
index c4688f738..41874159c 100644
--- a/Cython/Includes/cpython/object.pxd
+++ b/Cython/Includes/cpython/object.pxd
@@ -5,7 +5,7 @@ cdef extern from "Python.h":
ctypedef struct PyObject # forward declaration
- ctypedef object (*newfunc)(cpython.type.type, object, object) # (type, args, kwargs)
+ ctypedef object (*newfunc)(cpython.type.type, PyObject*, PyObject*) # (type, args|NULL, kwargs|NULL)
ctypedef object (*unaryfunc)(object)
ctypedef object (*binaryfunc)(object, object)
diff --git a/Cython/Includes/cpython/time.pxd b/Cython/Includes/cpython/time.pxd
index 076abd931..7f20095a1 100644
--- a/Cython/Includes/cpython/time.pxd
+++ b/Cython/Includes/cpython/time.pxd
@@ -30,7 +30,7 @@ cdef inline int _raise_from_errno() except -1 with gil:
return <int> -1 # Let the C compiler know that this function always raises.
-cdef inline tm localtime() nogil except *:
+cdef inline tm localtime() except * nogil:
"""
Analogue to the stdlib time.localtime. The returned struct
has some entries that the stdlib version does not: tm_gmtoff, tm_zone
diff --git a/Cython/Includes/cpython/unicode.pxd b/Cython/Includes/cpython/unicode.pxd
index ba11f5736..6ec77f7b3 100644
--- a/Cython/Includes/cpython/unicode.pxd
+++ b/Cython/Includes/cpython/unicode.pxd
@@ -1,4 +1,8 @@
+
cdef extern from *:
+ ctypedef unsigned char Py_UCS1 # uint8_t
+ ctypedef unsigned short Py_UCS2 # uint16_t
+
# Return true if the object o is a Unicode object or an instance
# of a Unicode subtype. Changed in version 2.2: Allowed subtypes
# to be accepted.
@@ -23,6 +27,21 @@ cdef extern from *:
# New in version 3.3.
Py_ssize_t PyUnicode_GET_LENGTH(object o)
+ Py_UCS1 *PyUnicode_1BYTE_DATA(object o)
+ Py_UCS2 *PyUnicode_2BYTE_DATA(object o)
+ Py_UCS4 *PyUnicode_4BYTE_DATA(object o)
+
+ int PyUnicode_WCHAR_KIND # Deprecated since Python 3.10, removed in 3.12.
+ int PyUnicode_1BYTE_KIND
+ int PyUnicode_2BYTE_KIND
+ int PyUnicode_4BYTE_KIND
+ void PyUnicode_WRITE(int kind, void *data, Py_ssize_t index, Py_UCS4 value)
+ Py_UCS4 PyUnicode_READ(int kind, void *data, Py_ssize_t index)
+ Py_UCS4 PyUnicode_READ_CHAR(object o, Py_ssize_t index)
+
+ unsigned int PyUnicode_KIND(object o)
+ void *PyUnicode_DATA(object o)
+
# Return the size of the object's internal buffer in bytes. o has
# to be a PyUnicodeObject (not checked).
Py_ssize_t PyUnicode_GET_DATA_SIZE(object o)
@@ -35,6 +54,8 @@ cdef extern from *:
# be a PyUnicodeObject (not checked).
char* PyUnicode_AS_DATA(object o)
+ bint PyUnicode_IsIdentifier(object o)
+
# Return 1 or 0 depending on whether ch is a whitespace character.
bint Py_UNICODE_ISSPACE(Py_UCS4 ch)
@@ -65,6 +86,8 @@ cdef extern from *:
# Return 1 or 0 depending on whether ch is an alphanumeric character.
bint Py_UNICODE_ISALNUM(Py_UCS4 ch)
+ bint Py_UNICODE_ISPRINTABLE(Py_UCS4 ch)
+
# Return the character ch converted to lower case.
# Used to return a Py_UNICODE value before Py3.3.
Py_UCS4 Py_UNICODE_TOLOWER(Py_UCS4 ch)
@@ -111,6 +134,18 @@ cdef extern from *:
# UTF-8 encoded bytes. The size is determined with strlen().
unicode PyUnicode_FromString(const char *u)
+ unicode PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar)
+ unicode PyUnicode_FromKindAndData(int kind, const void *buffer, Py_ssize_t size)
+ unicode PyUnicode_FromFormat(const char *format, ...)
+ Py_ssize_t PyUnicode_GetLength(object unicode) except -1
+ Py_ssize_t PyUnicode_CopyCharacters(object to, Py_ssize_t to_start, object from_, Py_ssize_t from_start, Py_ssize_t how_many) except -1
+ Py_ssize_t PyUnicode_Fill(object unicode, Py_ssize_t start, Py_ssize_t length, Py_UCS4 fill_char) except -1
+ int PyUnicode_WriteChar(object unicode, Py_ssize_t index, Py_UCS4 character) except -1
+ Py_UCS4 PyUnicode_ReadChar(object unicode, Py_ssize_t index) except -1
+ unicode PyUnicode_Substring(object str, Py_ssize_t start, Py_ssize_t end)
+ Py_UCS4 *PyUnicode_AsUCS4(object u, Py_UCS4 *buffer, Py_ssize_t buflen, int copy_null) except NULL
+ Py_UCS4 *PyUnicode_AsUCS4Copy(object u) except NULL
+
# Create a Unicode Object from the given Unicode code point ordinal.
#
# The ordinal must be in range(0x10000) on narrow Python builds
diff --git a/Cython/Includes/libcpp/bit.pxd b/Cython/Includes/libcpp/bit.pxd
new file mode 100644
index 000000000..cac12ea4f
--- /dev/null
+++ b/Cython/Includes/libcpp/bit.pxd
@@ -0,0 +1,31 @@
+cdef extern from "<bit>" namespace "std" nogil:
+ # bit_cast (gcc >= 11.0, clang >= 14.0)
+ cdef To bit_cast[To, From](From&)
+
+ # byteswap (C++23)
+ #cdef T byteswap[T](T)
+
+ # integral powers of 2 (gcc >= 10.0, clang >= 12.0)
+ cdef bint has_single_bit[T](T)
+ cdef T bit_ceil[T](T)
+ cdef T bit_floor[T](T)
+ cdef int bit_width[T](T)
+
+ # rotating (gcc >= 9.0, clang >= 9.0)
+ cdef T rotl[T](T, int shift)
+ cdef T rotr[T](T, int shift)
+
+ # counting (gcc >= 9.0, clang >= 9.0)
+ cdef int countl_zero[T](T)
+ cdef int countl_one[T](T)
+ cdef int countr_zero[T](T)
+ cdef int countr_one[T](T)
+ cdef int popcount[T](T)
+
+ # endian
+ cpdef enum class endian(int):
+ little,
+ big,
+ native
+
+
diff --git a/Cython/Includes/libcpp/map.pxd b/Cython/Includes/libcpp/map.pxd
index 2f8238f14..d81af66e0 100644
--- a/Cython/Includes/libcpp/map.pxd
+++ b/Cython/Includes/libcpp/map.pxd
@@ -121,6 +121,8 @@ cdef extern from "<map>" namespace "std" nogil:
iterator upper_bound(const T&)
const_iterator const_upper_bound "upper_bound"(const T&)
#value_compare value_comp()
+ # C++20
+ bint contains(const T&)
cdef cppclass multimap[T, U, COMPARE=*, ALLOCATOR=*]:
ctypedef T key_type
@@ -239,3 +241,4 @@ cdef extern from "<map>" namespace "std" nogil:
iterator upper_bound(const T&)
const_iterator const_upper_bound "upper_bound"(const T&)
#value_compare value_comp()
+ bint contains(const T&)
diff --git a/Cython/Includes/libcpp/numeric.pxd b/Cython/Includes/libcpp/numeric.pxd
index 670c6cfe8..0335a0bac 100644
--- a/Cython/Includes/libcpp/numeric.pxd
+++ b/Cython/Includes/libcpp/numeric.pxd
@@ -122,3 +122,10 @@ cdef extern from "<numeric>" namespace "std" nogil:
ForwardIt2 transform_exclusive_scan[ExecutionPolicy, ForwardIt1, ForwardIt2, T, BinaryOperation, UnaryOperation](
ExecutionPolicy&& policy, ForwardIt1 first, ForwardIt1 last, ForwardIt2 d_first,
T init, BinaryOperation binary_op, UnaryOperation unary_op)
+
+ # C++17
+ T gcd[T](T a, T b)
+ T lcm[T](T a, T b)
+
+ # C++20
+ T midpoint[T](T a, T b) except + \ No newline at end of file
diff --git a/Cython/Includes/libcpp/random.pxd b/Cython/Includes/libcpp/random.pxd
index e879c8f64..9e48bb27f 100644
--- a/Cython/Includes/libcpp/random.pxd
+++ b/Cython/Includes/libcpp/random.pxd
@@ -1,10 +1,14 @@
-from libc.stdint cimport uint_fast32_t
+from libc.stdint cimport uint_fast32_t, uint_fast64_t
cdef extern from "<random>" namespace "std" nogil:
- cdef cppclass mt19937:
+ cdef cppclass random_device:
ctypedef uint_fast32_t result_type
+ random_device() except +
+ result_type operator()() except +
+ cdef cppclass mt19937:
+ ctypedef uint_fast32_t result_type
mt19937() except +
mt19937(result_type seed) except +
result_type operator()() except +
@@ -12,3 +16,151 @@ cdef extern from "<random>" namespace "std" nogil:
result_type max() except +
void discard(size_t z) except +
void seed(result_type seed) except +
+
+ cdef cppclass mt19937_64:
+ ctypedef uint_fast64_t result_type
+
+ mt19937_64() except +
+ mt19937_64(result_type seed) except +
+ result_type operator()() except +
+ result_type min() except +
+ result_type max() except +
+ void discard(size_t z) except +
+ void seed(result_type seed) except +
+
+ cdef cppclass uniform_int_distribution[T]:
+ ctypedef T result_type
+ uniform_int_distribution() except +
+ uniform_int_distribution(T, T) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass uniform_real_distribution[T]:
+ ctypedef T result_type
+ uniform_real_distribution() except +
+ uniform_real_distribution(T, T) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass bernoulli_distribution:
+ ctypedef bint result_type
+ bernoulli_distribution() except +
+ bernoulli_distribution(double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass binomial_distribution[T]:
+ ctypedef T result_type
+ binomial_distribution() except +
+ binomial_distribution(T, double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass geometric_distribution[T]:
+ ctypedef T result_type
+ geometric_distribution() except +
+ geometric_distribution(double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+
+ cdef cppclass negative_binomial_distribution[T]:
+ ctypedef T result_type
+ negative_binomial_distribution() except +
+ negative_binomial_distribution(T, double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass poisson_distribution[T]:
+ ctypedef T result_type
+ poisson_distribution() except +
+ poisson_distribution(double) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass exponential_distribution[T]:
+ ctypedef T result_type
+ exponential_distribution() except +
+ exponential_distribution(result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass gamma_distribution[T]:
+ ctypedef T result_type
+ gamma_distribution() except +
+ gamma_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass weibull_distribution[T]:
+ ctypedef T result_type
+ weibull_distribution() except +
+ weibull_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass extreme_value_distribution[T]:
+ ctypedef T result_type
+ extreme_value_distribution() except +
+ extreme_value_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass normal_distribution[T]:
+ ctypedef T result_type
+ normal_distribution() except +
+ normal_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass lognormal_distribution[T]:
+ ctypedef T result_type
+ lognormal_distribution() except +
+ lognormal_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass chi_squared_distribution[T]:
+ ctypedef T result_type
+ chi_squared_distribution() except +
+ chi_squared_distribution(result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass cauchy_distribution[T]:
+ ctypedef T result_type
+ cauchy_distribution() except +
+ cauchy_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass fisher_f_distribution[T]:
+ ctypedef T result_type
+ fisher_f_distribution() except +
+ fisher_f_distribution(result_type, result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
+
+ cdef cppclass student_t_distribution[T]:
+ ctypedef T result_type
+ student_t_distribution() except +
+ student_t_distribution(result_type) except +
+ result_type operator()[Generator](Generator&) except +
+ result_type min() except +
+ result_type max() except +
diff --git a/Cython/Includes/libcpp/set.pxd b/Cython/Includes/libcpp/set.pxd
index 8ba47cb7f..7e6449ca2 100644
--- a/Cython/Includes/libcpp/set.pxd
+++ b/Cython/Includes/libcpp/set.pxd
@@ -112,6 +112,8 @@ cdef extern from "<set>" namespace "std" nogil:
iterator upper_bound(const T&)
const_iterator const_upper_bound "upper_bound"(const T&)
#value_compare value_comp()
+ # C++20
+ bint contains(const T&)
cdef cppclass multiset[T]:
ctypedef T value_type
@@ -222,3 +224,5 @@ cdef extern from "<set>" namespace "std" nogil:
void swap(multiset&)
iterator upper_bound(const T&)
const_iterator const_upper_bound "upper_bound"(const T&)
+ # C++20
+ bint contains(const T&)
diff --git a/Cython/Includes/libcpp/string.pxd b/Cython/Includes/libcpp/string.pxd
index 0fee703ea..23518806a 100644
--- a/Cython/Includes/libcpp/string.pxd
+++ b/Cython/Includes/libcpp/string.pxd
@@ -251,6 +251,15 @@ cdef extern from "<string>" namespace "std" nogil:
string substr(size_t pos) except +
string substr()
+ # C++20
+ bint starts_with(char c) except +
+ bint starts_with(const char* s)
+ bint ends_with(char c) except +
+ bint ends_with(const char* s)
+ # C++23
+ bint contains(char c) except +
+ bint contains(const char* s)
+
#string& operator= (const string&)
#string& operator= (const char*)
#string& operator= (char)
diff --git a/Cython/Includes/libcpp/unordered_map.pxd b/Cython/Includes/libcpp/unordered_map.pxd
index 05f3338fa..61d11b0be 100644
--- a/Cython/Includes/libcpp/unordered_map.pxd
+++ b/Cython/Includes/libcpp/unordered_map.pxd
@@ -95,6 +95,8 @@ cdef extern from "<unordered_map>" namespace "std" nogil:
size_t max_bucket_count()
size_t bucket_size(size_t)
size_t bucket(const T&)
+ # C++20
+ bint contains(const T&)
cdef cppclass unordered_multimap[T, U, HASH=*, PRED=*, ALLOCATOR=*]:
ctypedef T key_type
@@ -186,3 +188,5 @@ cdef extern from "<unordered_map>" namespace "std" nogil:
size_t max_bucket_count()
size_t bucket_size(size_t)
size_t bucket(const T&)
+ # C++20
+ bint contains(const T&)
diff --git a/Cython/Includes/libcpp/unordered_set.pxd b/Cython/Includes/libcpp/unordered_set.pxd
index f3fdfb56e..6aae890d9 100644
--- a/Cython/Includes/libcpp/unordered_set.pxd
+++ b/Cython/Includes/libcpp/unordered_set.pxd
@@ -75,6 +75,8 @@ cdef extern from "<unordered_set>" namespace "std" nogil:
size_t max_bucket_count()
size_t bucket_size(size_t)
size_t bucket(const T&)
+ # C++20
+ bint contains(const T&)
cdef cppclass unordered_multiset[T,HASH=*,PRED=*,ALLOCATOR=*]:
ctypedef T value_type
@@ -146,3 +148,5 @@ cdef extern from "<unordered_set>" namespace "std" nogil:
size_t max_bucket_count()
size_t bucket_size(size_t)
size_t bucket(const T&)
+ # C++20
+ bint contains(const T&)
diff --git a/Cython/Shadow.py b/Cython/Shadow.py
index 48bc249e0..097126475 100644
--- a/Cython/Shadow.py
+++ b/Cython/Shadow.py
@@ -1,7 +1,7 @@
# cython.* namespace for pure mode.
from __future__ import absolute_import
-__version__ = "3.0.0a10"
+__version__ = "3.0.0a11"
try:
from __builtin__ import basestring
@@ -385,7 +385,7 @@ class typedef(CythonType):
__getitem__ = index_type
class _FusedType(CythonType):
- pass
+ __getitem__ = index_type
def fused_type(*args):
diff --git a/Cython/TestUtils.py b/Cython/TestUtils.py
index bb2070d39..45a8e6f59 100644
--- a/Cython/TestUtils.py
+++ b/Cython/TestUtils.py
@@ -1,18 +1,21 @@
from __future__ import absolute_import
import os
+import re
import unittest
import shlex
import sys
import tempfile
import textwrap
from io import open
+from functools import partial
from .Compiler import Errors
from .CodeWriter import CodeWriter
-from .Compiler.TreeFragment import TreeFragment, strip_common_indent
+from .Compiler.TreeFragment import TreeFragment, strip_common_indent, StringParseContext
from .Compiler.Visitor import TreeVisitor, VisitorTransform
from .Compiler import TreePath
+from .Compiler.ParseTreeTransforms import PostParse
class NodeTypeWriter(TreeVisitor):
@@ -161,11 +164,81 @@ class TransformTest(CythonTest):
return tree
+# For the test C code validation, we have to take care that the test directives (and thus
+# the match strings) do not just appear in (multiline) C code comments containing the original
+# Cython source code. Thus, we discard the comments before matching.
+# This seems a prime case for re.VERBOSE, but that mode appears to match some of the whitespace.
+_strip_c_comments = partial(re.compile(
+ re.sub(r'\s+', '', r'''
+ /[*] (
+ (?: [^*\n] | [*][^/] )*
+ [\n]
+ (?: [^*] | [*][^/] )*
+ ) [*]/
+ ''')
+).sub, '')
+
+_strip_cython_code_from_html = partial(re.compile(
+ re.sub(r'\s\s+', '', r'''
+ <pre class=["'][^"']*cython\s+line[^"']*["']\s*>
+ (?:[^<]|<(?!/pre))+
+ </pre>
+ ''')
+).sub, '')
+
+
class TreeAssertVisitor(VisitorTransform):
# actually, a TreeVisitor would be enough, but this needs to run
# as part of the compiler pipeline
- def visit_CompilerDirectivesNode(self, node):
+ def __init__(self):
+ super(TreeAssertVisitor, self).__init__()
+ self._module_pos = None
+ self._c_patterns = []
+ self._c_antipatterns = []
+
+ def create_c_file_validator(self):
+ patterns, antipatterns = self._c_patterns, self._c_antipatterns
+
+ def fail(pos, pattern, found, file_path):
+ Errors.error(pos, "Pattern '%s' %s found in %s" %(
+ pattern,
+ 'was' if found else 'was not',
+ file_path,
+ ))
+
+ def validate_file_content(file_path, content):
+ for pattern in patterns:
+ #print("Searching pattern '%s'" % pattern)
+ if not re.search(pattern, content):
+ fail(self._module_pos, pattern, found=False, file_path=file_path)
+
+ for antipattern in antipatterns:
+ #print("Searching antipattern '%s'" % antipattern)
+ if re.search(antipattern, content):
+ fail(self._module_pos, antipattern, found=True, file_path=file_path)
+
+ def validate_c_file(result):
+ c_file = result.c_file
+ if not (patterns or antipatterns):
+ #print("No patterns defined for %s" % c_file)
+ return result
+
+ with open(c_file, encoding='utf8') as f:
+ content = f.read()
+ content = _strip_c_comments(content)
+ validate_file_content(c_file, content)
+
+ html_file = os.path.splitext(c_file)[0] + ".html"
+ if os.path.exists(html_file) and os.path.getmtime(c_file) <= os.path.getmtime(html_file):
+ with open(html_file, encoding='utf8') as f:
+ content = f.read()
+ content = _strip_cython_code_from_html(content)
+ validate_file_content(html_file, content)
+
+ return validate_c_file
+
+ def _check_directives(self, node):
directives = node.directives
if 'test_assert_path_exists' in directives:
for path in directives['test_assert_path_exists']:
@@ -179,6 +252,19 @@ class TreeAssertVisitor(VisitorTransform):
Errors.error(
node.pos,
"Unexpected path '%s' found in result tree" % path)
+ if 'test_assert_c_code_has' in directives:
+ self._c_patterns.extend(directives['test_assert_c_code_has'])
+ if 'test_fail_if_c_code_has' in directives:
+ self._c_antipatterns.extend(directives['test_fail_if_c_code_has'])
+
+ def visit_ModuleNode(self, node):
+ self._module_pos = node.pos
+ self._check_directives(node)
+ self.visitchildren(node)
+ return node
+
+ def visit_CompilerDirectivesNode(self, node):
+ self._check_directives(node)
self.visitchildren(node)
return node
@@ -272,3 +358,24 @@ def write_newer_file(file_path, newer_than, content, dedent=False, encoding=None
while other_time is None or other_time >= os.path.getmtime(file_path):
write_file(file_path, content, dedent=dedent, encoding=encoding)
+
+
+def py_parse_code(code):
+ """
+ Compiles code far enough to get errors from the parser and post-parse stage.
+
+    It is useful for checking for syntax errors; however, it doesn't generate runnable
+    code.
+ """
+ context = StringParseContext("test")
+ # all the errors we care about are in the parsing or postparse stage
+ try:
+ with Errors.local_errors() as errors:
+ result = TreeFragment(code, pipeline=[PostParse(context)])
+ result = result.substitute()
+ if errors:
+ raise errors[0] # compile error, which should get caught
+ else:
+ return result
+ except Errors.CompileError as e:
+ raise SyntaxError(e.message_only)
diff --git a/Cython/Utility/AsyncGen.c b/Cython/Utility/AsyncGen.c
index 4b8c8f678..fa374525f 100644
--- a/Cython/Utility/AsyncGen.c
+++ b/Cython/Utility/AsyncGen.c
@@ -1245,7 +1245,7 @@ static int __pyx_AsyncGen_init(PyObject *module) {
#if CYTHON_USE_TYPE_SPECS
__pyx_AsyncGenType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_AsyncGenType_spec, NULL);
#else
- (void) module;
+ CYTHON_MAYBE_UNUSED_VAR(module);
// on Windows, C-API functions can't be used in slots statically
__pyx_AsyncGenType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_AsyncGenType = __Pyx_FetchCommonType(&__pyx_AsyncGenType_type);
diff --git a/Cython/Utility/CommonStructures.c b/Cython/Utility/CommonStructures.c
index 5449e6902..f39f3d70d 100644
--- a/Cython/Utility/CommonStructures.c
+++ b/Cython/Utility/CommonStructures.c
@@ -121,7 +121,7 @@ static PyTypeObject *__Pyx_FetchCommonTypeFromSpec(PyObject *module, PyType_Spec
if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;
PyErr_Clear();
// We pass the ABI module reference to avoid keeping the user module alive by foreign type usages.
- (void) module;
+ CYTHON_UNUSED_VAR(module);
cached_type = __Pyx_PyType_FromModuleAndSpec(abi_module, spec, bases);
if (unlikely(!cached_type)) goto bad;
if (unlikely(__Pyx_fix_up_extension_type_from_spec(spec, (PyTypeObject *) cached_type) < 0)) goto bad;
diff --git a/Cython/Utility/Complex.c b/Cython/Utility/Complex.c
index 28062a061..15d5f544d 100644
--- a/Cython/Utility/Complex.c
+++ b/Cython/Utility/Complex.c
@@ -265,7 +265,7 @@ static {{type}} __Pyx_PyComplex_As_{{type_name}}(PyObject* o) {
if (a.imag == 0) {
if (a.real == 0) {
return a;
- } else if (b.imag == 0) {
+ } else if ((b.imag == 0) && (a.real >= 0)) {
z.real = pow{{m}}(a.real, b.real);
z.imag = 0;
return z;
diff --git a/Cython/Utility/Coroutine.c b/Cython/Utility/Coroutine.c
index 15ed61cc4..1a4a78ff5 100644
--- a/Cython/Utility/Coroutine.c
+++ b/Cython/Utility/Coroutine.c
@@ -256,7 +256,7 @@ static PyObject *__Pyx_Coroutine_GetAsyncIter_Generic(PyObject *obj) {
}
#else
// avoid C warning about 'unused function'
- if ((0)) (void) __Pyx_PyObject_CallMethod0(obj, PYIDENT("__aiter__"));
+ (void)&__Pyx_PyObject_CallMethod0;
#endif
obj_type_name = __Pyx_PyType_GetName(Py_TYPE(obj));
@@ -800,6 +800,7 @@ static CYTHON_INLINE void __Pyx_Coroutine_ResetFrameBackpointer(__Pyx_ExcInfoStr
// cycle.
#if CYTHON_COMPILING_IN_PYPY
// FIXME: what to do in PyPy?
+ CYTHON_UNUSED_VAR(exc_state);
#else
PyObject *exc_tb;
@@ -1860,11 +1861,11 @@ static PyTypeObject __pyx_CoroutineType_type = {
#endif /* CYTHON_USE_TYPE_SPECS */
static int __pyx_Coroutine_init(PyObject *module) {
+ CYTHON_MAYBE_UNUSED_VAR(module);
// on Windows, C-API functions can't be used in slots statically
#if CYTHON_USE_TYPE_SPECS
__pyx_CoroutineType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CoroutineType_spec, NULL);
#else
- (void) module;
__pyx_CoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_CoroutineType = __Pyx_FetchCommonType(&__pyx_CoroutineType_type);
#endif
@@ -2014,7 +2015,7 @@ static int __pyx_IterableCoroutine_init(PyObject *module) {
#if CYTHON_USE_TYPE_SPECS
__pyx_IterableCoroutineType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_IterableCoroutineType_spec, NULL);
#else
- (void) module;
+ CYTHON_UNUSED_VAR(module);
__pyx_IterableCoroutineType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_IterableCoroutineType = __Pyx_FetchCommonType(&__pyx_IterableCoroutineType_type);
#endif
@@ -2159,7 +2160,7 @@ static int __pyx_Generator_init(PyObject *module) {
#if CYTHON_USE_TYPE_SPECS
__pyx_GeneratorType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_GeneratorType_spec, NULL);
#else
- (void) module;
+ CYTHON_UNUSED_VAR(module);
// on Windows, C-API functions can't be used in slots statically
__pyx_GeneratorType_type.tp_getattro = __Pyx_PyObject_GenericGetAttrNoDict;
__pyx_GeneratorType_type.tp_iter = PyObject_SelfIter;
@@ -2565,8 +2566,8 @@ static PyTypeObject __Pyx__PyExc_StopAsyncIteration_type = {
#endif
static int __pyx_StopAsyncIteration_init(PyObject *module) {
+ CYTHON_UNUSED_VAR(module);
#if PY_VERSION_HEX >= 0x030500B1
- (void) module;
__Pyx_PyExc_StopAsyncIteration = PyExc_StopAsyncIteration;
#else
PyObject *builtins = PyEval_GetBuiltins();
@@ -2584,7 +2585,6 @@ static int __pyx_StopAsyncIteration_init(PyObject *module) {
__Pyx__PyExc_StopAsyncIteration_type.tp_dictoffset = ((PyTypeObject*)PyExc_BaseException)->tp_dictoffset;
__Pyx__PyExc_StopAsyncIteration_type.tp_base = (PyTypeObject*)PyExc_Exception;
- (void) module;
__Pyx_PyExc_StopAsyncIteration = (PyObject*) __Pyx_FetchCommonType(&__Pyx__PyExc_StopAsyncIteration_type);
if (unlikely(!__Pyx_PyExc_StopAsyncIteration))
return -1;
diff --git a/Cython/Utility/CppSupport.cpp b/Cython/Utility/CppSupport.cpp
index ca5579918..ba0002c94 100644
--- a/Cython/Utility/CppSupport.cpp
+++ b/Cython/Utility/CppSupport.cpp
@@ -84,15 +84,50 @@ auto __Pyx_pythran_to_python(T &&value) -> decltype(to_python(
////////////// OptionalLocals.proto ////////////////
//@proto_block: utility_code_proto_before_types
+#include <utility>
#if defined(CYTHON_USE_BOOST_OPTIONAL)
// fallback mode - std::optional is preferred but this gives
// people with a less up-to-date compiler a chance
#include <boost/optional.hpp>
- #define __Pyx_Optional_Type boost::optional
+ #define __Pyx_Optional_BaseType boost::optional
#else
#include <optional>
// since std::optional is a C++17 feature, a templated using declaration should be safe
// (although it could be replaced with a define)
template <typename T>
- using __Pyx_Optional_Type = std::optional<T>;
+ using __Pyx_Optional_BaseType = std::optional<T>;
#endif
+
+// This class reuses as much of the implementation of std::optional as possible.
+// The only place it differs significantly is the assignment operators, which use
+// "emplace" (thus requiring move/copy constructors, but not move/copy
+// assignment operators). This is preferred because it lets us work with assignable
+// types (for example those with const members)
+template <typename T>
+class __Pyx_Optional_Type : private __Pyx_Optional_BaseType<T> {
+public:
+    // Inherit construction and read access; assignment is redefined below.
+    using __Pyx_Optional_BaseType<T>::__Pyx_Optional_BaseType;
+    using __Pyx_Optional_BaseType<T>::has_value;
+    using __Pyx_Optional_BaseType<T>::operator*;
+    using __Pyx_Optional_BaseType<T>::operator->;
+#if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1600)
+    // Assignment uses "emplace" (destroy + reconstruct) so that types without
+    // copy/move *assignment* operators (e.g. types with const members) still
+    // work.  Because emplace destroys the current value before reading its
+    // argument, self-assignment must be guarded explicitly - otherwise
+    // "x = x" would copy-construct from an already-destroyed object.
+    __Pyx_Optional_Type& operator=(const __Pyx_Optional_Type& rhs) {
+        if (this != &rhs)
+            this->emplace(*rhs);
+        return *this;
+    }
+    __Pyx_Optional_Type& operator=(__Pyx_Optional_Type&& rhs) {
+        if (this != &rhs)
+            this->emplace(std::move(*rhs));
+        return *this;
+    }
+    template <typename U=T>
+    __Pyx_Optional_Type& operator=(U&& rhs) {
+        this->emplace(std::forward<U>(rhs));
+        return *this;
+    }
+#else
+    // Note - the "cpp_locals" feature is designed to require C++14.
+    // This pre-c++11 fallback is largely untested, and definitely won't work
+    // in all the cases that the more modern version does
+    using __Pyx_Optional_BaseType<T>::operator=; // the chances are emplace can't work...
+#endif
+};
diff --git a/Cython/Utility/CythonFunction.c b/Cython/Utility/CythonFunction.c
index 9a7bf7405..226019cee 100644
--- a/Cython/Utility/CythonFunction.c
+++ b/Cython/Utility/CythonFunction.c
@@ -780,9 +780,17 @@ static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, P
self = PyTuple_GetItem(args, 0);
if (unlikely(!self)) {
Py_DECREF(new_args);
+#if PY_MAJOR_VERSION > 2
PyErr_Format(PyExc_TypeError,
"unbound method %.200S() needs an argument",
cyfunc->func_qualname);
+#else
+ // %S doesn't work in PyErr_Format on Py2 and replicating
+ // the formatting seems more trouble than it's worth
+ // (so produce a less useful error message).
+ PyErr_SetString(PyExc_TypeError,
+ "unbound method needs an argument");
+#endif
return NULL;
}
@@ -934,7 +942,7 @@ static PyObject * __Pyx_CyFunction_Vectorcall_FASTCALL_KEYWORDS_METHOD(PyObject
return NULL;
}
- return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, nargs, kwnames);
+ return ((__Pyx_PyCMethod)(void(*)(void))def->ml_meth)(self, cls, args, (size_t)nargs, kwnames);
}
#endif
@@ -1055,7 +1063,7 @@ static int __pyx_CyFunction_init(PyObject *module) {
#if CYTHON_USE_TYPE_SPECS
__pyx_CyFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_CyFunctionType_spec, NULL);
#else
- (void) module;
+ CYTHON_UNUSED_VAR(module);
__pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type);
#endif
if (unlikely(__pyx_CyFunctionType == NULL)) {
@@ -1463,30 +1471,17 @@ bad:
return result;
}
-static PyObject *
-__Pyx_FusedFunction_get_self(__pyx_FusedFunctionObject *m, void *closure)
-{
- PyObject *self = m->self;
- CYTHON_UNUSED_VAR(closure);
- if (unlikely(!self)) {
- PyErr_SetString(PyExc_AttributeError, "'function' object has no attribute '__self__'");
- } else {
- Py_INCREF(self);
- }
- return self;
-}
-
static PyMemberDef __pyx_FusedFunction_members[] = {
{(char *) "__signatures__",
T_OBJECT,
offsetof(__pyx_FusedFunctionObject, __signatures__),
READONLY,
0},
+ {(char *) "__self__", T_OBJECT_EX, offsetof(__pyx_FusedFunctionObject, self), READONLY, 0},
{0, 0, 0, 0, 0},
};
static PyGetSetDef __pyx_FusedFunction_getsets[] = {
- {(char *) "__self__", (getter)__Pyx_FusedFunction_get_self, 0, 0, 0},
// __doc__ is None for the fused function type, but we need it to be
// a descriptor for the instance's __doc__, so rebuild the descriptor in our subclass
// (all other descriptors are inherited)
@@ -1600,7 +1595,7 @@ static int __pyx_FusedFunction_init(PyObject *module) {
__pyx_FusedFunctionType = __Pyx_FetchCommonTypeFromSpec(module, &__pyx_FusedFunctionType_spec, bases);
Py_DECREF(bases);
#else
- (void) module;
+ CYTHON_UNUSED_VAR(module);
// Set base from __Pyx_FetchCommonTypeFromSpec, in case it's different from the local static value.
__pyx_FusedFunctionType_type.tp_base = __pyx_CyFunctionType;
__pyx_FusedFunctionType = __Pyx_FetchCommonType(&__pyx_FusedFunctionType_type);
@@ -1614,7 +1609,7 @@ static int __pyx_FusedFunction_init(PyObject *module) {
//////////////////// ClassMethod.proto ////////////////////
#include "descrobject.h"
-static CYTHON_UNUSED PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/
+CYTHON_UNUSED static PyObject* __Pyx_Method_ClassMethod(PyObject *method); /*proto*/
//////////////////// ClassMethod ////////////////////
diff --git a/Cython/Utility/Exceptions.c b/Cython/Utility/Exceptions.c
index c6c5d20ed..3c9784f88 100644
--- a/Cython/Utility/Exceptions.c
+++ b/Cython/Utility/Exceptions.c
@@ -675,10 +675,8 @@ static void __Pyx_WriteUnraisable(const char *name, int clineno,
PyGILState_STATE state;
if (nogil)
state = PyGILState_Ensure();
-#ifdef _MSC_VER
/* arbitrary, to suppress warning */
- else state = (PyGILState_STATE)-1;
-#endif
+ else state = (PyGILState_STATE)0;
#endif
CYTHON_UNUSED_VAR(clineno);
CYTHON_UNUSED_VAR(lineno);
@@ -727,13 +725,15 @@ static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);/*proto*/
//@substitute: naming
#ifndef CYTHON_CLINE_IN_TRACEBACK
-static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {
PyObject *use_cline;
PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
PyObject **cython_runtime_dict;
#endif
+ CYTHON_MAYBE_UNUSED_VAR(tstate);
+
if (unlikely(!${cython_runtime_cname})) {
// Very early error where the runtime module is not set up yet.
return c_line;
diff --git a/Cython/Utility/ExtensionTypes.c b/Cython/Utility/ExtensionTypes.c
index ec994a367..700bf1468 100644
--- a/Cython/Utility/ExtensionTypes.c
+++ b/Cython/Utility/ExtensionTypes.c
@@ -11,8 +11,8 @@ static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject
#if CYTHON_USE_TYPE_SPECS
static int __Pyx_fix_up_extension_type_from_spec(PyType_Spec *spec, PyTypeObject *type) {
#if PY_VERSION_HEX > 0x030900B1 || CYTHON_COMPILING_IN_LIMITED_API
- (void) spec;
- (void) type;
+ CYTHON_UNUSED_VAR(spec);
+ CYTHON_UNUSED_VAR(type);
#else
// Set tp_weakreflist, tp_dictoffset, tp_vectorcalloffset
// Copied and adapted from https://bugs.python.org/issue38140
@@ -156,7 +156,7 @@ static int __Pyx_validate_bases_tuple(const char *type_name, Py_ssize_t dictoffs
/////////////// PyType_Ready.proto ///////////////
// unused when using type specs
-static CYTHON_UNUSED int __Pyx_PyType_Ready(PyTypeObject *t);/*proto*/
+CYTHON_UNUSED static int __Pyx_PyType_Ready(PyTypeObject *t);/*proto*/
/////////////// PyType_Ready ///////////////
//@requires: ObjectHandling.c::PyObjectCallMethod0
@@ -564,3 +564,37 @@ static PyObject *{{func_name}}(PyObject *left, PyObject *right {{extra_arg_decl}
}
return __Pyx_NewRef(Py_NotImplemented);
}
+
+/////////////// ValidateExternBase.proto ///////////////
+
+static int __Pyx_validate_extern_base(PyTypeObject *base); /* proto */
+
+/////////////// ValidateExternBase ///////////////
+//@requires: ObjectHandling.c::FormatTypeName
+
+// Reject extern base classes that are variable-sized (PyVarObject,
+// i.e. tp_itemsize != 0): inheriting from such types is not currently
+// supported.  Returns 0 on success, -1 with a TypeError set on failure.
+static int __Pyx_validate_extern_base(PyTypeObject *base) {
+    Py_ssize_t itemsize;
+#if CYTHON_COMPILING_IN_LIMITED_API
+    PyObject *py_itemsize;
+#endif
+#if !CYTHON_COMPILING_IN_LIMITED_API
+    itemsize = base->tp_itemsize;
+#else
+    // The Limited API cannot read PyTypeObject fields directly, so query the
+    // "__itemsize__" attribute instead.  The cast is required because
+    // PyObject_GetAttrString() takes a PyObject*, not a PyTypeObject*.
+    py_itemsize = PyObject_GetAttrString((PyObject *)base, "__itemsize__");
+    if (!py_itemsize)
+        return -1;
+    itemsize = PyLong_AsSsize_t(py_itemsize);
+    Py_DECREF(py_itemsize);
+    py_itemsize = 0;
+    if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred())
+        return -1;
+#endif
+    if (itemsize) {
+        __Pyx_TypeName b_name = __Pyx_PyType_GetName(base);
+        PyErr_Format(PyExc_TypeError,
+            "inheritance from PyVarObject types like '" __Pyx_FMT_TYPENAME "' not currently supported", b_name);
+        __Pyx_DECREF_TypeName(b_name);
+        return -1;
+    }
+    return 0;
+}
diff --git a/Cython/Utility/FunctionArguments.c b/Cython/Utility/FunctionArguments.c
index 1882f826f..8bdaee562 100644
--- a/Cython/Utility/FunctionArguments.c
+++ b/Cython/Utility/FunctionArguments.c
@@ -422,7 +422,7 @@ bad:
#if CYTHON_METH_FASTCALL
#define __Pyx_Arg_FASTCALL(args, i) args[i]
#define __Pyx_NumKwargs_FASTCALL(kwds) PyTuple_GET_SIZE(kwds)
- #define __Pyx_KwValues_FASTCALL(args, nargs) (&args[nargs])
+ #define __Pyx_KwValues_FASTCALL(args, nargs) ((args) + (nargs))
static CYTHON_INLINE PyObject * __Pyx_GetKwValue_FASTCALL(PyObject *kwnames, PyObject *const *kwvalues, PyObject *s);
#define __Pyx_KwargsAsDict_FASTCALL(kw, kwvalues) _PyStack_AsDict(kwvalues, kw)
#else
diff --git a/Cython/Utility/ImportExport.c b/Cython/Utility/ImportExport.c
index 6ceba7efb..66e75ea00 100644
--- a/Cython/Utility/ImportExport.c
+++ b/Cython/Utility/ImportExport.c
@@ -478,13 +478,24 @@ set_path:
#ifndef __PYX_HAVE_RT_ImportType_proto
#define __PYX_HAVE_RT_ImportType_proto
+#if __STDC_VERSION__ >= 201112L
+#include <stdalign.h>
+#endif
+
+#if __STDC_VERSION__ >= 201112L || __cplusplus >= 201103L
+#define __PYX_GET_STRUCT_ALIGNMENT(s) alignof(s)
+#else
+// best guess at what the alignment could be since we can't measure it
+#define __PYX_GET_STRUCT_ALIGNMENT(s) sizeof(void*)
+#endif
+
enum __Pyx_ImportType_CheckSize {
__Pyx_ImportType_CheckSize_Error = 0,
__Pyx_ImportType_CheckSize_Warn = 1,
__Pyx_ImportType_CheckSize_Ignore = 2
};
-static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size); /*proto*/
+static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize check_size); /*proto*/
#endif
@@ -493,13 +504,15 @@ static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name,
#ifndef __PYX_HAVE_RT_ImportType
#define __PYX_HAVE_RT_ImportType
static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name,
- size_t size, enum __Pyx_ImportType_CheckSize check_size)
+ size_t size, size_t alignment, enum __Pyx_ImportType_CheckSize check_size)
{
PyObject *result = 0;
char warning[200];
Py_ssize_t basicsize;
+ Py_ssize_t itemsize;
#if CYTHON_COMPILING_IN_LIMITED_API
PyObject *py_basicsize;
+ PyObject *py_itemsize;
#endif
result = PyObject_GetAttrString(module, class_name);
@@ -513,6 +526,7 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name,
}
#if !CYTHON_COMPILING_IN_LIMITED_API
basicsize = ((PyTypeObject *)result)->tp_basicsize;
+ itemsize = ((PyTypeObject *)result)->tp_itemsize;
#else
py_basicsize = PyObject_GetAttrString(result, "__basicsize__");
if (!py_basicsize)
@@ -522,19 +536,42 @@ static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name,
py_basicsize = 0;
if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred())
goto bad;
+ py_itemsize = PyObject_GetAttrString(result, "__itemsize__");
+ if (!py_itemsize)
+ goto bad;
+ itemsize = PyLong_AsSsize_t(py_itemsize);
+ Py_DECREF(py_itemsize);
+ py_itemsize = 0;
+ if (itemsize == (Py_ssize_t)-1 && PyErr_Occurred())
+ goto bad;
#endif
- if ((size_t)basicsize < size) {
+ if (itemsize) {
+ // If itemsize is smaller than the alignment the struct can end up with some extra
+ // padding at the end. In this case we need to work out the maximum size that
+ // the padding could be when calculating the range of valid struct sizes.
+ if (size % alignment) {
+ // if this is true we've probably calculated the alignment wrongly
+ // (most likely because alignof isn't available)
+ alignment = size % alignment;
+ }
+ if (itemsize < (Py_ssize_t)alignment)
+ itemsize = (Py_ssize_t)alignment;
+ }
+ if ((size_t)(basicsize + itemsize) < size) {
PyErr_Format(PyExc_ValueError,
"%.200s.%.200s size changed, may indicate binary incompatibility. "
"Expected %zd from C header, got %zd from PyObject",
- module_name, class_name, size, basicsize);
+ module_name, class_name, size, basicsize+itemsize);
goto bad;
}
- if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) {
+ // varobjects almost always have structs between basicsize and basicsize + itemsize
+ // but the struct isn't always one of the two limiting values
+ if (check_size == __Pyx_ImportType_CheckSize_Error &&
+ ((size_t)basicsize > size || (size_t)(basicsize + itemsize) < size)) {
PyErr_Format(PyExc_ValueError,
"%.200s.%.200s size changed, may indicate binary incompatibility. "
- "Expected %zd from C header, got %zd from PyObject",
- module_name, class_name, size, basicsize);
+ "Expected %zd from C header, got %zd-%zd from PyObject",
+ module_name, class_name, size, basicsize, basicsize+itemsize);
goto bad;
}
else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) {
diff --git a/Cython/Utility/MemoryView.pyx b/Cython/Utility/MemoryView.pyx
index 990319e05..d36e7f60c 100644
--- a/Cython/Utility/MemoryView.pyx
+++ b/Cython/Utility/MemoryView.pyx
@@ -26,6 +26,7 @@ cdef extern from "<string.h>":
void *memset(void *b, int c, size_t len)
cdef extern from *:
+ bint __PYX_CYTHON_ATOMICS_ENABLED()
int __Pyx_GetBuffer(object, Py_buffer *, int) except -1
void __Pyx_ReleaseBuffer(Py_buffer *)
@@ -80,7 +81,7 @@ cdef extern from *:
__Pyx_memviewslice *from_mvs,
char *mode, int ndim,
size_t sizeof_dtype, int contig_flag,
- bint dtype_is_object) nogil except *
+ bint dtype_is_object) except * nogil
bint slice_is_contig "__pyx_memviewslice_is_contig" (
{{memviewslice_name}} mvs, char order, int ndim) nogil
bint slices_overlap "__pyx_slices_overlap" ({{memviewslice_name}} *slice1,
@@ -93,6 +94,17 @@ cdef extern from "<stdlib.h>":
void free(void *) nogil
void *memcpy(void *dest, void *src, size_t n) nogil
+# the sequence abstract base class
+cdef object __pyx_collections_abc_Sequence "__pyx_collections_abc_Sequence"
+try:
+ if __import__("sys").version_info >= (3, 3):
+ __pyx_collections_abc_Sequence = __import__("collections.abc").abc.Sequence
+ else:
+ __pyx_collections_abc_Sequence = __import__("collections").Sequence
+except:
+ # it isn't a big problem if this fails
+ __pyx_collections_abc_Sequence = None
+
#
### cython.array class
#
@@ -110,7 +122,7 @@ cdef class array:
Py_ssize_t itemsize
unicode mode # FIXME: this should have been a simple 'char'
bytes _format
- void (*callback_free_data)(void *data)
+ void (*callback_free_data)(void *data) noexcept
# cdef object _memview
cdef bint free_data
cdef bint dtype_is_object
@@ -224,6 +236,12 @@ cdef class array:
def __setitem__(self, item, value):
self.memview[item] = value
+ # Sequence methods
+ try:
+ count = __pyx_collections_abc_Sequence.count
+ index = __pyx_collections_abc_Sequence.index
+ except:
+ pass
@cname("__pyx_array_allocate_buffer")
cdef int _allocate_buffer(array self) except -1:
@@ -349,14 +367,15 @@ cdef class memoryview:
(<__pyx_buffer *> &self.view).obj = Py_None
Py_INCREF(Py_None)
- global __pyx_memoryview_thread_locks_used
- if __pyx_memoryview_thread_locks_used < {{THREAD_LOCKS_PREALLOCATED}}:
- self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
- __pyx_memoryview_thread_locks_used += 1
- if self.lock is NULL:
- self.lock = PyThread_allocate_lock()
+ if not __PYX_CYTHON_ATOMICS_ENABLED():
+ global __pyx_memoryview_thread_locks_used
+ if __pyx_memoryview_thread_locks_used < {{THREAD_LOCKS_PREALLOCATED}}:
+ self.lock = __pyx_memoryview_thread_locks[__pyx_memoryview_thread_locks_used]
+ __pyx_memoryview_thread_locks_used += 1
if self.lock is NULL:
- raise MemoryError
+ self.lock = PyThread_allocate_lock()
+ if self.lock is NULL:
+ raise MemoryError
if flags & PyBUF_FORMAT:
self.dtype_is_object = (self.view.format[0] == b'O' and self.view.format[1] == b'\0')
@@ -789,7 +808,7 @@ cdef int slice_memviewslice(
int dim, int new_ndim, int *suboffset_dim,
Py_ssize_t start, Py_ssize_t stop, Py_ssize_t step,
int have_start, int have_stop, int have_step,
- bint is_slice) nogil except -1:
+ bint is_slice) except -1 nogil:
"""
Create a new slice dst given slice src.
@@ -919,7 +938,7 @@ cdef char *pybuffer_index(Py_buffer *view, char *bufp, Py_ssize_t index,
### Transposing a memoryviewslice
#
@cname('__pyx_memslice_transpose')
-cdef int transpose_memslice({{memviewslice_name}} *memslice) nogil except -1:
+cdef int transpose_memslice({{memviewslice_name}} *memslice) except -1 nogil:
cdef int ndim = memslice.memview.view.ndim
cdef Py_ssize_t *shape = memslice.shape
@@ -970,6 +989,22 @@ cdef class _memoryviewslice(memoryview):
cdef _get_base(self):
return self.from_object
+ # Sequence methods
+ try:
+ count = __pyx_collections_abc_Sequence.count
+ index = __pyx_collections_abc_Sequence.index
+ except:
+ pass
+
+try:
+ if __pyx_collections_abc_Sequence:
+ # The main value of registering _memoryviewslice as a
+ # Sequence is that it can be used in structural pattern
+ # matching in Python 3.10+
+ __pyx_collections_abc_Sequence.register(_memoryviewslice)
+ __pyx_collections_abc_Sequence.register(array)
+except:
+ pass # ignore failure, it's a minor issue
@cname('__pyx_memoryview_fromslice')
cdef memoryview_fromslice({{memviewslice_name}} memviewslice,
@@ -1147,7 +1182,7 @@ cdef void copy_strided_to_strided({{memviewslice_name}} *src,
src.shape, dst.shape, ndim, itemsize)
@cname('__pyx_memoryview_slice_get_size')
-cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) nogil:
+cdef Py_ssize_t slice_get_size({{memviewslice_name}} *src, int ndim) noexcept nogil:
"Return the size of the memory occupied by the slice in number of bytes"
cdef Py_ssize_t shape, size = src.memview.view.itemsize
@@ -1181,7 +1216,7 @@ cdef Py_ssize_t fill_contig_strides_array(
cdef void *copy_data_to_temp({{memviewslice_name}} *src,
{{memviewslice_name}} *tmpslice,
char order,
- int ndim) nogil except NULL:
+ int ndim) except NULL nogil:
"""
Copy a direct slice to temporary contiguous memory. The caller should free
the result when done.
@@ -1241,7 +1276,7 @@ cdef int _err_no_memory() except -1 with gil:
cdef int memoryview_copy_contents({{memviewslice_name}} src,
{{memviewslice_name}} dst,
int src_ndim, int dst_ndim,
- bint dtype_is_object) nogil except -1:
+ bint dtype_is_object) except -1 nogil:
"""
Copy memory from slice src to slice dst.
Check for overlapping memory and verify the shapes.
@@ -1345,7 +1380,7 @@ cdef void refcount_objects_in_slice_with_gil(char *data, Py_ssize_t *shape,
@cname('__pyx_memoryview_refcount_objects_in_slice')
cdef void refcount_objects_in_slice(char *data, Py_ssize_t *shape,
- Py_ssize_t *strides, int ndim, bint inc):
+ Py_ssize_t *strides, int ndim, bint inc) noexcept:
cdef Py_ssize_t i
cdef Py_ssize_t stride = strides[0]
diff --git a/Cython/Utility/MemoryView_C.c b/Cython/Utility/MemoryView_C.c
index 07ed24d20..774ec1767 100644
--- a/Cython/Utility/MemoryView_C.c
+++ b/Cython/Utility/MemoryView_C.c
@@ -24,38 +24,84 @@ typedef struct {
#ifndef CYTHON_ATOMICS
#define CYTHON_ATOMICS 1
#endif
+// using CYTHON_ATOMICS as a cdef extern bint in the Cython memoryview code
+// interacts badly with "import *". Therefore, define a helper function-like macro
+#define __PYX_CYTHON_ATOMICS_ENABLED() CYTHON_ATOMICS
#define __pyx_atomic_int_type int
-// todo: Portland pgcc, maybe OS X's OSAtomicIncrement32,
-// libatomic + autotools-like distutils support? Such a pain...
-#if CYTHON_ATOMICS && __GNUC__ >= 4 && (__GNUC_MINOR__ > 1 || \
- (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL >= 2)) && \
- !defined(__i386__)
+#define __pyx_nonatomic_int_type int
+
+// For standard C/C++ atomics, get the headers first so we have ATOMIC_INT_LOCK_FREE
+// defined when we decide to use them.
+#if CYTHON_ATOMICS && (defined(__STDC_VERSION__) && \
+ (__STDC_VERSION__ >= 201112L) && \
+ !defined(__STDC_NO_ATOMICS__))
+ #include <stdatomic.h>
+#elif CYTHON_ATOMICS && (defined(__cplusplus) && ( \
+ (__cplusplus >= 201103L) || \
+ (defined(_MSC_VER) && _MSC_VER >= 1700)))
+ #include <atomic>
+#endif
+
+#if CYTHON_ATOMICS && (defined(__STDC_VERSION__) && \
+ (__STDC_VERSION__ >= 201112L) && \
+ !defined(__STDC_NO_ATOMICS__) && \
+ ATOMIC_INT_LOCK_FREE == 2)
+ // C11 atomics are available.
+ // Require ATOMIC_INT_LOCK_FREE because I'm nervous about the __pyx_atomic_int[2]
+ // alignment trick in MemoryView.pyx if it uses mutexes.
+ #undef __pyx_atomic_int_type
+ #define __pyx_atomic_int_type atomic_int
+ // TODO - it might be possible to use a less strict memory ordering here
+ #define __pyx_atomic_incr_aligned(value) atomic_fetch_add(value, 1)
+ #define __pyx_atomic_decr_aligned(value) atomic_fetch_sub(value, 1)
+ #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER)
+ #pragma message ("Using standard C atomics")
+ #elif defined(__PYX_DEBUG_ATOMICS)
+ #warning "Using standard C atomics"
+ #endif
+#elif CYTHON_ATOMICS && (defined(__cplusplus) && ( \
+ (__cplusplus >= 201103L) || \
+ /*_MSC_VER 1700 is Visual Studio 2012 */ \
+ (defined(_MSC_VER) && _MSC_VER >= 1700)) && \
+ ATOMIC_INT_LOCK_FREE == 2)
+ // C++11 atomics are available.
+ // Require ATOMIC_INT_LOCK_FREE because I'm nervous about the __pyx_atomic_int[2]
+ // alignment trick in MemoryView.pyx if it uses mutexes.
+ #undef __pyx_atomic_int_type
+ #define __pyx_atomic_int_type std::atomic_int
+ // TODO - it might be possible to use a less strict memory ordering here
+ #define __pyx_atomic_incr_aligned(value) std::atomic_fetch_add(value, 1)
+ #define __pyx_atomic_decr_aligned(value) std::atomic_fetch_sub(value, 1)
+
+ #if defined(__PYX_DEBUG_ATOMICS) && defined(_MSC_VER)
+ #pragma message ("Using standard C++ atomics")
+ #elif defined(__PYX_DEBUG_ATOMICS)
+ #warning "Using standard C++ atomics"
+ #endif
+#elif CYTHON_ATOMICS && (__GNUC__ >= 5 || (__GNUC__ == 4 && \
+ (__GNUC_MINOR__ > 1 || \
+ (__GNUC_MINOR__ == 1 && __GNUC_PATCHLEVEL__ >= 2))))
/* gcc >= 4.1.2 */
- #define __pyx_atomic_incr_aligned(value, lock) __sync_fetch_and_add(value, 1)
- #define __pyx_atomic_decr_aligned(value, lock) __sync_fetch_and_sub(value, 1)
+ #define __pyx_atomic_incr_aligned(value) __sync_fetch_and_add(value, 1)
+ #define __pyx_atomic_decr_aligned(value) __sync_fetch_and_sub(value, 1)
#ifdef __PYX_DEBUG_ATOMICS
#warning "Using GNU atomics"
#endif
-#elif CYTHON_ATOMICS && defined(_MSC_VER) && 0
+#elif CYTHON_ATOMICS && defined(_MSC_VER)
/* msvc */
- #include <Windows.h>
+ #include <intrin.h>
#undef __pyx_atomic_int_type
- #define __pyx_atomic_int_type LONG
- #define __pyx_atomic_incr_aligned(value, lock) InterlockedIncrement(value)
- #define __pyx_atomic_decr_aligned(value, lock) InterlockedDecrement(value)
+ #define __pyx_atomic_int_type long
+ #define __pyx_nonatomic_int_type long
+ #pragma intrinsic (_InterlockedExchangeAdd)
+ #define __pyx_atomic_incr_aligned(value) _InterlockedExchangeAdd(value, 1)
+ #define __pyx_atomic_decr_aligned(value) _InterlockedExchangeAdd(value, -1)
#ifdef __PYX_DEBUG_ATOMICS
#pragma message ("Using MSVC atomics")
#endif
-#elif CYTHON_ATOMICS && (defined(__ICC) || defined(__INTEL_COMPILER)) && 0
- #define __pyx_atomic_incr_aligned(value, lock) _InterlockedIncrement(value)
- #define __pyx_atomic_decr_aligned(value, lock) _InterlockedDecrement(value)
-
- #ifdef __PYX_DEBUG_ATOMICS
- #warning "Using Intel atomics"
- #endif
#else
#undef CYTHON_ATOMICS
#define CYTHON_ATOMICS 0
@@ -69,9 +115,9 @@ typedef volatile __pyx_atomic_int_type __pyx_atomic_int;
#if CYTHON_ATOMICS
#define __pyx_add_acquisition_count(memview) \
- __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
+ __pyx_atomic_incr_aligned(__pyx_get_slice_count_pointer(memview))
#define __pyx_sub_acquisition_count(memview) \
- __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview), memview->lock)
+ __pyx_atomic_decr_aligned(__pyx_get_slice_count_pointer(memview))
#else
#define __pyx_add_acquisition_count(memview) \
__pyx_add_acquisition_count_locked(__pyx_get_slice_count_pointer(memview), memview->lock)
@@ -451,7 +497,7 @@ static void __pyx_fatalerror(const char *fmt, ...) Py_NO_RETURN {
va_list vargs;
char msg[200];
-#ifdef HAVE_STDARG_PROTOTYPES
+#if PY_VERSION_HEX >= 0x030A0000 || defined(HAVE_STDARG_PROTOTYPES)
va_start(vargs, fmt);
#else
va_start(vargs);
@@ -488,7 +534,7 @@ __pyx_sub_acquisition_count_locked(__pyx_atomic_int *acquisition_count,
static CYTHON_INLINE void
__Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno)
{
- __pyx_atomic_int_type old_acquisition_count;
+ __pyx_nonatomic_int_type old_acquisition_count;
struct {{memview_struct_name}} *memview = memslice->memview;
if (unlikely(!memview || (PyObject *) memview == Py_None)) {
// Allow uninitialized memoryview assignment and do not ref-count None.
@@ -515,7 +561,7 @@ __Pyx_INC_MEMVIEW({{memviewslice_name}} *memslice, int have_gil, int lineno)
static CYTHON_INLINE void __Pyx_XCLEAR_MEMVIEW({{memviewslice_name}} *memslice,
int have_gil, int lineno) {
- __pyx_atomic_int_type old_acquisition_count;
+ __pyx_nonatomic_int_type old_acquisition_count;
struct {{memview_struct_name}} *memview = memslice->memview;
if (unlikely(!memview || (PyObject *) memview == Py_None)) {
diff --git a/Cython/Utility/ModuleSetupCode.c b/Cython/Utility/ModuleSetupCode.c
index 533689788..df2a4ee4a 100644
--- a/Cython/Utility/ModuleSetupCode.c
+++ b/Cython/Utility/ModuleSetupCode.c
@@ -61,6 +61,7 @@
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_LIMITED_API 0
#define CYTHON_COMPILING_IN_GRAAL 1
+ #define CYTHON_COMPILING_IN_NOGIL 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
@@ -112,11 +113,13 @@
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC 0
#endif
+
#elif defined(PYPY_VERSION)
#define CYTHON_COMPILING_IN_PYPY 1
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_LIMITED_API 0
#define CYTHON_COMPILING_IN_GRAAL 0
+ #define CYTHON_COMPILING_IN_NOGIL 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
@@ -166,14 +169,16 @@
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
- #define CYTHON_UPDATE_DESCRIPTOR_DOC (PYPY_VERSION_HEX >= 0x07030900)
+ #define CYTHON_UPDATE_DESCRIPTOR_DOC 0
#endif
+
#elif defined(CYTHON_LIMITED_API)
// EXPERIMENTAL !!
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#define CYTHON_COMPILING_IN_LIMITED_API 1
#define CYTHON_COMPILING_IN_GRAAL 0
+ #define CYTHON_COMPILING_IN_NOGIL 0
// CYTHON_CLINE_IN_TRACEBACK is currently disabled for the Limited API
#undef CYTHON_CLINE_IN_TRACEBACK
@@ -228,11 +233,61 @@
#ifndef CYTHON_UPDATE_DESCRIPTOR_DOC
#define CYTHON_UPDATE_DESCRIPTOR_DOC 0
#endif
+
+#elif defined(PY_NOGIL)
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #define CYTHON_COMPILING_IN_LIMITED_API 0
+ #define CYTHON_COMPILING_IN_GRAAL 0
+ #define CYTHON_COMPILING_IN_NOGIL 1
+
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #ifndef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 1
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 1
+ #endif
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+
#else
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_CPYTHON 1
#define CYTHON_COMPILING_IN_LIMITED_API 0
#define CYTHON_COMPILING_IN_GRAAL 0
+ #define CYTHON_COMPILING_IN_NOGIL 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
@@ -374,6 +429,17 @@
// unused attribute
#ifndef CYTHON_UNUSED
+ #if defined(__cplusplus)
+ /* for clang __has_cpp_attribute(maybe_unused) is true even before C++17
+ * but leads to warnings with -pedantic, since it is a C++17 feature */
+ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L)
+ #if __has_cpp_attribute(maybe_unused)
+ #define CYTHON_UNUSED [[maybe_unused]]
+ #endif
+ #endif
+ #endif
+#endif
+#ifndef CYTHON_UNUSED
# if defined(__GNUC__)
# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
# define CYTHON_UNUSED __attribute__ ((__unused__))
@@ -441,13 +507,21 @@
#ifndef CYTHON_FALLTHROUGH
- #if defined(__cplusplus) && __cplusplus >= 201103L
- #if __has_cpp_attribute(fallthrough)
- #define CYTHON_FALLTHROUGH [[fallthrough]]
- #elif __has_cpp_attribute(clang::fallthrough)
- #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
- #elif __has_cpp_attribute(gnu::fallthrough)
- #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #if defined(__cplusplus)
+ /* for clang __has_cpp_attribute(fallthrough) is true even before C++17
+ * but leads to warnings with -pedantic, since it is a C++17 feature */
+ #if ((defined(_MSVC_LANG) && _MSVC_LANG >= 201703L) || __cplusplus >= 201703L)
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #endif
+ #endif
+
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
#endif
#endif
@@ -459,7 +533,7 @@
#endif
#endif
- #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if defined(__clang__) && defined(__apple_build_version__)
#if __apple_build_version__ < 7000000 /* Xcode < 7.0 */
#undef CYTHON_FALLTHROUGH
#define CYTHON_FALLTHROUGH
@@ -518,8 +592,10 @@ class __Pyx_FakeReference {
T *operator&() { return ptr; }
operator T&() { return *ptr; }
// TODO(robertwb): Delegate all operators (or auto-generate unwrapping code where needed).
- template<typename U> bool operator ==(U other) { return *ptr == other; }
- template<typename U> bool operator !=(U other) { return *ptr != other; }
+ template<typename U> bool operator ==(const U& other) const { return *ptr == other; }
+ template<typename U> bool operator !=(const U& other) const { return *ptr != other; }
+ template<typename U=T> bool operator==(const __Pyx_FakeReference<U>& other) const { return *ptr == *other.ptr; }
+ template<typename U=T> bool operator!=(const __Pyx_FakeReference<U>& other) const { return *ptr != *other.ptr; }
private:
T *ptr;
};
@@ -650,6 +726,13 @@ class __Pyx_FakeReference {
#endif
#define __Pyx_NoneAsNull(obj) (__Pyx_Py_IsNone(obj) ? NULL : (obj))
+#ifndef CO_COROUTINE
+ #define CO_COROUTINE 0x80
+#endif
+#ifndef CO_ASYNC_GENERATOR
+ #define CO_ASYNC_GENERATOR 0x200
+#endif
+
#ifndef Py_TPFLAGS_CHECKTYPES
#define Py_TPFLAGS_CHECKTYPES 0
#endif
diff --git a/Cython/Utility/ObjectHandling.c b/Cython/Utility/ObjectHandling.c
index 56be4ea27..6b212ca79 100644
--- a/Cython/Utility/ObjectHandling.c
+++ b/Cython/Utility/ObjectHandling.c
@@ -287,7 +287,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject *k
#if CYTHON_USE_TYPE_SLOTS
static PyObject *__Pyx_PyObject_GetIndex(PyObject *obj, PyObject *index) {
// Get element from sequence object `obj` at index `index`.
- PyObject *runerr;
+ PyObject *runerr = NULL;
Py_ssize_t key_value;
key_value = __Pyx_PyIndex_AsSsize_t(index);
if (likely(key_value != -1 || !(runerr = PyErr_Occurred()))) {
@@ -1553,18 +1553,18 @@ static int __Pyx_SetNewInClass(PyObject *ns, PyObject *name, PyObject *value) {
//@substitute: naming
#if CYTHON_USE_DICT_VERSIONS
-#define __Pyx_GetModuleGlobalName(var, name) { \
+#define __Pyx_GetModuleGlobalName(var, name) do { \
static PY_UINT64_T __pyx_dict_version = 0; \
static PyObject *__pyx_dict_cached_value = NULL; \
(var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION($moddict_cname))) ? \
(likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) : \
__Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value); \
-}
-#define __Pyx_GetModuleGlobalNameUncached(var, name) { \
+} while(0)
+#define __Pyx_GetModuleGlobalNameUncached(var, name) do { \
PY_UINT64_T __pyx_dict_version; \
PyObject *__pyx_dict_cached_value; \
(var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value); \
-}
+} while(0)
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); /*proto*/
#else
#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name)
@@ -2081,11 +2081,11 @@ static CYTHON_INLINE PyObject* __Pyx_CallUnboundCMethod1(__Pyx_CachedCFunction*
if (flag == METH_O) {
return (*(cfunc->func))(self, arg);
} else if ((PY_VERSION_HEX >= 0x030600B1) && flag == METH_FASTCALL) {
- if ((PY_VERSION_HEX >= 0x030700A0)) {
+ #if PY_VERSION_HEX >= 0x030700A0
return (*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)cfunc->func)(self, &arg, 1);
- } else {
+ #else
return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL);
- }
+ #endif
} else if ((PY_VERSION_HEX >= 0x030700A0) && flag == (METH_FASTCALL | METH_KEYWORDS)) {
return (*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)cfunc->func)(self, &arg, 1, NULL);
}
@@ -2900,6 +2900,15 @@ static PyObject *__Pyx_PyMethod_New(PyObject *func, PyObject *self, PyObject *ty
#define __Pyx_PyMethod_New PyMethod_New
#endif
+///////////// PyMethodNew2Arg.proto /////////////
+
+// Another wrapping of PyMethod_New that matches the Python3 signature
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyMethod_New2Arg PyMethod_New
+#else
+#define __Pyx_PyMethod_New2Arg(func, self) PyMethod_New(func, self, (PyObject*)Py_TYPE(self))
+#endif
+
/////////////// UnicodeConcatInPlace.proto ////////////////
# if CYTHON_COMPILING_IN_CPYTHON && PY_MAJOR_VERSION >= 3
diff --git a/Cython/Utility/Optimize.c b/Cython/Utility/Optimize.c
index 81aeb316e..7a3e3cd3d 100644
--- a/Cython/Utility/Optimize.c
+++ b/Cython/Utility/Optimize.c
@@ -445,7 +445,7 @@ static CYTHON_INLINE PyObject* __Pyx_set_iterator(PyObject* iterable, int is_set
return iterable;
}
#else
- (void)is_set;
+ CYTHON_UNUSED_VAR(is_set);
*p_source_is_set = 0;
#endif
*p_orig_length = 0;
@@ -461,8 +461,8 @@ static CYTHON_INLINE int __Pyx_set_iter_next(
if (unlikely(!*value)) {
return __Pyx_IterFinish();
}
- (void)orig_length;
- (void)ppos;
+ CYTHON_UNUSED_VAR(orig_length);
+ CYTHON_UNUSED_VAR(ppos);
return 1;
}
#if CYTHON_COMPILING_IN_CPYTHON
@@ -904,7 +904,7 @@ static CYTHON_INLINE int __Pyx__PyBytes_AsDouble_IsSpace(char ch) {
return (ch == 0x20) | !((ch < 0x9) | (ch > 0xd));
}
-static CYTHON_UNUSED double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length) {
+CYTHON_UNUSED static double __Pyx__PyBytes_AsDouble(PyObject *obj, const char* start, Py_ssize_t length) {
double value;
Py_ssize_t i, digits;
const char *last = start + length;
@@ -1318,7 +1318,8 @@ static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, long intval,
}
{{else}}
{{if c_op == '*'}}
- (void)a; (void)b;
+ CYTHON_UNUSED_VAR(a);
+ CYTHON_UNUSED_VAR(b);
#ifdef HAVE_LONG_LONG
ll{{ival}} = {{ival}};
goto long_long;
@@ -1464,8 +1465,8 @@ def zerodiv_check(operand, _is_mod=op == 'Remainder', _needs_check=(order == 'CO
static {{c_ret_type}} {{cfunc_name}}(PyObject *op1, PyObject *op2, double floatval, int inplace, int zerodivision_check) {
const double {{'a' if order == 'CObj' else 'b'}} = floatval;
double {{fval}}{{if op not in ('Eq', 'Ne')}}, result{{endif}};
- // Prevent "unused" warnings.
- (void)inplace; (void)zerodivision_check;
+ CYTHON_UNUSED_VAR(inplace);
+ CYTHON_UNUSED_VAR(zerodivision_check);
{{if op in ('Eq', 'Ne')}}
if (op1 == op2) {
diff --git a/Cython/Utility/StringTools.c b/Cython/Utility/StringTools.c
index 8c92228cb..910fbf6fa 100644
--- a/Cython/Utility/StringTools.c
+++ b/Cython/Utility/StringTools.c
@@ -1012,7 +1012,7 @@ static PyObject* __Pyx_PyUnicode_BuildFromAscii(Py_ssize_t ulength, char* chars,
padding = PyUnicode_FromOrdinal(padding_char);
if (likely(padding) && uoffset > prepend_sign + 1) {
PyObject *tmp;
- PyObject *repeat = PyInt_FromSize_t(uoffset - prepend_sign);
+ PyObject *repeat = PyInt_FromSsize_t(uoffset - prepend_sign);
if (unlikely(!repeat)) goto done_or_error;
tmp = PyNumber_Multiply(padding, repeat);
Py_DECREF(repeat);
diff --git a/Cython/Utility/TypeConversion.c b/Cython/Utility/TypeConversion.c
index cf190f401..a4befa79f 100644
--- a/Cython/Utility/TypeConversion.c
+++ b/Cython/Utility/TypeConversion.c
@@ -519,7 +519,7 @@ no_error:
// GCC diagnostic pragmas were introduced in GCC 4.6
// Used to silence conversion warnings that are ok but cannot be avoided.
-#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
+#if !defined(__INTEL_COMPILER) && defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
#define __Pyx_HAS_GCC_DIAGNOSTIC
#endif
diff --git a/Cython/Utils.py b/Cython/Utils.py
index d6a49f973..fa4801731 100644
--- a/Cython/Utils.py
+++ b/Cython/Utils.py
@@ -579,6 +579,11 @@ class OrderedSet(object):
self._list.append(e)
self._set.add(e)
+ def __bool__(self):
+ return bool(self._set)
+
+ __nonzero__ = __bool__
+
# Class decorator that adds a metaclass and recreates the class with it.
# Copied from 'six'.
@@ -626,3 +631,23 @@ def build_hex_version(version_string):
hexversion = (hexversion << 8) + digit
return '0x%08X' % hexversion
+
+
+def write_depfile(target, source, dependencies):
+ src_base_dir = os.path.dirname(source)
+ cwd = os.getcwd()
+ if not src_base_dir.endswith(os.sep):
+ src_base_dir += os.sep
+ # paths below the base_dir are relative, otherwise absolute
+ paths = []
+ for fname in dependencies:
+ if fname.startswith(src_base_dir):
+ paths.append(os.path.relpath(fname, cwd))
+ else:
+ paths.append(os.path.abspath(fname))
+
+ depline = os.path.relpath(target, cwd) + ": \\\n "
+ depline += " \\\n ".join(paths) + "\n"
+
+ with open(target+'.dep', 'w') as outfile:
+ outfile.write(depline)
diff --git a/Makefile b/Makefile
index 280f30a3a..ddc9e3cc0 100644
--- a/Makefile
+++ b/Makefile
@@ -8,13 +8,15 @@ PARALLEL?=$(shell ${PYTHON} -c 'import sys; print("-j5" if sys.version_info >= (
MANYLINUX_CFLAGS=-O3 -g0 -mtune=generic -pipe -fPIC
MANYLINUX_LDFLAGS=
MANYLINUX_IMAGES= \
- manylinux1_x86_64 \
- manylinux1_i686 \
+ manylinux2014_x86_64 \
+ manylinux2014_i686 \
musllinux_1_1_x86_64 \
musllinux_1_1_aarch64 \
manylinux_2_24_x86_64 \
manylinux_2_24_i686 \
manylinux_2_24_aarch64 \
+ manylinux_2_28_x86_64 \
+ manylinux_2_28_aarch64 \
# manylinux_2_24_ppc64le \
# manylinux_2_24_s390x
diff --git a/README.rst b/README.rst
index 0f56f5661..27d7d8150 100644
--- a/README.rst
+++ b/README.rst
@@ -102,7 +102,9 @@ Similar projects that have a relevance today include:
* Pros: highly language compliant, reasonable performance gains,
support for static application linking (similar to
- `cython_freeze <https://github.com/cython/cython/blob/master/bin/cython_freeze>`_)
+ `cython_freeze <https://github.com/cython/cython/blob/master/bin/cython_freeze>`_
+ but with the ability to bundle library dependencies into a self-contained
+ executable)
* Cons: no support for low-level optimisations and typing
In comparison to the above, Cython provides
diff --git a/Tools/ci-run.sh b/Tools/ci-run.sh
index 4309fd4ad..0fde602fd 100644
--- a/Tools/ci-run.sh
+++ b/Tools/ci-run.sh
@@ -68,14 +68,18 @@ if [[ $PYTHON_VERSION == "2.7"* ]]; then
elif [[ $PYTHON_VERSION == "3."[45]* ]]; then
python -m pip install wheel || exit 1
python -m pip install -r test-requirements-34.txt || exit 1
+elif [[ $PYTHON_VERSION == "pypy-2.7" ]]; then
+ pip install wheel || exit 1
+ pip install -r test-requirements-pypy27.txt || exit 1
else
python -m pip install -U pip "setuptools<60" wheel || exit 1
if [[ $PYTHON_VERSION != *"-dev" || $COVERAGE == "1" ]]; then
python -m pip install -r test-requirements.txt || exit 1
-
if [[ $PYTHON_VERSION != "pypy"* && $PYTHON_VERSION != "3."[1]* ]]; then
python -m pip install -r test-requirements-cpython.txt || exit 1
+ elif [[ $PYTHON_VERSION == "pypy-2.7" ]]; then
+ python -m pip install -r test-requirements-pypy27.txt || exit 1
fi
fi
fi
@@ -108,11 +112,23 @@ export PATH="/usr/lib/ccache:$PATH"
# Most modern compilers allow the last conflicting option
# to override the previous ones, so '-O0 -O3' == '-O3'
# This is true for the latest msvc, gcc and clang
-CFLAGS="-O0 -ggdb -Wall -Wextra"
+if [[ $OSTYPE == "msys" ]]; then # for MSVC cl
+ # /wd disables warnings
+ # 4711 warns that function `x` was selected for automatic inline expansion
+ # 4127 warns that a conditional expression is constant, should be fixed here https://github.com/cython/cython/pull/4317
+ # (off by default) 5045 warns that the compiler will insert Spectre mitigations for memory load if the /Qspectre switch is specified
+ # (off by default) 4820 warns about the code in Python\3.9.6\x64\include ...
+ CFLAGS="-Od /Z7 /MP /W4 /wd4711 /wd4127 /wd5045 /wd4820"
+else
+ CFLAGS="-O0 -ggdb -Wall -Wextra"
+fi
if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then
BUILD_CFLAGS="$CFLAGS -O2"
+ if [[ $CYTHON_COMPILE_ALL == "1" && $OSTYPE != "msys" ]]; then
+ BUILD_CFLAGS="$CFLAGS -O3 -g0 -mtune=generic" # make wheel sizes comparable to standard wheel build
+ fi
if [[ $PYTHON_SYS_VERSION == "2"* ]]; then
BUILD_CFLAGS="$BUILD_CFLAGS -fno-strict-aliasing"
fi
@@ -132,16 +148,19 @@ if [[ $NO_CYTHON_COMPILE != "1" && $PYTHON_VERSION != "pypy"* ]]; then
# COVERAGE can be either "" (empty or not set) or "1" (when we set it)
# STACKLESS can be either "" (empty or not set) or "true" (when we set it)
- # CYTHON_COMPILE_ALL can be either "" (empty or not set) or "1" (when we set it)
if [[ $COVERAGE != "1" && $STACKLESS != "true" && $BACKEND != *"cpp"* &&
- $CYTHON_COMPILE_ALL != "1" && $LIMITED_API == "" && $EXTRA_CFLAGS == "" ]]; then
+ $LIMITED_API == "" && $EXTRA_CFLAGS == "" ]]; then
python setup.py bdist_wheel || exit 1
+ ls -l dist/ || true
fi
+
+ echo "Extension modules created during the build:"
+ find Cython -name "*.so" -ls | sort -k11
fi
if [[ $TEST_CODE_STYLE == "1" ]]; then
make -C docs html || exit 1
-elif [[ $PYTHON_VERSION != "pypy"* ]]; then
+elif [[ $PYTHON_VERSION != "pypy"* && $OSTYPE != "msys" ]]; then
# Run the debugger tests in python-dbg if available
# (but don't fail, because they currently do fail)
PYTHON_DBG=$(python -c 'import sys; print("%d.%d" % sys.version_info[:2])')
diff --git a/Tools/dataclass_test_data/test_dataclasses.py b/Tools/dataclass_test_data/test_dataclasses.py
new file mode 100644
index 000000000..e2eab6957
--- /dev/null
+++ b/Tools/dataclass_test_data/test_dataclasses.py
@@ -0,0 +1,4266 @@
+# Deliberately use "from dataclasses import *". Every name in __all__
+# is tested, so they all must be present. This is a way to catch
+# missing ones.
+
+from dataclasses import *
+
+import abc
+import pickle
+import inspect
+import builtins
+import types
+import weakref
+import unittest
+from unittest.mock import Mock
+from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional, Protocol
+from typing import get_type_hints
+from collections import deque, OrderedDict, namedtuple
+from functools import total_ordering
+
+import typing # Needed for the string "typing.ClassVar[int]" to work as an annotation.
+import dataclasses # Needed for the string "dataclasses.InitVar[int]" to work as an annotation.
+
+# Just any custom exception we can catch.
+class CustomError(Exception): pass
+
+class TestCase(unittest.TestCase):
+ def test_no_fields(self):
+ @dataclass
+ class C:
+ pass
+
+ o = C()
+ self.assertEqual(len(fields(C)), 0)
+
+ def test_no_fields_but_member_variable(self):
+ @dataclass
+ class C:
+ i = 0
+
+ o = C()
+ self.assertEqual(len(fields(C)), 0)
+
+ def test_one_field_no_default(self):
+ @dataclass
+ class C:
+ x: int
+
+ o = C(42)
+ self.assertEqual(o.x, 42)
+
+ def test_field_default_default_factory_error(self):
+ msg = "cannot specify both default and default_factory"
+ with self.assertRaisesRegex(ValueError, msg):
+ @dataclass
+ class C:
+ x: int = field(default=1, default_factory=int)
+
+ def test_field_repr(self):
+ int_field = field(default=1, init=True, repr=False)
+ int_field.name = "id"
+ repr_output = repr(int_field)
+ expected_output = "Field(name='id',type=None," \
+ f"default=1,default_factory={MISSING!r}," \
+ "init=True,repr=False,hash=None," \
+ "compare=True,metadata=mappingproxy({})," \
+ f"kw_only={MISSING!r}," \
+ "_field_type=None)"
+
+ self.assertEqual(repr_output, expected_output)
+
+ def test_named_init_params(self):
+ @dataclass
+ class C:
+ x: int
+
+ o = C(x=32)
+ self.assertEqual(o.x, 32)
+
+ def test_two_fields_one_default(self):
+ @dataclass
+ class C:
+ x: int
+ y: int = 0
+
+ o = C(3)
+ self.assertEqual((o.x, o.y), (3, 0))
+
+ # Non-defaults following defaults.
+ with self.assertRaisesRegex(TypeError,
+ "non-default argument 'y' follows "
+ "default argument"):
+ @dataclass
+ class C:
+ x: int = 0
+ y: int
+
+ # A derived class adds a non-default field after a default one.
+ with self.assertRaisesRegex(TypeError,
+ "non-default argument 'y' follows "
+ "default argument"):
+ @dataclass
+ class B:
+ x: int = 0
+
+ @dataclass
+ class C(B):
+ y: int
+
+ # Override a base class field and add a default to
+ # a field which didn't use to have a default.
+ with self.assertRaisesRegex(TypeError,
+ "non-default argument 'y' follows "
+ "default argument"):
+ @dataclass
+ class B:
+ x: int
+ y: int
+
+ @dataclass
+ class C(B):
+ x: int = 0
+
+ def test_overwrite_hash(self):
+ # Test that declaring this class isn't an error. It should
+ # use the user-provided __hash__.
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ def __hash__(self):
+ return 301
+ self.assertEqual(hash(C(100)), 301)
+
+ # Test that declaring this class isn't an error. It should
+ # use the generated __hash__.
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ def __eq__(self, other):
+ return False
+ self.assertEqual(hash(C(100)), hash((100,)))
+
+ # But this one should generate an exception, because with
+ # unsafe_hash=True, it's an error to have a __hash__ defined.
+ with self.assertRaisesRegex(TypeError,
+ 'Cannot overwrite attribute __hash__'):
+ @dataclass(unsafe_hash=True)
+ class C:
+ def __hash__(self):
+ pass
+
+ # Creating this class should not generate an exception,
+ # because even though __hash__ exists before @dataclass is
+ # called, (due to __eq__ being defined), since it's None
+ # that's okay.
+ @dataclass(unsafe_hash=True)
+ class C:
+ x: int
+ def __eq__(self):
+ pass
+ # The generated hash function works as we'd expect.
+ self.assertEqual(hash(C(10)), hash((10,)))
+
+ # Creating this class should generate an exception, because
+ # __hash__ exists and is not None, which it would be if it
+ # had been auto-generated due to __eq__ being defined.
+ with self.assertRaisesRegex(TypeError,
+ 'Cannot overwrite attribute __hash__'):
+ @dataclass(unsafe_hash=True)
+ class C:
+ x: int
+ def __eq__(self):
+ pass
+ def __hash__(self):
+ pass
+
+ def test_overwrite_fields_in_derived_class(self):
+ # Note that x from C1 replaces x in Base, but the order remains
+ # the same as defined in Base.
+ @dataclass
+ class Base:
+ x: Any = 15.0
+ y: int = 0
+
+ @dataclass
+ class C1(Base):
+ z: int = 10
+ x: int = 15
+
+ o = Base()
+ self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class.<locals>.Base(x=15.0, y=0)')
+
+ o = C1()
+ self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class.<locals>.C1(x=15, y=0, z=10)')
+
+ o = C1(x=5)
+ self.assertEqual(repr(o), 'TestCase.test_overwrite_fields_in_derived_class.<locals>.C1(x=5, y=0, z=10)')
+
+ def test_field_named_self(self):
+ @dataclass
+ class C:
+ self: str
+ c=C('foo')
+ self.assertEqual(c.self, 'foo')
+
+ # Make sure the first parameter is not named 'self'.
+ sig = inspect.signature(C.__init__)
+ first = next(iter(sig.parameters))
+ self.assertNotEqual('self', first)
+
+ # But we do use 'self' if no field named self.
+ @dataclass
+ class C:
+ selfx: str
+
+ # Make sure the first parameter is named 'self'.
+ sig = inspect.signature(C.__init__)
+ first = next(iter(sig.parameters))
+ self.assertEqual('self', first)
+
+ def test_field_named_object(self):
+ @dataclass
+ class C:
+ object: str
+ c = C('foo')
+ self.assertEqual(c.object, 'foo')
+
+ def test_field_named_object_frozen(self):
+ @dataclass(frozen=True)
+ class C:
+ object: str
+ c = C('foo')
+ self.assertEqual(c.object, 'foo')
+
+ def test_field_named_like_builtin(self):
+ # Attribute names can shadow built-in names
+ # since code generation is used.
+ # Ensure that this is not happening.
+ exclusions = {'None', 'True', 'False'}
+ builtins_names = sorted(
+ b for b in builtins.__dict__.keys()
+ if not b.startswith('__') and b not in exclusions
+ )
+ attributes = [(name, str) for name in builtins_names]
+ C = make_dataclass('C', attributes)
+
+ c = C(*[name for name in builtins_names])
+
+ for name in builtins_names:
+ self.assertEqual(getattr(c, name), name)
+
+ def test_field_named_like_builtin_frozen(self):
+ # Attribute names can shadow built-in names
+ # since code generation is used.
+ # Ensure that this is not happening
+ # for frozen data classes.
+ exclusions = {'None', 'True', 'False'}
+ builtins_names = sorted(
+ b for b in builtins.__dict__.keys()
+ if not b.startswith('__') and b not in exclusions
+ )
+ attributes = [(name, str) for name in builtins_names]
+ C = make_dataclass('C', attributes, frozen=True)
+
+ c = C(*[name for name in builtins_names])
+
+ for name in builtins_names:
+ self.assertEqual(getattr(c, name), name)
+
+ def test_0_field_compare(self):
+ # Ensure that order=False is the default.
+ @dataclass
+ class C0:
+ pass
+
+ @dataclass(order=False)
+ class C1:
+ pass
+
+ for cls in [C0, C1]:
+ with self.subTest(cls=cls):
+ self.assertEqual(cls(), cls())
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a > b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaisesRegex(TypeError,
+ f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"):
+ fn(cls(), cls())
+
+ @dataclass(order=True)
+ class C:
+ pass
+ self.assertLessEqual(C(), C())
+ self.assertGreaterEqual(C(), C())
+
+ def test_1_field_compare(self):
+ # Ensure that order=False is the default.
+ @dataclass
+ class C0:
+ x: int
+
+ @dataclass(order=False)
+ class C1:
+ x: int
+
+ for cls in [C0, C1]:
+ with self.subTest(cls=cls):
+ self.assertEqual(cls(1), cls(1))
+ self.assertNotEqual(cls(0), cls(1))
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a > b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaisesRegex(TypeError,
+ f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"):
+ fn(cls(0), cls(0))
+
+ @dataclass(order=True)
+ class C:
+ x: int
+ self.assertLess(C(0), C(1))
+ self.assertLessEqual(C(0), C(1))
+ self.assertLessEqual(C(1), C(1))
+ self.assertGreater(C(1), C(0))
+ self.assertGreaterEqual(C(1), C(0))
+ self.assertGreaterEqual(C(1), C(1))
+
+ def test_simple_compare(self):
+ # Ensure that order=False is the default.
+ @dataclass
+ class C0:
+ x: int
+ y: int
+
+ @dataclass(order=False)
+ class C1:
+ x: int
+ y: int
+
+ for cls in [C0, C1]:
+ with self.subTest(cls=cls):
+ self.assertEqual(cls(0, 0), cls(0, 0))
+ self.assertEqual(cls(1, 2), cls(1, 2))
+ self.assertNotEqual(cls(1, 0), cls(0, 0))
+ self.assertNotEqual(cls(1, 0), cls(1, 1))
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a > b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaisesRegex(TypeError,
+ f"not supported between instances of '{cls.__name__}' and '{cls.__name__}'"):
+ fn(cls(0, 0), cls(0, 0))
+
+ @dataclass(order=True)
+ class C:
+ x: int
+ y: int
+
+ for idx, fn in enumerate([lambda a, b: a == b,
+ lambda a, b: a <= b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ self.assertTrue(fn(C(0, 0), C(0, 0)))
+
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a != b]):
+ with self.subTest(idx=idx):
+ self.assertTrue(fn(C(0, 0), C(0, 1)))
+ self.assertTrue(fn(C(0, 1), C(1, 0)))
+ self.assertTrue(fn(C(1, 0), C(1, 1)))
+
+ for idx, fn in enumerate([lambda a, b: a > b,
+ lambda a, b: a >= b,
+ lambda a, b: a != b]):
+ with self.subTest(idx=idx):
+ self.assertTrue(fn(C(0, 1), C(0, 0)))
+ self.assertTrue(fn(C(1, 0), C(0, 1)))
+ self.assertTrue(fn(C(1, 1), C(1, 0)))
+
+ def test_compare_subclasses(self):
+ # Comparisons fail for subclasses, even if no fields
+ # are added.
+ @dataclass
+ class B:
+ i: int
+
+ @dataclass
+ class C(B):
+ pass
+
+ for idx, (fn, expected) in enumerate([(lambda a, b: a == b, False),
+ (lambda a, b: a != b, True)]):
+ with self.subTest(idx=idx):
+ self.assertEqual(fn(B(0), C(0)), expected)
+
+ for idx, fn in enumerate([lambda a, b: a < b,
+ lambda a, b: a <= b,
+ lambda a, b: a > b,
+ lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaisesRegex(TypeError,
+ "not supported between instances of 'B' and 'C'"):
+ fn(B(0), C(0))
+
+ def test_eq_order(self):
+ # Test combining eq and order.
+ for (eq, order, result ) in [
+ (False, False, 'neither'),
+ (False, True, 'exception'),
+ (True, False, 'eq_only'),
+ (True, True, 'both'),
+ ]:
+ with self.subTest(eq=eq, order=order):
+ if result == 'exception':
+ with self.assertRaisesRegex(ValueError, 'eq must be true if order is true'):
+ @dataclass(eq=eq, order=order)
+ class C:
+ pass
+ else:
+ @dataclass(eq=eq, order=order)
+ class C:
+ pass
+
+ if result == 'neither':
+ self.assertNotIn('__eq__', C.__dict__)
+ self.assertNotIn('__lt__', C.__dict__)
+ self.assertNotIn('__le__', C.__dict__)
+ self.assertNotIn('__gt__', C.__dict__)
+ self.assertNotIn('__ge__', C.__dict__)
+ elif result == 'both':
+ self.assertIn('__eq__', C.__dict__)
+ self.assertIn('__lt__', C.__dict__)
+ self.assertIn('__le__', C.__dict__)
+ self.assertIn('__gt__', C.__dict__)
+ self.assertIn('__ge__', C.__dict__)
+ elif result == 'eq_only':
+ self.assertIn('__eq__', C.__dict__)
+ self.assertNotIn('__lt__', C.__dict__)
+ self.assertNotIn('__le__', C.__dict__)
+ self.assertNotIn('__gt__', C.__dict__)
+ self.assertNotIn('__ge__', C.__dict__)
+ else:
+ assert False, f'unknown result {result!r}'
+
+ def test_field_no_default(self):
+ @dataclass
+ class C:
+ x: int = field()
+
+ self.assertEqual(C(5).x, 5)
+
+ with self.assertRaisesRegex(TypeError,
+ r"__init__\(\) missing 1 required "
+ "positional argument: 'x'"):
+ C()
+
+ def test_field_default(self):
+ default = object()
+ @dataclass
+ class C:
+ x: object = field(default=default)
+
+ self.assertIs(C.x, default)
+ c = C(10)
+ self.assertEqual(c.x, 10)
+
+ # If we delete the instance attribute, we should then see the
+ # class attribute.
+ del c.x
+ self.assertIs(c.x, default)
+
+ self.assertIs(C().x, default)
+
+ def test_not_in_repr(self):
+ @dataclass
+ class C:
+ x: int = field(repr=False)
+ with self.assertRaises(TypeError):
+ C()
+ c = C(10)
+ self.assertEqual(repr(c), 'TestCase.test_not_in_repr.<locals>.C()')
+
+ @dataclass
+ class C:
+ x: int = field(repr=False)
+ y: int
+ c = C(10, 20)
+ self.assertEqual(repr(c), 'TestCase.test_not_in_repr.<locals>.C(y=20)')
+
+ def test_not_in_compare(self):
+ @dataclass
+ class C:
+ x: int = 0
+ y: int = field(compare=False, default=4)
+
+ self.assertEqual(C(), C(0, 20))
+ self.assertEqual(C(1, 10), C(1, 20))
+ self.assertNotEqual(C(3), C(4, 10))
+ self.assertNotEqual(C(3, 10), C(4, 10))
+
+ def test_no_unhashable_default(self):
+ # See bpo-44674.
+ class Unhashable:
+ __hash__ = None
+
+ unhashable_re = 'mutable default .* for field a is not allowed'
+ with self.assertRaisesRegex(ValueError, unhashable_re):
+ @dataclass
+ class A:
+ a: dict = {}
+
+ with self.assertRaisesRegex(ValueError, unhashable_re):
+ @dataclass
+ class A:
+ a: Any = Unhashable()
+
+ # Make sure that the machinery looking for hashability is using the
+ # class's __hash__, not the instance's __hash__.
+ with self.assertRaisesRegex(ValueError, unhashable_re):
+ unhashable = Unhashable()
+ # This shouldn't make the variable hashable.
+ unhashable.__hash__ = lambda: 0
+ @dataclass
+ class A:
+ a: Any = unhashable
+
+ def test_hash_field_rules(self):
+ # Test all 6 cases of:
+ # hash=True/False/None
+ # compare=True/False
+ for (hash_, compare, result ) in [
+ (True, False, 'field' ),
+ (True, True, 'field' ),
+ (False, False, 'absent'),
+ (False, True, 'absent'),
+ (None, False, 'absent'),
+ (None, True, 'field' ),
+ ]:
+ with self.subTest(hash=hash_, compare=compare):
+ @dataclass(unsafe_hash=True)
+ class C:
+ x: int = field(compare=compare, hash=hash_, default=5)
+
+ if result == 'field':
+ # __hash__ contains the field.
+ self.assertEqual(hash(C(5)), hash((5,)))
+ elif result == 'absent':
+ # The field is not present in the hash.
+ self.assertEqual(hash(C(5)), hash(()))
+ else:
+ assert False, f'unknown result {result!r}'
+
+ def test_init_false_no_default(self):
+ # If init=False and no default value, then the field won't be
+ # present in the instance.
+ @dataclass
+ class C:
+ x: int = field(init=False)
+
+ self.assertNotIn('x', C().__dict__)
+
+ @dataclass
+ class C:
+ x: int
+ y: int = 0
+ z: int = field(init=False)
+ t: int = 10
+
+ self.assertNotIn('z', C(0).__dict__)
+ self.assertEqual(vars(C(5)), {'t': 10, 'x': 5, 'y': 0})
+
+ def test_class_marker(self):
+ @dataclass
+ class C:
+ x: int
+ y: str = field(init=False, default=None)
+ z: str = field(repr=False)
+
+ the_fields = fields(C)
+ # the_fields is a tuple of 3 items, each value
+ # is in __annotations__.
+ self.assertIsInstance(the_fields, tuple)
+ for f in the_fields:
+ self.assertIs(type(f), Field)
+ self.assertIn(f.name, C.__annotations__)
+
+ self.assertEqual(len(the_fields), 3)
+
+ self.assertEqual(the_fields[0].name, 'x')
+ self.assertEqual(the_fields[0].type, int)
+ self.assertFalse(hasattr(C, 'x'))
+ self.assertTrue (the_fields[0].init)
+ self.assertTrue (the_fields[0].repr)
+ self.assertEqual(the_fields[1].name, 'y')
+ self.assertEqual(the_fields[1].type, str)
+ self.assertIsNone(getattr(C, 'y'))
+ self.assertFalse(the_fields[1].init)
+ self.assertTrue (the_fields[1].repr)
+ self.assertEqual(the_fields[2].name, 'z')
+ self.assertEqual(the_fields[2].type, str)
+ self.assertFalse(hasattr(C, 'z'))
+ self.assertTrue (the_fields[2].init)
+ self.assertFalse(the_fields[2].repr)
+
+ def test_field_order(self):
+ @dataclass
+ class B:
+ a: str = 'B:a'
+ b: str = 'B:b'
+ c: str = 'B:c'
+
+ @dataclass
+ class C(B):
+ b: str = 'C:b'
+
+ self.assertEqual([(f.name, f.default) for f in fields(C)],
+ [('a', 'B:a'),
+ ('b', 'C:b'),
+ ('c', 'B:c')])
+
+ @dataclass
+ class D(B):
+ c: str = 'D:c'
+
+ self.assertEqual([(f.name, f.default) for f in fields(D)],
+ [('a', 'B:a'),
+ ('b', 'B:b'),
+ ('c', 'D:c')])
+
+ @dataclass
+ class E(D):
+ a: str = 'E:a'
+ d: str = 'E:d'
+
+ self.assertEqual([(f.name, f.default) for f in fields(E)],
+ [('a', 'E:a'),
+ ('b', 'B:b'),
+ ('c', 'D:c'),
+ ('d', 'E:d')])
+
+ def test_class_attrs(self):
+ # We only have a class attribute if a default value is
+ # specified, either directly or via a field with a default.
+ default = object()
+ @dataclass
+ class C:
+ x: int
+ y: int = field(repr=False)
+ z: object = default
+ t: int = field(default=100)
+
+ self.assertFalse(hasattr(C, 'x'))
+ self.assertFalse(hasattr(C, 'y'))
+ self.assertIs (C.z, default)
+ self.assertEqual(C.t, 100)
+
+ def test_disallowed_mutable_defaults(self):
+ # For the known types, don't allow mutable default values.
+ for typ, empty, non_empty in [(list, [], [1]),
+ (dict, {}, {0:1}),
+ (set, set(), set([1])),
+ ]:
+ with self.subTest(typ=typ):
+ # Can't use a zero-length value.
+ with self.assertRaisesRegex(ValueError,
+ f'mutable default {typ} for field '
+ 'x is not allowed'):
+ @dataclass
+ class Point:
+ x: typ = empty
+
+
+ # Nor a non-zero-length value
+ with self.assertRaisesRegex(ValueError,
+ f'mutable default {typ} for field '
+ 'y is not allowed'):
+ @dataclass
+ class Point:
+ y: typ = non_empty
+
+ # Check subtypes also fail.
+ class Subclass(typ): pass
+
+ with self.assertRaisesRegex(ValueError,
+ f"mutable default .*Subclass'>"
+ ' for field z is not allowed'
+ ):
+ @dataclass
+ class Point:
+ z: typ = Subclass()
+
+ # Because this is a ClassVar, it can be mutable.
+ @dataclass
+ class C:
+ z: ClassVar[typ] = typ()
+
+ # Because this is a ClassVar, it can be mutable.
+ @dataclass
+ class C:
+ x: ClassVar[typ] = Subclass()
+
+ def test_deliberately_mutable_defaults(self):
+ # If a mutable default isn't in the known list of
+ # (list, dict, set), then it's okay.
+ class Mutable:
+ def __init__(self):
+ self.l = []
+
+ @dataclass
+ class C:
+ x: Mutable
+
+ # These 2 instances will share this value of x.
+ lst = Mutable()
+ o1 = C(lst)
+ o2 = C(lst)
+ self.assertEqual(o1, o2)
+ o1.x.l.extend([1, 2])
+ self.assertEqual(o1, o2)
+ self.assertEqual(o1.x.l, [1, 2])
+ self.assertIs(o1.x, o2.x)
+
+ def test_no_options(self):
+ # Call with dataclass().
+ @dataclass()
+ class C:
+ x: int
+
+ self.assertEqual(C(42).x, 42)
+
+ def test_not_tuple(self):
+ # Make sure we can't be compared to a tuple.
+ @dataclass
+ class Point:
+ x: int
+ y: int
+ self.assertNotEqual(Point(1, 2), (1, 2))
+
+ # And that we can't compare to another unrelated dataclass.
+ @dataclass
+ class C:
+ x: int
+ y: int
+ self.assertNotEqual(Point(1, 3), C(1, 3))
+
+ def test_not_other_dataclass(self):
+ # Test that some of the problems with namedtuple don't happen
+ # here.
+ @dataclass
+ class Point3D:
+ x: int
+ y: int
+ z: int
+
+ @dataclass
+ class Date:
+ year: int
+ month: int
+ day: int
+
+ self.assertNotEqual(Point3D(2017, 6, 3), Date(2017, 6, 3))
+ self.assertNotEqual(Point3D(1, 2, 3), (1, 2, 3))
+
+ # Make sure we can't unpack.
+ with self.assertRaisesRegex(TypeError, 'unpack'):
+ x, y, z = Point3D(4, 5, 6)
+
+ # Make sure another class with the same field names isn't
+ # equal.
+ @dataclass
+ class Point3Dv1:
+ x: int = 0
+ y: int = 0
+ z: int = 0
+ self.assertNotEqual(Point3D(0, 0, 0), Point3Dv1())
+
+ def test_function_annotations(self):
+ # Some dummy class and instance to use as a default.
+ class F:
+ pass
+ f = F()
+
+ def validate_class(cls):
+ # First, check __annotations__, even though they're not
+ # function annotations.
+ self.assertEqual(cls.__annotations__['i'], int)
+ self.assertEqual(cls.__annotations__['j'], str)
+ self.assertEqual(cls.__annotations__['k'], F)
+ self.assertEqual(cls.__annotations__['l'], float)
+ self.assertEqual(cls.__annotations__['z'], complex)
+
+ # Verify __init__.
+
+ signature = inspect.signature(cls.__init__)
+ # Check the return type, should be None.
+ self.assertIs(signature.return_annotation, None)
+
+ # Check each parameter.
+ params = iter(signature.parameters.values())
+ param = next(params)
+ # This is testing an internal name, and probably shouldn't be tested.
+ self.assertEqual(param.name, 'self')
+ param = next(params)
+ self.assertEqual(param.name, 'i')
+ self.assertIs (param.annotation, int)
+ self.assertEqual(param.default, inspect.Parameter.empty)
+ self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ param = next(params)
+ self.assertEqual(param.name, 'j')
+ self.assertIs (param.annotation, str)
+ self.assertEqual(param.default, inspect.Parameter.empty)
+ self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ param = next(params)
+ self.assertEqual(param.name, 'k')
+ self.assertIs (param.annotation, F)
+ # Don't test for the default, since it's set to MISSING.
+ self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ param = next(params)
+ self.assertEqual(param.name, 'l')
+ self.assertIs (param.annotation, float)
+ # Don't test for the default, since it's set to MISSING.
+ self.assertEqual(param.kind, inspect.Parameter.POSITIONAL_OR_KEYWORD)
+ self.assertRaises(StopIteration, next, params)
+
+
+ @dataclass
+ class C:
+ i: int
+ j: str
+ k: F = f
+ l: float=field(default=None)
+ z: complex=field(default=3+4j, init=False)
+
+ validate_class(C)
+
+ # Now repeat with __hash__.
+ @dataclass(frozen=True, unsafe_hash=True)
+ class C:
+ i: int
+ j: str
+ k: F = f
+ l: float=field(default=None)
+ z: complex=field(default=3+4j, init=False)
+
+ validate_class(C)
+
+ def test_missing_default(self):
+ # Test that MISSING works the same as a default not being
+ # specified.
+ @dataclass
+ class C:
+ x: int=field(default=MISSING)
+ with self.assertRaisesRegex(TypeError,
+ r'__init__\(\) missing 1 required '
+ 'positional argument'):
+ C()
+ self.assertNotIn('x', C.__dict__)
+
+ @dataclass
+ class D:
+ x: int
+ with self.assertRaisesRegex(TypeError,
+ r'__init__\(\) missing 1 required '
+ 'positional argument'):
+ D()
+ self.assertNotIn('x', D.__dict__)
+
+ def test_missing_default_factory(self):
+ # Test that MISSING works the same as a default factory not
+ # being specified (which is really the same as a default not
+ # being specified, too).
+ @dataclass
+ class C:
+ x: int=field(default_factory=MISSING)
+ with self.assertRaisesRegex(TypeError,
+ r'__init__\(\) missing 1 required '
+ 'positional argument'):
+ C()
+ self.assertNotIn('x', C.__dict__)
+
+ @dataclass
+ class D:
+ x: int=field(default=MISSING, default_factory=MISSING)
+ with self.assertRaisesRegex(TypeError,
+ r'__init__\(\) missing 1 required '
+ 'positional argument'):
+ D()
+ self.assertNotIn('x', D.__dict__)
+
+ def test_missing_repr(self):
+ self.assertIn('MISSING_TYPE object', repr(MISSING))
+
+ def test_dont_include_other_annotations(self):
+ @dataclass
+ class C:
+ i: int
+ def foo(self) -> int:
+ return 4
+ @property
+ def bar(self) -> int:
+ return 5
+ self.assertEqual(list(C.__annotations__), ['i'])
+ self.assertEqual(C(10).foo(), 4)
+ self.assertEqual(C(10).bar, 5)
+ self.assertEqual(C(10).i, 10)
+
+ def test_post_init(self):
+ # Just make sure it gets called
+ @dataclass
+ class C:
+ def __post_init__(self):
+ raise CustomError()
+ with self.assertRaises(CustomError):
+ C()
+
+ @dataclass
+ class C:
+ i: int = 10
+ def __post_init__(self):
+ if self.i == 10:
+ raise CustomError()
+ with self.assertRaises(CustomError):
+ C()
+ # post-init gets called, but doesn't raise. This is just
+ # checking that self is used correctly.
+ C(5)
+
+ # If there's not an __init__, then post-init won't get called.
+ @dataclass(init=False)
+ class C:
+ def __post_init__(self):
+ raise CustomError()
+ # Creating the class won't raise
+ C()
+
+ @dataclass
+ class C:
+ x: int = 0
+ def __post_init__(self):
+ self.x *= 2
+ self.assertEqual(C().x, 0)
+ self.assertEqual(C(2).x, 4)
+
+ # Make sure that if we're frozen, post-init can't set
+ # attributes.
+ @dataclass(frozen=True)
+ class C:
+ x: int = 0
+ def __post_init__(self):
+ self.x *= 2
+ with self.assertRaises(FrozenInstanceError):
+ C()
+
+ def test_post_init_super(self):
+ # Make sure super() post-init isn't called by default.
+ class B:
+ def __post_init__(self):
+ raise CustomError()
+
+ @dataclass
+ class C(B):
+ def __post_init__(self):
+ self.x = 5
+
+ self.assertEqual(C().x, 5)
+
+ # Now call super(), and it will raise.
+ @dataclass
+ class C(B):
+ def __post_init__(self):
+ super().__post_init__()
+
+ with self.assertRaises(CustomError):
+ C()
+
+ # Make sure post-init is called, even if not defined in our
+ # class.
+ @dataclass
+ class C(B):
+ pass
+
+ with self.assertRaises(CustomError):
+ C()
+
+ def test_post_init_staticmethod(self):
+ flag = False
+ @dataclass
+ class C:
+ x: int
+ y: int
+ @staticmethod
+ def __post_init__():
+ nonlocal flag
+ flag = True
+
+ self.assertFalse(flag)
+ c = C(3, 4)
+ self.assertEqual((c.x, c.y), (3, 4))
+ self.assertTrue(flag)
+
+ def test_post_init_classmethod(self):
+ @dataclass
+ class C:
+ flag = False
+ x: int
+ y: int
+ @classmethod
+ def __post_init__(cls):
+ cls.flag = True
+
+ self.assertFalse(C.flag)
+ c = C(3, 4)
+ self.assertEqual((c.x, c.y), (3, 4))
+ self.assertTrue(C.flag)
+
+ def test_post_init_not_auto_added(self):
+ # See bpo-46757, which had proposed always adding __post_init__. As
+ # Raymond Hettinger pointed out, that would be a breaking change. So,
+ # add a test to make sure that the current behavior doesn't change.
+
+ @dataclass
+ class A0:
+ pass
+
+ @dataclass
+ class B0:
+ b_called: bool = False
+ def __post_init__(self):
+ self.b_called = True
+
+ @dataclass
+ class C0(A0, B0):
+ c_called: bool = False
+ def __post_init__(self):
+ super().__post_init__()
+ self.c_called = True
+
+ # Since A0 has no __post_init__, and one wasn't automatically added
+ # (because that's the rule: it's never added by @dataclass, it's only
+ # the class author that can add it), then B0.__post_init__ is called.
+ # Verify that.
+ c = C0()
+ self.assertTrue(c.b_called)
+ self.assertTrue(c.c_called)
+
+ ######################################
+ # Now, the same thing, except A1 defines __post_init__.
+ @dataclass
+ class A1:
+ def __post_init__(self):
+ pass
+
+ @dataclass
+ class B1:
+ b_called: bool = False
+ def __post_init__(self):
+ self.b_called = True
+
+ @dataclass
+ class C1(A1, B1):
+ c_called: bool = False
+ def __post_init__(self):
+ super().__post_init__()
+ self.c_called = True
+
+ # This time, B1.__post_init__ isn't being called. This mimics what
+ # would happen if A1.__post_init__ had been automatically added,
+ # instead of manually added as we see here. This test isn't really
+ # needed, but I'm including it just to demonstrate the changed
+ # behavior when A1 does define __post_init__.
+ c = C1()
+ self.assertFalse(c.b_called)
+ self.assertTrue(c.c_called)
+
+ def test_class_var(self):
+ # Make sure ClassVars are ignored in __init__, __repr__, etc.
+ @dataclass
+ class C:
+ x: int
+ y: int = 10
+ z: ClassVar[int] = 1000
+ w: ClassVar[int] = 2000
+ t: ClassVar[int] = 3000
+ s: ClassVar = 4000
+
+ c = C(5)
+ self.assertEqual(repr(c), 'TestCase.test_class_var.<locals>.C(x=5, y=10)')
+ self.assertEqual(len(fields(C)), 2) # We have 2 fields.
+ self.assertEqual(len(C.__annotations__), 6) # And 4 ClassVars.
+ self.assertEqual(c.z, 1000)
+ self.assertEqual(c.w, 2000)
+ self.assertEqual(c.t, 3000)
+ self.assertEqual(c.s, 4000)
+ C.z += 1
+ self.assertEqual(c.z, 1001)
+ c = C(20)
+ self.assertEqual((c.x, c.y), (20, 10))
+ self.assertEqual(c.z, 1001)
+ self.assertEqual(c.w, 2000)
+ self.assertEqual(c.t, 3000)
+ self.assertEqual(c.s, 4000)
+
+ def test_class_var_no_default(self):
+ # If a ClassVar has no default value, it should not be set on the class.
+ @dataclass
+ class C:
+ x: ClassVar[int]
+
+ self.assertNotIn('x', C.__dict__)
+
+ def test_class_var_default_factory(self):
+ # It makes no sense for a ClassVar to have a default factory. When
+ # would it be called? Call it yourself, since it's class-wide.
+ with self.assertRaisesRegex(TypeError,
+ 'cannot have a default factory'):
+ @dataclass
+ class C:
+ x: ClassVar[int] = field(default_factory=int)
+
+ self.assertNotIn('x', C.__dict__)
+
+ def test_class_var_with_default(self):
+ # If a ClassVar has a default value, it should be set on the class.
+ @dataclass
+ class C:
+ x: ClassVar[int] = 10
+ self.assertEqual(C.x, 10)
+
+ @dataclass
+ class C:
+ x: ClassVar[int] = field(default=10)
+ self.assertEqual(C.x, 10)
+
+ def test_class_var_frozen(self):
+ # Make sure ClassVars work even if we're frozen.
+ @dataclass(frozen=True)
+ class C:
+ x: int
+ y: int = 10
+ z: ClassVar[int] = 1000
+ w: ClassVar[int] = 2000
+ t: ClassVar[int] = 3000
+
+ c = C(5)
+ self.assertEqual(repr(C(5)), 'TestCase.test_class_var_frozen.<locals>.C(x=5, y=10)')
+ self.assertEqual(len(fields(C)), 2) # We have 2 fields
+ self.assertEqual(len(C.__annotations__), 5) # And 3 ClassVars
+ self.assertEqual(c.z, 1000)
+ self.assertEqual(c.w, 2000)
+ self.assertEqual(c.t, 3000)
+ # We can still modify the ClassVar, it's only instances that are
+ # frozen.
+ C.z += 1
+ self.assertEqual(c.z, 1001)
+ c = C(20)
+ self.assertEqual((c.x, c.y), (20, 10))
+ self.assertEqual(c.z, 1001)
+ self.assertEqual(c.w, 2000)
+ self.assertEqual(c.t, 3000)
+
+ def test_init_var_no_default(self):
+ # If an InitVar has no default value, it should not be set on the class.
+ @dataclass
+ class C:
+ x: InitVar[int]
+
+ self.assertNotIn('x', C.__dict__)
+
+ def test_init_var_default_factory(self):
+        # It makes no sense for an InitVar to have a default factory. When
+        # would it be called? An InitVar is only consumed by __init__/__post_init__.
+ with self.assertRaisesRegex(TypeError,
+ 'cannot have a default factory'):
+ @dataclass
+ class C:
+ x: InitVar[int] = field(default_factory=int)
+
+ self.assertNotIn('x', C.__dict__)
+
+ def test_init_var_with_default(self):
+ # If an InitVar has a default value, it should be set on the class.
+ @dataclass
+ class C:
+ x: InitVar[int] = 10
+ self.assertEqual(C.x, 10)
+
+ @dataclass
+ class C:
+ x: InitVar[int] = field(default=10)
+ self.assertEqual(C.x, 10)
+
+ def test_init_var(self):
+ @dataclass
+ class C:
+ x: int = None
+ init_param: InitVar[int] = None
+
+ def __post_init__(self, init_param):
+ if self.x is None:
+ self.x = init_param*2
+
+ c = C(init_param=10)
+ self.assertEqual(c.x, 20)
+
+ def test_init_var_preserve_type(self):
+ self.assertEqual(InitVar[int].type, int)
+
+ # Make sure the repr is correct.
+ self.assertEqual(repr(InitVar[int]), 'dataclasses.InitVar[int]')
+ self.assertEqual(repr(InitVar[List[int]]),
+ 'dataclasses.InitVar[typing.List[int]]')
+ self.assertEqual(repr(InitVar[list[int]]),
+ 'dataclasses.InitVar[list[int]]')
+ self.assertEqual(repr(InitVar[int|str]),
+ 'dataclasses.InitVar[int | str]')
+
+ def test_init_var_inheritance(self):
+ # Note that this deliberately tests that a dataclass need not
+ # have a __post_init__ function if it has an InitVar field.
+ # It could just be used in a derived class, as shown here.
+ @dataclass
+ class Base:
+ x: int
+ init_base: InitVar[int]
+
+ # We can instantiate by passing the InitVar, even though
+ # it's not used.
+ b = Base(0, 10)
+ self.assertEqual(vars(b), {'x': 0})
+
+ @dataclass
+ class C(Base):
+ y: int
+ init_derived: InitVar[int]
+
+ def __post_init__(self, init_base, init_derived):
+ self.x = self.x + init_base
+ self.y = self.y + init_derived
+
+ c = C(10, 11, 50, 51)
+ self.assertEqual(vars(c), {'x': 21, 'y': 101})
+
+ def test_default_factory(self):
+ # Test a factory that returns a new list.
+ @dataclass
+ class C:
+ x: int
+ y: list = field(default_factory=list)
+
+ c0 = C(3)
+ c1 = C(3)
+ self.assertEqual(c0.x, 3)
+ self.assertEqual(c0.y, [])
+ self.assertEqual(c0, c1)
+ self.assertIsNot(c0.y, c1.y)
+ self.assertEqual(astuple(C(5, [1])), (5, [1]))
+
+ # Test a factory that returns a shared list.
+ l = []
+ @dataclass
+ class C:
+ x: int
+ y: list = field(default_factory=lambda: l)
+
+ c0 = C(3)
+ c1 = C(3)
+ self.assertEqual(c0.x, 3)
+ self.assertEqual(c0.y, [])
+ self.assertEqual(c0, c1)
+ self.assertIs(c0.y, c1.y)
+ self.assertEqual(astuple(C(5, [1])), (5, [1]))
+
+ # Test various other field flags.
+ # repr
+ @dataclass
+ class C:
+ x: list = field(default_factory=list, repr=False)
+ self.assertEqual(repr(C()), 'TestCase.test_default_factory.<locals>.C()')
+ self.assertEqual(C().x, [])
+
+ # hash
+ @dataclass(unsafe_hash=True)
+ class C:
+ x: list = field(default_factory=list, hash=False)
+ self.assertEqual(astuple(C()), ([],))
+ self.assertEqual(hash(C()), hash(()))
+
+ # init (see also test_default_factory_with_no_init)
+ @dataclass
+ class C:
+ x: list = field(default_factory=list, init=False)
+ self.assertEqual(astuple(C()), ([],))
+
+ # compare
+ @dataclass
+ class C:
+ x: list = field(default_factory=list, compare=False)
+ self.assertEqual(C(), C([1]))
+
+ def test_default_factory_with_no_init(self):
+ # We need a factory with a side effect.
+ factory = Mock()
+
+ @dataclass
+ class C:
+ x: list = field(default_factory=factory, init=False)
+
+ # Make sure the default factory is called for each new instance.
+ C().x
+ self.assertEqual(factory.call_count, 1)
+ C().x
+ self.assertEqual(factory.call_count, 2)
+
+ def test_default_factory_not_called_if_value_given(self):
+ # We need a factory that we can test if it's been called.
+ factory = Mock()
+
+ @dataclass
+ class C:
+ x: int = field(default_factory=factory)
+
+ # Make sure that if a field has a default factory function,
+ # it's not called if a value is specified.
+ C().x
+ self.assertEqual(factory.call_count, 1)
+ self.assertEqual(C(10).x, 10)
+ self.assertEqual(factory.call_count, 1)
+ C().x
+ self.assertEqual(factory.call_count, 2)
+
+ def test_default_factory_derived(self):
+ # See bpo-32896.
+ @dataclass
+ class Foo:
+ x: dict = field(default_factory=dict)
+
+ @dataclass
+ class Bar(Foo):
+ y: int = 1
+
+ self.assertEqual(Foo().x, {})
+ self.assertEqual(Bar().x, {})
+ self.assertEqual(Bar().y, 1)
+
+ @dataclass
+ class Baz(Foo):
+ pass
+ self.assertEqual(Baz().x, {})
+
+ def test_intermediate_non_dataclass(self):
+ # Test that an intermediate class that defines
+ # annotations does not define fields.
+
+ @dataclass
+ class A:
+ x: int
+
+ class B(A):
+ y: int
+
+ @dataclass
+ class C(B):
+ z: int
+
+ c = C(1, 3)
+ self.assertEqual((c.x, c.z), (1, 3))
+
+ # .y was not initialized.
+ with self.assertRaisesRegex(AttributeError,
+ 'object has no attribute'):
+ c.y
+
+ # And if we again derive a non-dataclass, no fields are added.
+ class D(C):
+ t: int
+ d = D(4, 5)
+ self.assertEqual((d.x, d.z), (4, 5))
+
+ def test_classvar_default_factory(self):
+ # It's an error for a ClassVar to have a factory function.
+ with self.assertRaisesRegex(TypeError,
+ 'cannot have a default factory'):
+ @dataclass
+ class C:
+ x: ClassVar[int] = field(default_factory=int)
+
+ def test_is_dataclass(self):
+ class NotDataClass:
+ pass
+
+ self.assertFalse(is_dataclass(0))
+ self.assertFalse(is_dataclass(int))
+ self.assertFalse(is_dataclass(NotDataClass))
+ self.assertFalse(is_dataclass(NotDataClass()))
+
+ @dataclass
+ class C:
+ x: int
+
+ @dataclass
+ class D:
+ d: C
+ e: int
+
+ c = C(10)
+ d = D(c, 4)
+
+ self.assertTrue(is_dataclass(C))
+ self.assertTrue(is_dataclass(c))
+ self.assertFalse(is_dataclass(c.x))
+ self.assertTrue(is_dataclass(d.d))
+ self.assertFalse(is_dataclass(d.e))
+
+ def test_is_dataclass_when_getattr_always_returns(self):
+ # See bpo-37868.
+ class A:
+ def __getattr__(self, key):
+ return 0
+ self.assertFalse(is_dataclass(A))
+ a = A()
+
+ # Also test for an instance attribute.
+ class B:
+ pass
+ b = B()
+ b.__dataclass_fields__ = []
+
+ for obj in a, b:
+ with self.subTest(obj=obj):
+ self.assertFalse(is_dataclass(obj))
+
+ # Indirect tests for _is_dataclass_instance().
+ with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'):
+ asdict(obj)
+ with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'):
+ astuple(obj)
+ with self.assertRaisesRegex(TypeError, 'should be called on dataclass instances'):
+ replace(obj, x=0)
+
+ def test_is_dataclass_genericalias(self):
+ @dataclass
+ class A(types.GenericAlias):
+ origin: type
+ args: type
+ self.assertTrue(is_dataclass(A))
+ a = A(list, int)
+ self.assertTrue(is_dataclass(type(a)))
+ self.assertTrue(is_dataclass(a))
+
+
+ def test_helper_fields_with_class_instance(self):
+ # Check that we can call fields() on either a class or instance,
+ # and get back the same thing.
+ @dataclass
+ class C:
+ x: int
+ y: float
+
+ self.assertEqual(fields(C), fields(C(0, 0.0)))
+
+ def test_helper_fields_exception(self):
+ # Check that TypeError is raised if not passed a dataclass or
+ # instance.
+ with self.assertRaisesRegex(TypeError, 'dataclass type or instance'):
+ fields(0)
+
+ class C: pass
+ with self.assertRaisesRegex(TypeError, 'dataclass type or instance'):
+ fields(C)
+ with self.assertRaisesRegex(TypeError, 'dataclass type or instance'):
+ fields(C())
+
+ def test_helper_asdict(self):
+ # Basic tests for asdict(), it should return a new dictionary.
+ @dataclass
+ class C:
+ x: int
+ y: int
+ c = C(1, 2)
+
+ self.assertEqual(asdict(c), {'x': 1, 'y': 2})
+ self.assertEqual(asdict(c), asdict(c))
+ self.assertIsNot(asdict(c), asdict(c))
+ c.x = 42
+ self.assertEqual(asdict(c), {'x': 42, 'y': 2})
+ self.assertIs(type(asdict(c)), dict)
+
+ def test_helper_asdict_raises_on_classes(self):
+ # asdict() should raise on a class object.
+ @dataclass
+ class C:
+ x: int
+ y: int
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ asdict(C)
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ asdict(int)
+
+ def test_helper_asdict_copy_values(self):
+ @dataclass
+ class C:
+ x: int
+ y: List[int] = field(default_factory=list)
+ initial = []
+ c = C(1, initial)
+ d = asdict(c)
+ self.assertEqual(d['y'], initial)
+ self.assertIsNot(d['y'], initial)
+ c = C(1)
+ d = asdict(c)
+ d['y'].append(1)
+ self.assertEqual(c.y, [])
+
+ def test_helper_asdict_nested(self):
+ @dataclass
+ class UserId:
+ token: int
+ group: int
+ @dataclass
+ class User:
+ name: str
+ id: UserId
+ u = User('Joe', UserId(123, 1))
+ d = asdict(u)
+ self.assertEqual(d, {'name': 'Joe', 'id': {'token': 123, 'group': 1}})
+ self.assertIsNot(asdict(u), asdict(u))
+ u.id.group = 2
+ self.assertEqual(asdict(u), {'name': 'Joe',
+ 'id': {'token': 123, 'group': 2}})
+
+ def test_helper_asdict_builtin_containers(self):
+ @dataclass
+ class User:
+ name: str
+ id: int
+ @dataclass
+ class GroupList:
+ id: int
+ users: List[User]
+ @dataclass
+ class GroupTuple:
+ id: int
+ users: Tuple[User, ...]
+ @dataclass
+ class GroupDict:
+ id: int
+ users: Dict[str, User]
+ a = User('Alice', 1)
+ b = User('Bob', 2)
+ gl = GroupList(0, [a, b])
+ gt = GroupTuple(0, (a, b))
+ gd = GroupDict(0, {'first': a, 'second': b})
+ self.assertEqual(asdict(gl), {'id': 0, 'users': [{'name': 'Alice', 'id': 1},
+ {'name': 'Bob', 'id': 2}]})
+ self.assertEqual(asdict(gt), {'id': 0, 'users': ({'name': 'Alice', 'id': 1},
+ {'name': 'Bob', 'id': 2})})
+ self.assertEqual(asdict(gd), {'id': 0, 'users': {'first': {'name': 'Alice', 'id': 1},
+ 'second': {'name': 'Bob', 'id': 2}}})
+
+ def test_helper_asdict_builtin_object_containers(self):
+ @dataclass
+ class Child:
+ d: object
+
+ @dataclass
+ class Parent:
+ child: Child
+
+ self.assertEqual(asdict(Parent(Child([1]))), {'child': {'d': [1]}})
+ self.assertEqual(asdict(Parent(Child({1: 2}))), {'child': {'d': {1: 2}}})
+
+ def test_helper_asdict_factory(self):
+ @dataclass
+ class C:
+ x: int
+ y: int
+ c = C(1, 2)
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, OrderedDict([('x', 1), ('y', 2)]))
+ self.assertIsNot(d, asdict(c, dict_factory=OrderedDict))
+ c.x = 42
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, OrderedDict([('x', 42), ('y', 2)]))
+ self.assertIs(type(d), OrderedDict)
+
+ def test_helper_asdict_namedtuple(self):
+ T = namedtuple('T', 'a b c')
+ @dataclass
+ class C:
+ x: str
+ y: T
+ c = C('outer', T(1, C('inner', T(11, 12, 13)), 2))
+
+ d = asdict(c)
+ self.assertEqual(d, {'x': 'outer',
+ 'y': T(1,
+ {'x': 'inner',
+ 'y': T(11, 12, 13)},
+ 2),
+ }
+ )
+
+ # Now with a dict_factory. OrderedDict is convenient, but
+ # since it compares to dicts, we also need to have separate
+ # assertIs tests.
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, {'x': 'outer',
+ 'y': T(1,
+ {'x': 'inner',
+ 'y': T(11, 12, 13)},
+ 2),
+ }
+ )
+
+ # Make sure that the returned dicts are actually OrderedDicts.
+ self.assertIs(type(d), OrderedDict)
+ self.assertIs(type(d['y'][1]), OrderedDict)
+
+ def test_helper_asdict_namedtuple_key(self):
+ # Ensure that a field that contains a dict which has a
+ # namedtuple as a key works with asdict().
+
+ @dataclass
+ class C:
+ f: dict
+ T = namedtuple('T', 'a')
+
+ c = C({T('an a'): 0})
+
+ self.assertEqual(asdict(c), {'f': {T(a='an a'): 0}})
+
+ def test_helper_asdict_namedtuple_derived(self):
+ class T(namedtuple('Tbase', 'a')):
+ def my_a(self):
+ return self.a
+
+ @dataclass
+ class C:
+ f: T
+
+ t = T(6)
+ c = C(t)
+
+ d = asdict(c)
+ self.assertEqual(d, {'f': T(a=6)})
+ # Make sure that t has been copied, not used directly.
+ self.assertIsNot(d['f'], t)
+ self.assertEqual(d['f'].my_a(), 6)
+
+ def test_helper_astuple(self):
+ # Basic tests for astuple(), it should return a new tuple.
+ @dataclass
+ class C:
+ x: int
+ y: int = 0
+ c = C(1)
+
+ self.assertEqual(astuple(c), (1, 0))
+ self.assertEqual(astuple(c), astuple(c))
+ self.assertIsNot(astuple(c), astuple(c))
+ c.y = 42
+ self.assertEqual(astuple(c), (1, 42))
+ self.assertIs(type(astuple(c)), tuple)
+
+ def test_helper_astuple_raises_on_classes(self):
+ # astuple() should raise on a class object.
+ @dataclass
+ class C:
+ x: int
+ y: int
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ astuple(C)
+ with self.assertRaisesRegex(TypeError, 'dataclass instance'):
+ astuple(int)
+
+ def test_helper_astuple_copy_values(self):
+ @dataclass
+ class C:
+ x: int
+ y: List[int] = field(default_factory=list)
+ initial = []
+ c = C(1, initial)
+ t = astuple(c)
+ self.assertEqual(t[1], initial)
+ self.assertIsNot(t[1], initial)
+ c = C(1)
+ t = astuple(c)
+ t[1].append(1)
+ self.assertEqual(c.y, [])
+
+ def test_helper_astuple_nested(self):
+ @dataclass
+ class UserId:
+ token: int
+ group: int
+ @dataclass
+ class User:
+ name: str
+ id: UserId
+ u = User('Joe', UserId(123, 1))
+ t = astuple(u)
+ self.assertEqual(t, ('Joe', (123, 1)))
+ self.assertIsNot(astuple(u), astuple(u))
+ u.id.group = 2
+ self.assertEqual(astuple(u), ('Joe', (123, 2)))
+
+ def test_helper_astuple_builtin_containers(self):
+ @dataclass
+ class User:
+ name: str
+ id: int
+ @dataclass
+ class GroupList:
+ id: int
+ users: List[User]
+ @dataclass
+ class GroupTuple:
+ id: int
+ users: Tuple[User, ...]
+ @dataclass
+ class GroupDict:
+ id: int
+ users: Dict[str, User]
+ a = User('Alice', 1)
+ b = User('Bob', 2)
+ gl = GroupList(0, [a, b])
+ gt = GroupTuple(0, (a, b))
+ gd = GroupDict(0, {'first': a, 'second': b})
+ self.assertEqual(astuple(gl), (0, [('Alice', 1), ('Bob', 2)]))
+ self.assertEqual(astuple(gt), (0, (('Alice', 1), ('Bob', 2))))
+ self.assertEqual(astuple(gd), (0, {'first': ('Alice', 1), 'second': ('Bob', 2)}))
+
+ def test_helper_astuple_builtin_object_containers(self):
+ @dataclass
+ class Child:
+ d: object
+
+ @dataclass
+ class Parent:
+ child: Child
+
+ self.assertEqual(astuple(Parent(Child([1]))), (([1],),))
+ self.assertEqual(astuple(Parent(Child({1: 2}))), (({1: 2},),))
+
+ def test_helper_astuple_factory(self):
+ @dataclass
+ class C:
+ x: int
+ y: int
+ NT = namedtuple('NT', 'x y')
+ def nt(lst):
+ return NT(*lst)
+ c = C(1, 2)
+ t = astuple(c, tuple_factory=nt)
+ self.assertEqual(t, NT(1, 2))
+ self.assertIsNot(t, astuple(c, tuple_factory=nt))
+ c.x = 42
+ t = astuple(c, tuple_factory=nt)
+ self.assertEqual(t, NT(42, 2))
+ self.assertIs(type(t), NT)
+
+ def test_helper_astuple_namedtuple(self):
+ T = namedtuple('T', 'a b c')
+ @dataclass
+ class C:
+ x: str
+ y: T
+ c = C('outer', T(1, C('inner', T(11, 12, 13)), 2))
+
+ t = astuple(c)
+ self.assertEqual(t, ('outer', T(1, ('inner', (11, 12, 13)), 2)))
+
+ # Now, using a tuple_factory. list is convenient here.
+ t = astuple(c, tuple_factory=list)
+ self.assertEqual(t, ['outer', T(1, ['inner', T(11, 12, 13)], 2)])
+
+ def test_dynamic_class_creation(self):
+ cls_dict = {'__annotations__': {'x': int, 'y': int},
+ }
+
+ # Create the class.
+ cls = type('C', (), cls_dict)
+
+ # Make it a dataclass.
+ cls1 = dataclass(cls)
+
+ self.assertEqual(cls1, cls)
+ self.assertEqual(asdict(cls(1, 2)), {'x': 1, 'y': 2})
+
+ def test_dynamic_class_creation_using_field(self):
+ cls_dict = {'__annotations__': {'x': int, 'y': int},
+ 'y': field(default=5),
+ }
+
+ # Create the class.
+ cls = type('C', (), cls_dict)
+
+ # Make it a dataclass.
+ cls1 = dataclass(cls)
+
+ self.assertEqual(cls1, cls)
+ self.assertEqual(asdict(cls1(1)), {'x': 1, 'y': 5})
+
+ def test_init_in_order(self):
+ @dataclass
+ class C:
+ a: int
+ b: int = field()
+ c: list = field(default_factory=list, init=False)
+ d: list = field(default_factory=list)
+ e: int = field(default=4, init=False)
+ f: int = 4
+
+ calls = []
+ def setattr(self, name, value):
+ calls.append((name, value))
+
+ C.__setattr__ = setattr
+ c = C(0, 1)
+ self.assertEqual(('a', 0), calls[0])
+ self.assertEqual(('b', 1), calls[1])
+ self.assertEqual(('c', []), calls[2])
+ self.assertEqual(('d', []), calls[3])
+ self.assertNotIn(('e', 4), calls)
+ self.assertEqual(('f', 4), calls[4])
+
+ def test_items_in_dicts(self):
+ @dataclass
+ class C:
+ a: int
+ b: list = field(default_factory=list, init=False)
+ c: list = field(default_factory=list)
+ d: int = field(default=4, init=False)
+ e: int = 0
+
+ c = C(0)
+ # Class dict
+ self.assertNotIn('a', C.__dict__)
+ self.assertNotIn('b', C.__dict__)
+ self.assertNotIn('c', C.__dict__)
+ self.assertIn('d', C.__dict__)
+ self.assertEqual(C.d, 4)
+ self.assertIn('e', C.__dict__)
+ self.assertEqual(C.e, 0)
+ # Instance dict
+ self.assertIn('a', c.__dict__)
+ self.assertEqual(c.a, 0)
+ self.assertIn('b', c.__dict__)
+ self.assertEqual(c.b, [])
+ self.assertIn('c', c.__dict__)
+ self.assertEqual(c.c, [])
+ self.assertNotIn('d', c.__dict__)
+ self.assertIn('e', c.__dict__)
+ self.assertEqual(c.e, 0)
+
+ def test_alternate_classmethod_constructor(self):
+ # Since __post_init__ can't take params, use a classmethod
+ # alternate constructor. This is mostly an example to show
+ # how to use this technique.
+ @dataclass
+ class C:
+ x: int
+ @classmethod
+ def from_file(cls, filename):
+ # In a real example, create a new instance
+ # and populate 'x' from contents of a file.
+ value_in_file = 20
+ return cls(value_in_file)
+
+ self.assertEqual(C.from_file('filename').x, 20)
+
+ def test_field_metadata_default(self):
+ # Make sure the default metadata is read-only and of
+ # zero length.
+ @dataclass
+ class C:
+ i: int
+
+ self.assertFalse(fields(C)[0].metadata)
+ self.assertEqual(len(fields(C)[0].metadata), 0)
+ with self.assertRaisesRegex(TypeError,
+ 'does not support item assignment'):
+ fields(C)[0].metadata['test'] = 3
+
+ def test_field_metadata_mapping(self):
+        # Make sure only a mapping can be passed as metadata.
+ with self.assertRaises(TypeError):
+ @dataclass
+ class C:
+ i: int = field(metadata=0)
+
+ # Make sure an empty dict works.
+ d = {}
+ @dataclass
+ class C:
+ i: int = field(metadata=d)
+ self.assertFalse(fields(C)[0].metadata)
+ self.assertEqual(len(fields(C)[0].metadata), 0)
+ # Update should work (see bpo-35960).
+ d['foo'] = 1
+ self.assertEqual(len(fields(C)[0].metadata), 1)
+ self.assertEqual(fields(C)[0].metadata['foo'], 1)
+ with self.assertRaisesRegex(TypeError,
+ 'does not support item assignment'):
+ fields(C)[0].metadata['test'] = 3
+
+ # Make sure a non-empty dict works.
+ d = {'test': 10, 'bar': '42', 3: 'three'}
+ @dataclass
+ class C:
+ i: int = field(metadata=d)
+ self.assertEqual(len(fields(C)[0].metadata), 3)
+ self.assertEqual(fields(C)[0].metadata['test'], 10)
+ self.assertEqual(fields(C)[0].metadata['bar'], '42')
+ self.assertEqual(fields(C)[0].metadata[3], 'three')
+ # Update should work.
+ d['foo'] = 1
+ self.assertEqual(len(fields(C)[0].metadata), 4)
+ self.assertEqual(fields(C)[0].metadata['foo'], 1)
+ with self.assertRaises(KeyError):
+ # Non-existent key.
+ fields(C)[0].metadata['baz']
+ with self.assertRaisesRegex(TypeError,
+ 'does not support item assignment'):
+ fields(C)[0].metadata['test'] = 3
+
+ def test_field_metadata_custom_mapping(self):
+ # Try a custom mapping.
+ class SimpleNameSpace:
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
+ def __getitem__(self, item):
+ if item == 'xyzzy':
+ return 'plugh'
+ return getattr(self, item)
+
+ def __len__(self):
+ return self.__dict__.__len__()
+
+ @dataclass
+ class C:
+ i: int = field(metadata=SimpleNameSpace(a=10))
+
+ self.assertEqual(len(fields(C)[0].metadata), 1)
+ self.assertEqual(fields(C)[0].metadata['a'], 10)
+ with self.assertRaises(AttributeError):
+ fields(C)[0].metadata['b']
+ # Make sure we're still talking to our custom mapping.
+ self.assertEqual(fields(C)[0].metadata['xyzzy'], 'plugh')
+
+ def test_generic_dataclasses(self):
+ T = TypeVar('T')
+
+ @dataclass
+ class LabeledBox(Generic[T]):
+ content: T
+ label: str = '<unknown>'
+
+ box = LabeledBox(42)
+ self.assertEqual(box.content, 42)
+ self.assertEqual(box.label, '<unknown>')
+
+ # Subscripting the resulting class should work, etc.
+ Alias = List[LabeledBox[int]]
+
    def test_generic_extending(self):
        """Subclassing a generic dataclass, both with and without @dataclass."""
        S = TypeVar('S')
        T = TypeVar('T')

        @dataclass
        class Base(Generic[T, S]):
            x: T
            y: S

        # Dataclass subclass of a parameterized generic dataclass.
        @dataclass
        class DataDerived(Base[int, T]):
            new_field: str
        Alias = DataDerived[str]
        c = Alias(0, 'test1', 'test2')
        self.assertEqual(astuple(c), (0, 'test1', 'test2'))

        # Plain (non-dataclass) subclass keeps the inherited __init__.
        class NonDataDerived(Base[int, T]):
            def new_method(self):
                return self.y
        Alias = NonDataDerived[float]
        c = Alias(10, 1.0)
        self.assertEqual(c.new_method(), 1.0)
+
    def test_generic_dynamic(self):
        """make_dataclass() with generic bases: TypeVar fields, aliases, MRO."""
        T = TypeVar('T')

        @dataclass
        class Parent(Generic[T]):
            x: T
        Child = make_dataclass('Child', [('y', T), ('z', Optional[T], None)],
                               bases=(Parent[int], Generic[T]), namespace={'other': 42})
        self.assertIs(Child[int](1, 2).z, None)
        self.assertEqual(Child[int](1, 2, 3).z, 3)
        self.assertEqual(Child[int](1, 2, 3).other, 42)
        # Check that type aliases work correctly.
        Alias = Child[T]
        self.assertEqual(Alias[int](1, 2).x, 1)
        # Check MRO resolution.
        self.assertEqual(Child.__mro__, (Child, Parent, Generic, object))
+
    def test_dataclasses_pickleable(self):
        """Dataclass instances must round-trip through every pickle protocol."""
        # Pickle requires the classes to be importable from this module,
        # hence the globals.
        global P, Q, R
        @dataclass
        class P:
            x: int
            y: int = 0
        @dataclass
        class Q:
            x: int
            y: int = field(default=0, init=False)
        @dataclass
        class R:
            x: int
            y: List[int] = field(default_factory=list)
        q = Q(1)
        q.y = 2
        samples = [P(1), P(1, 2), Q(1), q, R(1), R(1, [2, 3, 4])]
        for sample in samples:
            for proto in range(pickle.HIGHEST_PROTOCOL + 1):
                with self.subTest(sample=sample, proto=proto):
                    new_sample = pickle.loads(pickle.dumps(sample, proto))
                    self.assertEqual(sample.x, new_sample.x)
                    self.assertEqual(sample.y, new_sample.y)
                    self.assertIsNot(sample, new_sample)
                    # Mutating the copy and re-pickling must not leak back
                    # into the original's state.
                    new_sample.x = 42
                    another_new_sample = pickle.loads(pickle.dumps(new_sample, proto))
                    self.assertEqual(new_sample.x, another_new_sample.x)
                    self.assertEqual(sample.y, another_new_sample.y)
+
    def test_dataclasses_qualnames(self):
        """Generated dunder methods must carry correct __name__/__qualname__."""
        @dataclass(order=True, unsafe_hash=True, frozen=True)
        class A:
            x: int
            y: int

        self.assertEqual(A.__init__.__name__, "__init__")
        for function in (
            '__eq__',
            '__lt__',
            '__le__',
            '__gt__',
            '__ge__',
            '__hash__',
            '__init__',
            '__repr__',
            '__setattr__',
            '__delattr__',
        ):
            # NOTE: the expected string hard-codes the enclosing class name
            # ("TestCase") and this method's name — structure-sensitive.
            self.assertEqual(getattr(A, function).__qualname__, f"TestCase.test_dataclasses_qualnames.<locals>.A.{function}")

        # Error messages should use the qualified name too.
        with self.assertRaisesRegex(TypeError, r"A\.__init__\(\) missing"):
            A()
+
+
class TestFieldNoAnnotation(unittest.TestCase):
    """A field() assigned to a name without a type annotation is an error."""

    def test_field_without_annotation(self):
        with self.assertRaisesRegex(TypeError,
                                    "'f' is a field but has no type annotation"):
            @dataclass
            class C:
                f = field()

    def test_field_without_annotation_but_annotation_in_base(self):
        @dataclass
        class B:
            f: int

        with self.assertRaisesRegex(TypeError,
                                    "'f' is a field but has no type annotation"):
            # This is still an error: make sure we don't pick up the
            # type annotation in the base class.
            @dataclass
            class C(B):
                f = field()

    def test_field_without_annotation_but_annotation_in_base_not_dataclass(self):
        # Same test, but with the base class not a dataclass.
        class B:
            f: int

        with self.assertRaisesRegex(TypeError,
                                    "'f' is a field but has no type annotation"):
            # This is still an error: make sure we don't pick up the
            # type annotation in the base class.
            @dataclass
            class C(B):
                f = field()
+
+
class TestDocString(unittest.TestCase):
    """@dataclass must synthesize a signature-style __doc__ when none exists.

    The inner classes under test deliberately have no docstring (except the
    one checking that an existing docstring is preserved).
    """

    def assertDocStrEqual(self, a, b):
        # Because 3.6 and 3.7 differ in how inspect.signature work
        # (see bpo #32108), for the time being just compare them with
        # whitespace stripped.
        self.assertEqual(a.replace(' ', ''), b.replace(' ', ''))

    def test_existing_docstring_not_overridden(self):
        @dataclass
        class C:
            """Lorem ipsum"""
            x: int

        self.assertEqual(C.__doc__, "Lorem ipsum")

    def test_docstring_no_fields(self):
        @dataclass
        class C:
            pass

        self.assertDocStrEqual(C.__doc__, "C()")

    def test_docstring_one_field(self):
        @dataclass
        class C:
            x: int

        self.assertDocStrEqual(C.__doc__, "C(x:int)")

    def test_docstring_two_fields(self):
        @dataclass
        class C:
            x: int
            y: int

        self.assertDocStrEqual(C.__doc__, "C(x:int, y:int)")

    def test_docstring_three_fields(self):
        @dataclass
        class C:
            x: int
            y: int
            z: str

        self.assertDocStrEqual(C.__doc__, "C(x:int, y:int, z:str)")

    def test_docstring_one_field_with_default(self):
        @dataclass
        class C:
            x: int = 3

        self.assertDocStrEqual(C.__doc__, "C(x:int=3)")

    def test_docstring_one_field_with_default_none(self):
        @dataclass
        class C:
            x: Union[int, type(None)] = None

        self.assertDocStrEqual(C.__doc__, "C(x:Optional[int]=None)")

    def test_docstring_list_field(self):
        @dataclass
        class C:
            x: List[int]

        self.assertDocStrEqual(C.__doc__, "C(x:List[int])")

    def test_docstring_list_field_with_default_factory(self):
        @dataclass
        class C:
            x: List[int] = field(default_factory=list)

        self.assertDocStrEqual(C.__doc__, "C(x:List[int]=<factory>)")

    def test_docstring_deque_field(self):
        @dataclass
        class C:
            x: deque

        self.assertDocStrEqual(C.__doc__, "C(x:collections.deque)")

    def test_docstring_deque_field_with_default_factory(self):
        @dataclass
        class C:
            x: deque = field(default_factory=deque)

        self.assertDocStrEqual(C.__doc__, "C(x:collections.deque=<factory>)")
+
+
class TestInit(unittest.TestCase):
    """Generation and suppression of __init__ by @dataclass (init= flag)."""

    def test_base_has_init(self):
        class B:
            def __init__(self):
                self.z = 100
                pass  # NOTE(review): redundant, kept verbatim from upstream

        # Make sure that declaring this class doesn't raise an error.
        # The issue is that we can't override __init__ in our class,
        # but it should be okay to add __init__ to us if our base has
        # an __init__.
        @dataclass
        class C(B):
            x: int = 0
        c = C(10)
        self.assertEqual(c.x, 10)
        # Generated __init__ replaces B's, so z is never set.
        self.assertNotIn('z', vars(c))

        # Make sure that if we don't add an init, the base __init__
        # gets called.
        @dataclass(init=False)
        class C(B):
            x: int = 10
        c = C()
        self.assertEqual(c.x, 10)
        self.assertEqual(c.z, 100)

    def test_no_init(self):
        @dataclass(init=False)
        class C:
            i: int = 0
        self.assertEqual(C().i, 0)

        # A user-defined __init__ is kept when init=False.
        @dataclass(init=False)
        class C:
            i: int = 2
            def __init__(self):
                self.i = 3
        self.assertEqual(C().i, 3)

    def test_overwriting_init(self):
        # If the class has __init__, use it no matter the value of
        # init=.

        @dataclass
        class C:
            x: int
            def __init__(self, x):
                self.x = 2 * x
        self.assertEqual(C(3).x, 6)

        @dataclass(init=True)
        class C:
            x: int
            def __init__(self, x):
                self.x = 2 * x
        self.assertEqual(C(4).x, 8)

        @dataclass(init=False)
        class C:
            x: int
            def __init__(self, x):
                self.x = 2 * x
        self.assertEqual(C(5).x, 10)

    def test_inherit_from_protocol(self):
        # Dataclasses inheriting from protocol should preserve their own `__init__`.
        # See bpo-45081.

        class P(Protocol):
            a: int

        @dataclass
        class C(P):
            a: int

        self.assertEqual(C(5).a, 5)

        @dataclass
        class D(P):
            def __init__(self, a):
                self.a = a * 2

        self.assertEqual(D(5).a, 10)
+
+
class TestRepr(unittest.TestCase):
    """Generation and suppression of __repr__ by @dataclass (repr= flag).

    Expected repr strings hard-code this class's and each method's name,
    so the surrounding structure must not change.
    """

    def test_repr(self):
        @dataclass
        class B:
            x: int

        @dataclass
        class C(B):
            y: int = 10

        o = C(4)
        self.assertEqual(repr(o), 'TestRepr.test_repr.<locals>.C(x=4, y=10)')

        # Repr of a subclass uses the subclass's qualname but all fields.
        @dataclass
        class D(C):
            x: int = 20
        self.assertEqual(repr(D()), 'TestRepr.test_repr.<locals>.D(x=20, y=10)')

        # Nested dataclasses include the full nesting path in the repr.
        @dataclass
        class C:
            @dataclass
            class D:
                i: int
            @dataclass
            class E:
                pass
        self.assertEqual(repr(C.D(0)), 'TestRepr.test_repr.<locals>.C.D(i=0)')
        self.assertEqual(repr(C.E()), 'TestRepr.test_repr.<locals>.C.E()')

    def test_no_repr(self):
        # Test a class with no __repr__ and repr=False.
        @dataclass(repr=False)
        class C:
            x: int
        self.assertIn(f'{__name__}.TestRepr.test_no_repr.<locals>.C object at',
                      repr(C(3)))

        # Test a class with a __repr__ and repr=False.
        @dataclass(repr=False)
        class C:
            x: int
            def __repr__(self):
                return 'C-class'
        self.assertEqual(repr(C(3)), 'C-class')

    def test_overwriting_repr(self):
        # If the class has __repr__, use it no matter the value of
        # repr=.

        @dataclass
        class C:
            x: int
            def __repr__(self):
                return 'x'
        self.assertEqual(repr(C(0)), 'x')

        @dataclass(repr=True)
        class C:
            x: int
            def __repr__(self):
                return 'x'
        self.assertEqual(repr(C(0)), 'x')

        @dataclass(repr=False)
        class C:
            x: int
            def __repr__(self):
                return 'x'
        self.assertEqual(repr(C(0)), 'x')
+
+
class TestEq(unittest.TestCase):
    """Generation and suppression of __eq__ by @dataclass (eq= flag)."""

    def test_no_eq(self):
        # Test a class with no __eq__ and eq=False.
        @dataclass(eq=False)
        class C:
            x: int
        # Without __eq__, equality falls back to identity.
        self.assertNotEqual(C(0), C(0))
        c = C(3)
        self.assertEqual(c, c)

        # Test a class with an __eq__ and eq=False.
        @dataclass(eq=False)
        class C:
            x: int
            def __eq__(self, other):
                return other == 10
        self.assertEqual(C(3), 10)

    def test_overwriting_eq(self):
        # If the class has __eq__, use it no matter the value of
        # eq=.

        @dataclass
        class C:
            x: int
            def __eq__(self, other):
                return other == 3
        self.assertEqual(C(1), 3)
        self.assertNotEqual(C(1), 1)

        @dataclass(eq=True)
        class C:
            x: int
            def __eq__(self, other):
                return other == 4
        self.assertEqual(C(1), 4)
        self.assertNotEqual(C(1), 1)

        @dataclass(eq=False)
        class C:
            x: int
            def __eq__(self, other):
                return other == 5
        self.assertEqual(C(1), 5)
        self.assertNotEqual(C(1), 1)
+
+
class TestOrdering(unittest.TestCase):
    """Ordering method generation (order= flag) and total_ordering interop."""

    def test_functools_total_ordering(self):
        # Test that functools.total_ordering works with this class.
        @total_ordering
        @dataclass
        class C:
            x: int
            def __lt__(self, other):
                # Perform the test "backward", just to make
                # sure this is being called.
                return self.x >= other

        self.assertLess(C(0), -1)
        self.assertLessEqual(C(0), -1)
        self.assertGreater(C(0), 1)
        self.assertGreaterEqual(C(0), 1)

    def test_no_order(self):
        # Test that no ordering functions are added by default.
        @dataclass(order=False)
        class C:
            x: int
        # Make sure no order methods are added.
        self.assertNotIn('__le__', C.__dict__)
        self.assertNotIn('__lt__', C.__dict__)
        self.assertNotIn('__ge__', C.__dict__)
        self.assertNotIn('__gt__', C.__dict__)

        # Test that __lt__ is still called
        @dataclass(order=False)
        class C:
            x: int
            def __lt__(self, other):
                return False
        # Make sure other methods aren't added.
        self.assertNotIn('__le__', C.__dict__)
        self.assertNotIn('__ge__', C.__dict__)
        self.assertNotIn('__gt__', C.__dict__)

    def test_overwriting_order(self):
        # order=True must refuse to silently overwrite user-defined
        # comparison methods; each of the four raises TypeError.
        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __lt__'
                                    '.*using functools.total_ordering'):
            @dataclass(order=True)
            class C:
                x: int
                def __lt__(self):
                    pass

        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __le__'
                                    '.*using functools.total_ordering'):
            @dataclass(order=True)
            class C:
                x: int
                def __le__(self):
                    pass

        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __gt__'
                                    '.*using functools.total_ordering'):
            @dataclass(order=True)
            class C:
                x: int
                def __gt__(self):
                    pass

        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __ge__'
                                    '.*using functools.total_ordering'):
            @dataclass(order=True)
            class C:
                x: int
                def __ge__(self):
                    pass
+
class TestHash(unittest.TestCase):
    """__hash__ generation rules across unsafe_hash=, eq= and frozen=."""

    def test_unsafe_hash(self):
        @dataclass(unsafe_hash=True)
        class C:
            x: int
            y: str
        # Generated hash matches the tuple-of-fields hash.
        self.assertEqual(hash(C(1, 'foo')), hash((1, 'foo')))

    def test_hash_rules(self):
        def non_bool(value):
            # Map to something else that's True, but not a bool.
            if value is None:
                return None
            if value:
                return (3,)
            return 0

        # Build a class for one (unsafe_hash, eq, frozen, with_hash)
        # combination and check which __hash__ the decorator produced.
        # `result` is one of: 'fn', '', 'none', 'exception'.
        def test(case, unsafe_hash, eq, frozen, with_hash, result):
            with self.subTest(case=case, unsafe_hash=unsafe_hash, eq=eq,
                              frozen=frozen):
                if result != 'exception':
                    if with_hash:
                        @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen)
                        class C:
                            def __hash__(self):
                                return 0
                    else:
                        @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen)
                        class C:
                            pass

                # See if the result matches what's expected.
                if result == 'fn':
                    # __hash__ contains the function we generated.
                    self.assertIn('__hash__', C.__dict__)
                    self.assertIsNotNone(C.__dict__['__hash__'])

                elif result == '':
                    # __hash__ is not present in our class.
                    if not with_hash:
                        self.assertNotIn('__hash__', C.__dict__)

                elif result == 'none':
                    # __hash__ is set to None.
                    self.assertIn('__hash__', C.__dict__)
                    self.assertIsNone(C.__dict__['__hash__'])

                elif result == 'exception':
                    # Creating the class should cause an exception.
                    # This only happens with with_hash==True.
                    assert(with_hash)
                    with self.assertRaisesRegex(TypeError, 'Cannot overwrite attribute __hash__'):
                        @dataclass(unsafe_hash=unsafe_hash, eq=eq, frozen=frozen)
                        class C:
                            def __hash__(self):
                                return 0

                else:
                    assert False, f'unknown result {result!r}'

        # There are 8 cases of:
        # unsafe_hash=True/False
        # eq=True/False
        # frozen=True/False
        # And for each of these, a different result if
        # __hash__ is defined or not.
        for case, (unsafe_hash, eq, frozen, res_no_defined_hash, res_defined_hash) in enumerate([
                (False, False, False, '', ''),
                (False, False, True, '', ''),
                (False, True, False, 'none', ''),
                (False, True, True, 'fn', ''),
                (True, False, False, 'fn', 'exception'),
                (True, False, True, 'fn', 'exception'),
                (True, True, False, 'fn', 'exception'),
                (True, True, True, 'fn', 'exception'),
                ], 1):
            test(case, unsafe_hash, eq, frozen, False, res_no_defined_hash)
            test(case, unsafe_hash, eq, frozen, True, res_defined_hash)

            # Test non-bool truth values, too. This is just to
            # make sure the data-driven table in the decorator
            # handles non-bool values.
            test(case, non_bool(unsafe_hash), non_bool(eq), non_bool(frozen), False, res_no_defined_hash)
            test(case, non_bool(unsafe_hash), non_bool(eq), non_bool(frozen), True, res_defined_hash)


    def test_eq_only(self):
        # If a class defines __eq__, __hash__ is automatically added
        # and set to None. This is normal Python behavior, not
        # related to dataclasses. Make sure we don't interfere with
        # that (see bpo=32546).

        @dataclass
        class C:
            i: int
            def __eq__(self, other):
                return self.i == other.i
        self.assertEqual(C(1), C(1))
        self.assertNotEqual(C(1), C(4))

        # And make sure things work in this case if we specify
        # unsafe_hash=True.
        @dataclass(unsafe_hash=True)
        class C:
            i: int
            def __eq__(self, other):
                return self.i == other.i
        self.assertEqual(C(1), C(1.0))
        self.assertEqual(hash(C(1)), hash(C(1.0)))

        # And check that the classes __eq__ is being used, despite
        # specifying eq=True.
        @dataclass(unsafe_hash=True, eq=True)
        class C:
            i: int
            def __eq__(self, other):
                return self.i == 3 and self.i == other.i
        self.assertEqual(C(3), C(3))
        self.assertNotEqual(C(1), C(1))
        self.assertEqual(hash(C(1)), hash(C(1.0)))

    def test_0_field_hash(self):
        @dataclass(frozen=True)
        class C:
            pass
        self.assertEqual(hash(C()), hash(()))

        @dataclass(unsafe_hash=True)
        class C:
            pass
        self.assertEqual(hash(C()), hash(()))

    def test_1_field_hash(self):
        @dataclass(frozen=True)
        class C:
            x: int
        self.assertEqual(hash(C(4)), hash((4,)))
        self.assertEqual(hash(C(42)), hash((42,)))

        @dataclass(unsafe_hash=True)
        class C:
            x: int
        self.assertEqual(hash(C(4)), hash((4,)))
        self.assertEqual(hash(C(42)), hash((42,)))

    def test_hash_no_args(self):
        # Test dataclasses with no hash= argument. This exists to
        # make sure that if the @dataclass parameter name is changed
        # or the non-default hashing behavior changes, the default
        # hashability keeps working the same way.

        class Base:
            def __hash__(self):
                return 301

        # If frozen or eq is None, then use the default value (do not
        # specify any value in the decorator).
        for frozen, eq, base, expected in [
            (None, None, object, 'unhashable'),
            (None, None, Base, 'unhashable'),
            (None, False, object, 'object'),
            (None, False, Base, 'base'),
            (None, True, object, 'unhashable'),
            (None, True, Base, 'unhashable'),
            (False, None, object, 'unhashable'),
            (False, None, Base, 'unhashable'),
            (False, False, object, 'object'),
            (False, False, Base, 'base'),
            (False, True, object, 'unhashable'),
            (False, True, Base, 'unhashable'),
            (True, None, object, 'tuple'),
            (True, None, Base, 'tuple'),
            (True, False, object, 'object'),
            (True, False, Base, 'base'),
            (True, True, object, 'tuple'),
            (True, True, Base, 'tuple'),
        ]:

            with self.subTest(frozen=frozen, eq=eq, base=base, expected=expected):
                # First, create the class.
                if frozen is None and eq is None:
                    @dataclass
                    class C(base):
                        i: int
                elif frozen is None:
                    @dataclass(eq=eq)
                    class C(base):
                        i: int
                elif eq is None:
                    @dataclass(frozen=frozen)
                    class C(base):
                        i: int
                else:
                    @dataclass(frozen=frozen, eq=eq)
                    class C(base):
                        i: int

                # Now, make sure it hashes as expected.
                if expected == 'unhashable':
                    c = C(10)
                    with self.assertRaisesRegex(TypeError, 'unhashable type'):
                        hash(c)

                elif expected == 'base':
                    self.assertEqual(hash(C(10)), 301)

                elif expected == 'object':
                    # I'm not sure what test to use here. object's
                    # hash isn't based on id(), so calling hash()
                    # won't tell us much. So, just check the
                    # function used is object's.
                    self.assertIs(C.__hash__, object.__hash__)

                elif expected == 'tuple':
                    self.assertEqual(hash(C(42)), hash((42,)))

                else:
                    assert False, f'unknown value for expected={expected!r}'
+
class TestFrozen(unittest.TestCase):
    """frozen=True semantics: immutability and frozen/non-frozen inheritance."""

    def test_frozen(self):
        @dataclass(frozen=True)
        class C:
            i: int

        c = C(10)
        self.assertEqual(c.i, 10)
        with self.assertRaises(FrozenInstanceError):
            c.i = 5
        # The failed assignment must not have changed the value.
        self.assertEqual(c.i, 10)

    def test_inherit(self):
        @dataclass(frozen=True)
        class C:
            i: int

        @dataclass(frozen=True)
        class D(C):
            j: int

        d = D(0, 10)
        with self.assertRaises(FrozenInstanceError):
            d.i = 5
        with self.assertRaises(FrozenInstanceError):
            d.j = 6
        self.assertEqual(d.i, 0)
        self.assertEqual(d.j, 10)

    def test_inherit_nonfrozen_from_empty_frozen(self):
        @dataclass(frozen=True)
        class C:
            pass

        with self.assertRaisesRegex(TypeError,
                                    'cannot inherit non-frozen dataclass from a frozen one'):
            @dataclass
            class D(C):
                j: int

    def test_inherit_nonfrozen_from_empty(self):
        @dataclass
        class C:
            pass

        @dataclass
        class D(C):
            j: int

        d = D(3)
        self.assertEqual(d.j, 3)
        self.assertIsInstance(d, C)

    # Test both ways: with an intermediate normal (non-dataclass)
    # class and without an intermediate class.
    def test_inherit_nonfrozen_from_frozen(self):
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                @dataclass(frozen=True)
                class C:
                    i: int

                if intermediate_class:
                    class I(C): pass
                else:
                    I = C

                with self.assertRaisesRegex(TypeError,
                                            'cannot inherit non-frozen dataclass from a frozen one'):
                    @dataclass
                    class D(I):
                        pass

    def test_inherit_frozen_from_nonfrozen(self):
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                @dataclass
                class C:
                    i: int

                if intermediate_class:
                    class I(C): pass
                else:
                    I = C

                with self.assertRaisesRegex(TypeError,
                                            'cannot inherit frozen dataclass from a non-frozen one'):
                    @dataclass(frozen=True)
                    class D(I):
                        pass

    def test_inherit_from_normal_class(self):
        for intermediate_class in [True, False]:
            with self.subTest(intermediate_class=intermediate_class):
                class C:
                    pass

                if intermediate_class:
                    class I(C): pass
                else:
                    I = C

                @dataclass(frozen=True)
                class D(I):
                    i: int

                d = D(10)
                with self.assertRaises(FrozenInstanceError):
                    d.i = 5

    def test_non_frozen_normal_derived(self):
        # See bpo-32953.

        @dataclass(frozen=True)
        class D:
            x: int
            y: int = 10

        class S(D):
            pass

        s = S(3)
        self.assertEqual(s.x, 3)
        self.assertEqual(s.y, 10)
        # Non-field attributes on a plain subclass remain assignable.
        s.cached = True

        # But can't change the frozen attributes.
        with self.assertRaises(FrozenInstanceError):
            s.x = 5
        with self.assertRaises(FrozenInstanceError):
            s.y = 5
        self.assertEqual(s.x, 3)
        self.assertEqual(s.y, 10)
        self.assertEqual(s.cached, True)

    def test_overwriting_frozen(self):
        # frozen uses __setattr__ and __delattr__.
        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __setattr__'):
            @dataclass(frozen=True)
            class C:
                x: int
                def __setattr__(self):
                    pass

        with self.assertRaisesRegex(TypeError,
                                    'Cannot overwrite attribute __delattr__'):
            @dataclass(frozen=True)
            class C:
                x: int
                def __delattr__(self):
                    pass

        # With frozen=False a user __setattr__ is left alone.
        @dataclass(frozen=False)
        class C:
            x: int
            def __setattr__(self, name, value):
                self.__dict__['x'] = value * 2
        self.assertEqual(C(10).x, 20)

    def test_frozen_hash(self):
        @dataclass(frozen=True)
        class C:
            x: Any

        # If x is immutable, we can compute the hash. No exception is
        # raised.
        hash(C(3))

        # If x is mutable, computing the hash is an error.
        with self.assertRaisesRegex(TypeError, 'unhashable type'):
            hash(C({}))
+
+
class TestSlots(unittest.TestCase):
    """__slots__ handling: explicit slots, slots=True, and weakref_slot=True."""

    def test_simple(self):
        @dataclass
        class C:
            __slots__ = ('x',)
            x: Any

        # There was a bug where a variable in a slot was assumed to
        # also have a default value (of type
        # types.MemberDescriptorType).
        with self.assertRaisesRegex(TypeError,
                                    r"__init__\(\) missing 1 required positional argument: 'x'"):
            C()

        # We can create an instance, and assign to x.
        c = C(10)
        self.assertEqual(c.x, 10)
        c.x = 5
        self.assertEqual(c.x, 5)

        # We can't assign to anything else.
        with self.assertRaisesRegex(AttributeError, "'C' object has no attribute 'y'"):
            c.y = 5

    def test_derived_added_field(self):
        # See bpo-33100.
        @dataclass
        class Base:
            __slots__ = ('x',)
            x: Any

        @dataclass
        class Derived(Base):
            x: int
            y: int

        d = Derived(1, 2)
        self.assertEqual((d.x, d.y), (1, 2))

        # We can add a new field to the derived instance.
        d.z = 10

    def test_generated_slots(self):
        @dataclass(slots=True)
        class C:
            x: int
            y: int

        c = C(1, 2)
        self.assertEqual((c.x, c.y), (1, 2))

        c.x = 3
        c.y = 4
        self.assertEqual((c.x, c.y), (3, 4))

        with self.assertRaisesRegex(AttributeError, "'C' object has no attribute 'z'"):
            c.z = 5

    def test_add_slots_when_slots_exists(self):
        with self.assertRaisesRegex(TypeError, '^C already specifies __slots__$'):
            @dataclass(slots=True)
            class C:
                __slots__ = ('x',)
                x: int

    def test_generated_slots_value(self):
        # Bases exercise every accepted __slots__ spelling: set, dict,
        # list, and bare string.
        class Root:
            __slots__ = {'x'}

        class Root2(Root):
            __slots__ = {'k': '...', 'j': ''}

        class Root3(Root2):
            __slots__ = ['h']

        class Root4(Root3):
            __slots__ = 'aa'

        @dataclass(slots=True)
        class Base(Root4):
            y: int
            j: str
            h: str

        # Only fields not already slotted in a base get new slots.
        self.assertEqual(Base.__slots__, ('y', ))

        @dataclass(slots=True)
        class Derived(Base):
            aa: float
            x: str
            z: int
            k: str
            h: str

        self.assertEqual(Derived.__slots__, ('z', ))

        @dataclass
        class AnotherDerived(Base):
            z: int

        self.assertNotIn('__slots__', AnotherDerived.__dict__)

    def test_cant_inherit_from_iterator_slots(self):
        # An iterator __slots__ in a base is consumed at class creation
        # and cannot be inspected afterwards.
        class Root:
            __slots__ = iter(['a'])

        class Root2(Root):
            __slots__ = ('b', )

        with self.assertRaisesRegex(
            TypeError,
            "^Slots of 'Root' cannot be determined"
        ):
            @dataclass(slots=True)
            class C(Root2):
                x: int

    def test_returns_new_class(self):
        # slots=True cannot modify the class in place; it must build
        # and return a new one.
        class A:
            x: int

        B = dataclass(A, slots=True)
        self.assertIsNot(A, B)

        self.assertFalse(hasattr(A, "__slots__"))
        self.assertTrue(hasattr(B, "__slots__"))

    # Can't be local to test_frozen_pickle.
    @dataclass(frozen=True, slots=True)
    class FrozenSlotsClass:
        foo: str
        bar: int

    @dataclass(frozen=True)
    class FrozenWithoutSlotsClass:
        foo: str
        bar: int

    def test_frozen_pickle(self):
        # bpo-43999

        self.assertEqual(self.FrozenSlotsClass.__slots__, ("foo", "bar"))
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.subTest(proto=proto):
                obj = self.FrozenSlotsClass("a", 1)
                p = pickle.loads(pickle.dumps(obj, protocol=proto))
                self.assertIsNot(obj, p)
                self.assertEqual(obj, p)

                obj = self.FrozenWithoutSlotsClass("a", 1)
                p = pickle.loads(pickle.dumps(obj, protocol=proto))
                self.assertIsNot(obj, p)
                self.assertEqual(obj, p)

    def test_slots_with_default_no_init(self):
        # Originally reported in bpo-44649.
        @dataclass(slots=True)
        class A:
            a: str
            b: str = field(default='b', init=False)

        obj = A("a")
        self.assertEqual(obj.a, 'a')
        self.assertEqual(obj.b, 'b')

    def test_slots_with_default_factory_no_init(self):
        # Originally reported in bpo-44649.
        @dataclass(slots=True)
        class A:
            a: str
            b: str = field(default_factory=lambda:'b', init=False)

        obj = A("a")
        self.assertEqual(obj.a, 'a')
        self.assertEqual(obj.b, 'b')

    def test_slots_no_weakref(self):
        @dataclass(slots=True)
        class A:
            # No weakref.
            pass

        self.assertNotIn("__weakref__", A.__slots__)
        a = A()
        with self.assertRaisesRegex(TypeError,
                                    "cannot create weak reference"):
            weakref.ref(a)

    def test_slots_weakref(self):
        @dataclass(slots=True, weakref_slot=True)
        class A:
            a: int

        self.assertIn("__weakref__", A.__slots__)
        a = A(1)
        weakref.ref(a)

    def test_slots_weakref_base_str(self):
        class Base:
            __slots__ = '__weakref__'

        @dataclass(slots=True)
        class A(Base):
            a: int

        # __weakref__ is in the base class, not A. But an A is still weakref-able.
        self.assertIn("__weakref__", Base.__slots__)
        self.assertNotIn("__weakref__", A.__slots__)
        a = A(1)
        weakref.ref(a)

    def test_slots_weakref_base_tuple(self):
        # Same as test_slots_weakref_base, but use a tuple instead of a string
        # in the base class.
        class Base:
            __slots__ = ('__weakref__',)

        @dataclass(slots=True)
        class A(Base):
            a: int

        # __weakref__ is in the base class, not A. But an A is still
        # weakref-able.
        self.assertIn("__weakref__", Base.__slots__)
        self.assertNotIn("__weakref__", A.__slots__)
        a = A(1)
        weakref.ref(a)

    def test_weakref_slot_without_slot(self):
        with self.assertRaisesRegex(TypeError,
                                    "weakref_slot is True but slots is False"):
            @dataclass(weakref_slot=True)
            class A:
                a: int

    def test_weakref_slot_make_dataclass(self):
        A = make_dataclass('A', [('a', int),], slots=True, weakref_slot=True)
        self.assertIn("__weakref__", A.__slots__)
        a = A(1)
        weakref.ref(a)

        # And make sure if raises if slots=True is not given.
        with self.assertRaisesRegex(TypeError,
                                    "weakref_slot is True but slots is False"):
            B = make_dataclass('B', [('a', int),], weakref_slot=True)

    def test_weakref_slot_subclass_weakref_slot(self):
        @dataclass(slots=True, weakref_slot=True)
        class Base:
            field: int

        # A *can* also specify weakref_slot=True if it wants to (gh-93521)
        @dataclass(slots=True, weakref_slot=True)
        class A(Base):
            ...

        # __weakref__ is in the base class, not A. But an instance of A
        # is still weakref-able.
        self.assertIn("__weakref__", Base.__slots__)
        self.assertNotIn("__weakref__", A.__slots__)
        a = A(1)
        weakref.ref(a)

    def test_weakref_slot_subclass_no_weakref_slot(self):
        @dataclass(slots=True, weakref_slot=True)
        class Base:
            field: int

        @dataclass(slots=True)
        class A(Base):
            ...

        # __weakref__ is in the base class, not A. Even though A doesn't
        # specify weakref_slot, it should still be weakref-able.
        self.assertIn("__weakref__", Base.__slots__)
        self.assertNotIn("__weakref__", A.__slots__)
        a = A(1)
        weakref.ref(a)

    def test_weakref_slot_normal_base_weakref_slot(self):
        class Base:
            __slots__ = ('__weakref__',)

        @dataclass(slots=True, weakref_slot=True)
        class A(Base):
            field: int

        # __weakref__ is in the base class, not A. But an instance of
        # A is still weakref-able.
        self.assertIn("__weakref__", Base.__slots__)
        self.assertNotIn("__weakref__", A.__slots__)
        a = A(1)
        weakref.ref(a)
+
+
class TestDescriptors(unittest.TestCase):
    """Interaction of dataclass fields with descriptors and __set_name__."""

    def test_set_name(self):
        # See bpo-33141.

        # Create a descriptor.
        class D:
            def __set_name__(self, owner, name):
                self.name = name + 'x'
            def __get__(self, instance, owner):
                if instance is not None:
                    return 1
                return self

        # This is the case of just normal descriptor behavior, no
        # dataclass code is involved in initializing the descriptor.
        @dataclass
        class C:
            c: int=D()
        self.assertEqual(C.c.name, 'cx')

        # Now test with a default value and init=False, which is the
        # only time this is really meaningful. If not using
        # init=False, then the descriptor will be overwritten, anyway.
        @dataclass
        class C:
            c: int=field(default=D(), init=False)
        self.assertEqual(C.c.name, 'cx')
        self.assertEqual(C().c, 1)

    def test_non_descriptor(self):
        # PEP 487 says __set_name__ should work on non-descriptors.
        # Create a descriptor.

        class D:
            def __set_name__(self, owner, name):
                self.name = name + 'x'

        @dataclass
        class C:
            c: int=field(default=D(), init=False)
        self.assertEqual(C.c.name, 'cx')

    def test_lookup_on_instance(self):
        # See bpo-33175.
        class D:
            pass

        d = D()
        # Create an attribute on the instance, not type.
        d.__set_name__ = Mock()

        # Make sure d.__set_name__ is not called.
        @dataclass
        class C:
            i: int=field(default=d, init=False)

        self.assertEqual(d.__set_name__.call_count, 0)

    def test_lookup_on_class(self):
        # See bpo-33175.
        class D:
            pass
        D.__set_name__ = Mock()

        # Make sure D.__set_name__ is called.
        @dataclass
        class C:
            i: int=field(default=D(), init=False)

        self.assertEqual(D.__set_name__.call_count, 1)

    def test_init_calls_set(self):
        class D:
            pass

        D.__set__ = Mock()

        @dataclass
        class C:
            i: D = D()

        # Make sure D.__set__ is called.
        D.__set__.reset_mock()
        c = C(5)
        self.assertEqual(D.__set__.call_count, 1)

    def test_getting_field_calls_get(self):
        class D:
            pass

        D.__set__ = Mock()
        D.__get__ = Mock()

        @dataclass
        class C:
            i: D = D()

        c = C(5)

        # Make sure D.__get__ is called.
        D.__get__.reset_mock()
        value = c.i
        self.assertEqual(D.__get__.call_count, 1)

    def test_setting_field_calls_set(self):
        class D:
            pass

        D.__set__ = Mock()

        @dataclass
        class C:
            i: D = D()

        c = C(5)

        # Make sure D.__set__ is called.
        D.__set__.reset_mock()
        c.i = 10
        self.assertEqual(D.__set__.call_count, 1)

    def test_setting_uninitialized_descriptor_field(self):
        class D:
            pass

        D.__set__ = Mock()

        @dataclass
        class C:
            i: D

        # D.__set__ is not called because there's no D instance to call it on
        D.__set__.reset_mock()
        c = C(5)
        self.assertEqual(D.__set__.call_count, 0)

        # D.__set__ still isn't called after setting i to an instance of D
        # because descriptors don't behave like that when stored as instance vars
        c.i = D()
        c.i = 5
        self.assertEqual(D.__set__.call_count, 0)

    def test_default_value(self):
        # A data descriptor default: class-level __get__ supplies the
        # default when no argument is passed.
        class D:
            def __get__(self, instance: Any, owner: object) -> int:
                if instance is None:
                    return 100

                return instance._x

            def __set__(self, instance: Any, value: int) -> None:
                instance._x = value

        @dataclass
        class C:
            i: D = D()

        c = C()
        self.assertEqual(c.i, 100)

        c = C(5)
        self.assertEqual(c.i, 5)

    def test_no_default_value(self):
        # If the descriptor raises AttributeError for class access,
        # the field effectively has no default.
        class D:
            def __get__(self, instance: Any, owner: object) -> int:
                if instance is None:
                    raise AttributeError()

                return instance._x

            def __set__(self, instance: Any, value: int) -> None:
                instance._x = value

        @dataclass
        class C:
            i: D = D()

        with self.assertRaisesRegex(TypeError, 'missing 1 required positional argument'):
            c = C()
+
+class TestStringAnnotations(unittest.TestCase):
    def test_classvar(self):
        """String annotations that must be recognized as ClassVar."""
        # Some expressions recognized as ClassVar really aren't. But
        # if you're using string annotations, it's not an exact
        # science.
        # These tests assume that both "import typing" and "from
        # typing import *" have been run in this file.
        for typestr in ('ClassVar[int]',
                        'ClassVar [int]',
                        ' ClassVar [int]',
                        'ClassVar',
                        ' ClassVar ',
                        'typing.ClassVar[int]',
                        'typing.ClassVar[str]',
                        ' typing.ClassVar[str]',
                        'typing .ClassVar[str]',
                        'typing. ClassVar[str]',
                        'typing.ClassVar [str]',
                        'typing.ClassVar [ str]',

                        # Not syntactically valid, but these will
                        # be treated as ClassVars.
                        'typing.ClassVar.[int]',
                        'typing.ClassVar+',
                        ):
            with self.subTest(typestr=typestr):
                @dataclass
                class C:
                    x: typestr

                # x is a ClassVar, so C() takes no args.
                C()

                # And it won't appear in the class's dict because it doesn't
                # have a default.
                self.assertNotIn('x', C.__dict__)
+
+ def test_isnt_classvar(self):
+ for typestr in ('CV',
+ 't.ClassVar',
+ 't.ClassVar[int]',
+ 'typing..ClassVar[int]',
+ 'Classvar',
+ 'Classvar[int]',
+ 'typing.ClassVarx[int]',
+ 'typong.ClassVar[int]',
+ 'dataclasses.ClassVar[int]',
+ 'typingxClassVar[str]',
+ ):
+ with self.subTest(typestr=typestr):
+ @dataclass
+ class C:
+ x: typestr
+
+ # x is not a ClassVar, so C() takes one arg.
+ self.assertEqual(C(10).x, 10)
+
+ def test_initvar(self):
+ # These tests assume that both "import dataclasses" and "from
+ # dataclasses import *" have been run in this file.
+ for typestr in ('InitVar[int]',
+ 'InitVar [int]'
+ ' InitVar [int]',
+ 'InitVar',
+ ' InitVar ',
+ 'dataclasses.InitVar[int]',
+ 'dataclasses.InitVar[str]',
+ ' dataclasses.InitVar[str]',
+ 'dataclasses .InitVar[str]',
+ 'dataclasses. InitVar[str]',
+ 'dataclasses.InitVar [str]',
+ 'dataclasses.InitVar [ str]',
+
+ # Not syntactically valid, but these will
+ # be treated as InitVars.
+ 'dataclasses.InitVar.[int]',
+ 'dataclasses.InitVar+',
+ ):
+ with self.subTest(typestr=typestr):
+ @dataclass
+ class C:
+ x: typestr
+
+ # x is an InitVar, so doesn't create a member.
+ with self.assertRaisesRegex(AttributeError,
+ "object has no attribute 'x'"):
+ C(1).x
+
+ def test_isnt_initvar(self):
+ for typestr in ('IV',
+ 'dc.InitVar',
+ 'xdataclasses.xInitVar',
+ 'typing.xInitVar[int]',
+ ):
+ with self.subTest(typestr=typestr):
+ @dataclass
+ class C:
+ x: typestr
+
+ # x is not an InitVar, so there will be a member x.
+ self.assertEqual(C(10).x, 10)
+
+ def test_classvar_module_level_import(self):
+ from test import dataclass_module_1
+ from test import dataclass_module_1_str
+ from test import dataclass_module_2
+ from test import dataclass_module_2_str
+
+ for m in (dataclass_module_1, dataclass_module_1_str,
+ dataclass_module_2, dataclass_module_2_str,
+ ):
+ with self.subTest(m=m):
+ # There's a difference in how the ClassVars are
+ # interpreted when using string annotations or
+ # not. See the imported modules for details.
+ if m.USING_STRINGS:
+ c = m.CV(10)
+ else:
+ c = m.CV()
+ self.assertEqual(c.cv0, 20)
+
+
+ # There's a difference in how the InitVars are
+ # interpreted when using string annotations or
+ # not. See the imported modules for details.
+ c = m.IV(0, 1, 2, 3, 4)
+
+ for field_name in ('iv0', 'iv1', 'iv2', 'iv3'):
+ with self.subTest(field_name=field_name):
+ with self.assertRaisesRegex(AttributeError, f"object has no attribute '{field_name}'"):
+ # Since field_name is an InitVar, it's
+ # not an instance field.
+ getattr(c, field_name)
+
+ if m.USING_STRINGS:
+ # iv4 is interpreted as a normal field.
+ self.assertIn('not_iv4', c.__dict__)
+ self.assertEqual(c.not_iv4, 4)
+ else:
+ # iv4 is interpreted as an InitVar, so it
+ # won't exist on the instance.
+ self.assertNotIn('not_iv4', c.__dict__)
+
+ def test_text_annotations(self):
+ from test import dataclass_textanno
+
+ self.assertEqual(
+ get_type_hints(dataclass_textanno.Bar),
+ {'foo': dataclass_textanno.Foo})
+ self.assertEqual(
+ get_type_hints(dataclass_textanno.Bar.__init__),
+ {'foo': dataclass_textanno.Foo,
+ 'return': type(None)})
+
+
class TestMakeDataclass(unittest.TestCase):
    """Tests for dataclasses.make_dataclass, the dynamic class factory."""

    def test_simple(self):
        C = make_dataclass('C',
                           [('x', int),
                            ('y', int, field(default=5))],
                           namespace={'add_one': lambda self: self.x + 1})
        c = C(10)
        self.assertEqual((c.x, c.y), (10, 5))
        self.assertEqual(c.add_one(), 11)

    def test_no_mutate_namespace(self):
        # Make sure a provided namespace isn't mutated.
        ns = {}
        C = make_dataclass('C',
                           [('x', int),
                            ('y', int, field(default=5))],
                           namespace=ns)
        self.assertEqual(ns, {})

    def test_base(self):
        class Base1:
            pass
        class Base2:
            pass
        C = make_dataclass('C',
                           [('x', int)],
                           bases=(Base1, Base2))
        c = C(2)
        self.assertIsInstance(c, C)
        self.assertIsInstance(c, Base1)
        self.assertIsInstance(c, Base2)

    def test_base_dataclass(self):
        @dataclass
        class Base1:
            x: int
        class Base2:
            pass
        C = make_dataclass('C',
                           [('y', int)],
                           bases=(Base1, Base2))
        with self.assertRaisesRegex(TypeError, 'required positional'):
            c = C(2)
        c = C(1, 2)
        self.assertIsInstance(c, C)
        self.assertIsInstance(c, Base1)
        self.assertIsInstance(c, Base2)

        self.assertEqual((c.x, c.y), (1, 2))

    def test_init_var(self):
        def post_init(self, y):
            self.x *= y

        C = make_dataclass('C',
                           [('x', int),
                            ('y', InitVar[int]),
                            ],
                           namespace={'__post_init__': post_init},
                           )
        c = C(2, 3)
        self.assertEqual(vars(c), {'x': 6})
        self.assertEqual(len(fields(c)), 1)

    def test_class_var(self):
        C = make_dataclass('C',
                           [('x', int),
                            ('y', ClassVar[int], 10),
                            ('z', ClassVar[int], field(default=20)),
                            ])
        c = C(1)
        self.assertEqual(vars(c), {'x': 1})
        self.assertEqual(len(fields(c)), 1)
        self.assertEqual(C.y, 10)
        self.assertEqual(C.z, 20)

    def test_other_params(self):
        C = make_dataclass('C',
                           [('x', int),
                            ('y', ClassVar[int], 10),
                            ('z', ClassVar[int], field(default=20)),
                            ],
                           init=False)
        # Make sure we have a repr, but no init.
        self.assertNotIn('__init__', vars(C))
        self.assertIn('__repr__', vars(C))

        # Make sure random other params don't work.
        with self.assertRaisesRegex(TypeError, 'unexpected keyword argument'):
            C = make_dataclass('C',
                               [],
                               xxinit=False)

    def test_no_types(self):
        # Fields given as bare names are annotated with the string 'typing.Any'.
        C = make_dataclass('Point', ['x', 'y', 'z'])
        c = C(1, 2, 3)
        self.assertEqual(vars(c), {'x': 1, 'y': 2, 'z': 3})
        self.assertEqual(C.__annotations__, {'x': 'typing.Any',
                                             'y': 'typing.Any',
                                             'z': 'typing.Any'})

        C = make_dataclass('Point', ['x', ('y', int), 'z'])
        c = C(1, 2, 3)
        self.assertEqual(vars(c), {'x': 1, 'y': 2, 'z': 3})
        self.assertEqual(C.__annotations__, {'x': 'typing.Any',
                                             'y': int,
                                             'z': 'typing.Any'})

    def test_invalid_type_specification(self):
        for bad_field in [(),
                          (1, 2, 3, 4),
                          ]:
            with self.subTest(bad_field=bad_field):
                with self.assertRaisesRegex(TypeError, r'Invalid field: '):
                    make_dataclass('C', ['a', bad_field])

        # And test for things with no len().
        for bad_field in [float,
                          lambda x: x,
                          ]:
            with self.subTest(bad_field=bad_field):
                with self.assertRaisesRegex(TypeError, r'has no len\(\)'):
                    make_dataclass('C', ['a', bad_field])

    def test_duplicate_field_names(self):
        # FIX: loop variable renamed from `field` to `field_name` so it no
        # longer shadows the imported dataclasses.field used elsewhere here.
        for field_name in ['a', 'ab']:
            with self.subTest(field_name=field_name):
                with self.assertRaisesRegex(TypeError, 'Field name duplicated'):
                    make_dataclass('C', [field_name, 'a', field_name])

    def test_keyword_field_names(self):
        # FIX: loop variable renamed from `field` (shadowed dataclasses.field).
        for field_name in ['for', 'async', 'await', 'as']:
            with self.subTest(field_name=field_name):
                with self.assertRaisesRegex(TypeError, 'must not be keywords'):
                    make_dataclass('C', ['a', field_name])
                with self.assertRaisesRegex(TypeError, 'must not be keywords'):
                    make_dataclass('C', [field_name])
                with self.assertRaisesRegex(TypeError, 'must not be keywords'):
                    make_dataclass('C', [field_name, 'a'])

    def test_non_identifier_field_names(self):
        # FIX: loop variable renamed from `field` (shadowed dataclasses.field).
        for field_name in ['()', 'x,y', '*', '2@3', '', 'little johnny tables']:
            with self.subTest(field_name=field_name):
                with self.assertRaisesRegex(TypeError, 'must be valid identifiers'):
                    make_dataclass('C', ['a', field_name])
                with self.assertRaisesRegex(TypeError, 'must be valid identifiers'):
                    make_dataclass('C', [field_name])
                with self.assertRaisesRegex(TypeError, 'must be valid identifiers'):
                    make_dataclass('C', [field_name, 'a'])

    def test_underscore_field_names(self):
        # Unlike namedtuple, it's okay if dataclass field names have
        # an underscore.
        make_dataclass('C', ['_', '_a', 'a_a', 'a_'])

    def test_funny_class_names_names(self):
        # No reason to prevent weird class names, since
        # types.new_class allows them.
        for classname in ['()', 'x,y', '*', '2@3', '']:
            with self.subTest(classname=classname):
                C = make_dataclass(classname, ['a', 'b'])
                self.assertEqual(C.__name__, classname)
+
class TestReplace(unittest.TestCase):
    """Tests for dataclasses.replace().

    NOTE: the recursive-repr tests below assert exact qualnames of the form
    "TestReplace.<test name>.<locals>.C(...)", so the class and method names
    in this test class must not be changed.
    """

    def test(self):
        @dataclass(frozen=True)
        class C:
            x: int
            y: int

        c = C(1, 2)
        c1 = replace(c, x=3)
        self.assertEqual(c1.x, 3)
        self.assertEqual(c1.y, 2)

    def test_frozen(self):
        @dataclass(frozen=True)
        class C:
            x: int
            y: int
            z: int = field(init=False, default=10)
            t: int = field(init=False, default=100)

        c = C(1, 2)
        c1 = replace(c, x=3)
        self.assertEqual((c.x, c.y, c.z, c.t), (1, 2, 10, 100))
        self.assertEqual((c1.x, c1.y, c1.z, c1.t), (3, 2, 10, 100))

        # Replacing an init=False field raises.
        with self.assertRaisesRegex(ValueError, 'init=False'):
            replace(c, x=3, z=20, t=50)
        with self.assertRaisesRegex(ValueError, 'init=False'):
            replace(c, z=20)
            # NOTE(review): the call below is unreachable — the line above
            # raises inside this `with` block.  Copied as-is from CPython's
            # test suite.
            replace(c, x=3, z=20, t=50)

        # Make sure the result is still frozen.
        with self.assertRaisesRegex(FrozenInstanceError, "cannot assign to field 'x'"):
            c1.x = 3

        # Make sure we can't replace an attribute that doesn't exist,
        # if we're also replacing one that does exist. Test this
        # here, because setting attributes on frozen instances is
        # handled slightly differently from non-frozen ones.
        with self.assertRaisesRegex(TypeError, r"__init__\(\) got an unexpected "
                                    "keyword argument 'a'"):
            c1 = replace(c, x=20, a=5)

    def test_invalid_field_name(self):
        @dataclass(frozen=True)
        class C:
            x: int
            y: int

        c = C(1, 2)
        with self.assertRaisesRegex(TypeError, r"__init__\(\) got an unexpected "
                                    "keyword argument 'z'"):
            c1 = replace(c, z=3)

    def test_invalid_object(self):
        @dataclass(frozen=True)
        class C:
            x: int
            y: int

        # replace() only accepts dataclass *instances*, not classes or
        # arbitrary objects.
        with self.assertRaisesRegex(TypeError, 'dataclass instance'):
            replace(C, x=3)

        with self.assertRaisesRegex(TypeError, 'dataclass instance'):
            replace(0, x=3)

    def test_no_init(self):
        @dataclass
        class C:
            x: int
            y: int = field(init=False, default=10)

        c = C(1)
        c.y = 20

        # Make sure y gets the default value.
        c1 = replace(c, x=5)
        self.assertEqual((c1.x, c1.y), (5, 10))

        # Trying to replace y is an error.
        with self.assertRaisesRegex(ValueError, 'init=False'):
            replace(c, x=2, y=30)

        with self.assertRaisesRegex(ValueError, 'init=False'):
            replace(c, y=30)

    def test_classvar(self):
        @dataclass
        class C:
            x: int
            y: ClassVar[int] = 1000

        c = C(1)
        d = C(2)

        self.assertIs(c.y, d.y)
        self.assertEqual(c.y, 1000)

        # Trying to replace y is an error: can't replace ClassVars.
        with self.assertRaisesRegex(TypeError, r"__init__\(\) got an "
                                    "unexpected keyword argument 'y'"):
            replace(c, y=30)

        replace(c, x=5)

    def test_initvar_is_specified(self):
        # An InitVar with no default must be re-supplied on every replace().
        @dataclass
        class C:
            x: int
            y: InitVar[int]

            def __post_init__(self, y):
                self.x *= y

        c = C(1, 10)
        self.assertEqual(c.x, 10)
        with self.assertRaisesRegex(ValueError, r"InitVar 'y' must be "
                                    "specified with replace()"):
            replace(c, x=3)
        c = replace(c, x=3, y=5)
        self.assertEqual(c.x, 15)

    def test_initvar_with_default_value(self):
        @dataclass
        class C:
            x: int
            y: InitVar[int] = None
            z: InitVar[int] = 42

            def __post_init__(self, y, z):
                if y is not None:
                    self.x += y
                if z is not None:
                    self.x += z

        c = C(x=1, y=10, z=1)
        # Unspecified InitVars fall back to their defaults on replace().
        self.assertEqual(replace(c), C(x=12))
        self.assertEqual(replace(c, y=4), C(x=12, y=4, z=42))
        self.assertEqual(replace(c, y=4, z=1), C(x=12, y=4, z=1))

    def test_recursive_repr(self):
        @dataclass
        class C:
            f: "C"

        c = C(None)
        c.f = c
        self.assertEqual(repr(c), "TestReplace.test_recursive_repr.<locals>.C(f=...)")

    def test_recursive_repr_two_attrs(self):
        @dataclass
        class C:
            f: "C"
            g: "C"

        c = C(None, None)
        c.f = c
        c.g = c
        self.assertEqual(repr(c), "TestReplace.test_recursive_repr_two_attrs"
                                  ".<locals>.C(f=..., g=...)")

    def test_recursive_repr_indirection(self):
        @dataclass
        class C:
            f: "D"

        @dataclass
        class D:
            f: "C"

        c = C(None)
        d = D(None)
        c.f = d
        d.f = c
        self.assertEqual(repr(c), "TestReplace.test_recursive_repr_indirection"
                                  ".<locals>.C(f=TestReplace.test_recursive_repr_indirection"
                                  ".<locals>.D(f=...))")

    def test_recursive_repr_indirection_two(self):
        @dataclass
        class C:
            f: "D"

        @dataclass
        class D:
            f: "E"

        @dataclass
        class E:
            f: "C"

        c = C(None)
        d = D(None)
        e = E(None)
        c.f = d
        d.f = e
        e.f = c
        self.assertEqual(repr(c), "TestReplace.test_recursive_repr_indirection_two"
                                  ".<locals>.C(f=TestReplace.test_recursive_repr_indirection_two"
                                  ".<locals>.D(f=TestReplace.test_recursive_repr_indirection_two"
                                  ".<locals>.E(f=...)))")

    def test_recursive_repr_misc_attrs(self):
        @dataclass
        class C:
            f: "C"
            g: int

        c = C(None, 1)
        c.f = c
        self.assertEqual(repr(c), "TestReplace.test_recursive_repr_misc_attrs"
                                  ".<locals>.C(f=..., g=1)")

    ## def test_initvar(self):
    ##     @dataclass
    ##     class C:
    ##         x: int
    ##         y: InitVar[int]

    ##     c = C(1, 10)
    ##     d = C(2, 20)

    ##     # In our case, replacing an InitVar is a no-op
    ##     self.assertEqual(c, replace(c, y=5))

    ##     replace(c, x=5)
+
+class TestAbstract(unittest.TestCase):
+ def test_abc_implementation(self):
+ class Ordered(abc.ABC):
+ @abc.abstractmethod
+ def __lt__(self, other):
+ pass
+
+ @abc.abstractmethod
+ def __le__(self, other):
+ pass
+
+ @dataclass(order=True)
+ class Date(Ordered):
+ year: int
+ month: 'Month'
+ day: 'int'
+
+ self.assertFalse(inspect.isabstract(Date))
+ self.assertGreater(Date(2020,12,25), Date(2020,8,31))
+
+ def test_maintain_abc(self):
+ class A(abc.ABC):
+ @abc.abstractmethod
+ def foo(self):
+ pass
+
+ @dataclass
+ class Date(A):
+ year: int
+ month: 'Month'
+ day: 'int'
+
+ self.assertTrue(inspect.isabstract(Date))
+ msg = 'class Date without an implementation for abstract method foo'
+ self.assertRaisesRegex(TypeError, msg, Date)
+
+
+class TestMatchArgs(unittest.TestCase):
+ def test_match_args(self):
+ @dataclass
+ class C:
+ a: int
+ self.assertEqual(C(42).__match_args__, ('a',))
+
+ def test_explicit_match_args(self):
+ ma = ()
+ @dataclass
+ class C:
+ a: int
+ __match_args__ = ma
+ self.assertIs(C(42).__match_args__, ma)
+
+ def test_bpo_43764(self):
+ @dataclass(repr=False, eq=False, init=False)
+ class X:
+ a: int
+ b: int
+ c: int
+ self.assertEqual(X.__match_args__, ("a", "b", "c"))
+
+ def test_match_args_argument(self):
+ @dataclass(match_args=False)
+ class X:
+ a: int
+ self.assertNotIn('__match_args__', X.__dict__)
+
+ @dataclass(match_args=False)
+ class Y:
+ a: int
+ __match_args__ = ('b',)
+ self.assertEqual(Y.__match_args__, ('b',))
+
+ @dataclass(match_args=False)
+ class Z(Y):
+ z: int
+ self.assertEqual(Z.__match_args__, ('b',))
+
+ # Ensure parent dataclass __match_args__ is seen, if child class
+ # specifies match_args=False.
+ @dataclass
+ class A:
+ a: int
+ z: int
+ @dataclass(match_args=False)
+ class B(A):
+ b: int
+ self.assertEqual(B.__match_args__, ('a', 'z'))
+
+ def test_make_dataclasses(self):
+ C = make_dataclass('C', [('x', int), ('y', int)])
+ self.assertEqual(C.__match_args__, ('x', 'y'))
+
+ C = make_dataclass('C', [('x', int), ('y', int)], match_args=True)
+ self.assertEqual(C.__match_args__, ('x', 'y'))
+
+ C = make_dataclass('C', [('x', int), ('y', int)], match_args=False)
+ self.assertNotIn('__match__args__', C.__dict__)
+
+ C = make_dataclass('C', [('x', int), ('y', int)], namespace={'__match_args__': ('z',)})
+ self.assertEqual(C.__match_args__, ('z',))
+
+
class TestKeywordArgs(unittest.TestCase):
    """Tests for keyword-only dataclass fields (kw_only / KW_ONLY).

    NOTE: several assertions below match CPython's exact TypeError wording
    ("takes N positional arguments but ..."), so they are sensitive to the
    interpreter's error-message text.
    """

    def test_no_classvar_kwarg(self):
        # kw_only makes no sense on a ClassVar pseudo-field and is rejected.
        msg = 'field a is a ClassVar but specifies kw_only'
        with self.assertRaisesRegex(TypeError, msg):
            @dataclass
            class A:
                a: ClassVar[int] = field(kw_only=True)

        with self.assertRaisesRegex(TypeError, msg):
            @dataclass
            class A:
                a: ClassVar[int] = field(kw_only=False)

        with self.assertRaisesRegex(TypeError, msg):
            @dataclass(kw_only=True)
            class A:
                a: ClassVar[int] = field(kw_only=False)

    def test_field_marked_as_kwonly(self):
        # field(kw_only=...) always wins over the class-level kw_only setting.
        #######################
        # Using dataclass(kw_only=True)
        @dataclass(kw_only=True)
        class A:
            a: int
        self.assertTrue(fields(A)[0].kw_only)

        @dataclass(kw_only=True)
        class A:
            a: int = field(kw_only=True)
        self.assertTrue(fields(A)[0].kw_only)

        @dataclass(kw_only=True)
        class A:
            a: int = field(kw_only=False)
        self.assertFalse(fields(A)[0].kw_only)

        #######################
        # Using dataclass(kw_only=False)
        @dataclass(kw_only=False)
        class A:
            a: int
        self.assertFalse(fields(A)[0].kw_only)

        @dataclass(kw_only=False)
        class A:
            a: int = field(kw_only=True)
        self.assertTrue(fields(A)[0].kw_only)

        @dataclass(kw_only=False)
        class A:
            a: int = field(kw_only=False)
        self.assertFalse(fields(A)[0].kw_only)

        #######################
        # Not specifying dataclass(kw_only)
        @dataclass
        class A:
            a: int
        self.assertFalse(fields(A)[0].kw_only)

        @dataclass
        class A:
            a: int = field(kw_only=True)
        self.assertTrue(fields(A)[0].kw_only)

        @dataclass
        class A:
            a: int = field(kw_only=False)
        self.assertFalse(fields(A)[0].kw_only)

    def test_match_args(self):
        # kw fields don't show up in __match_args__.
        @dataclass(kw_only=True)
        class C:
            a: int
        self.assertEqual(C(a=42).__match_args__, ())

        @dataclass
        class C:
            a: int
            b: int = field(kw_only=True)
        self.assertEqual(C(42, b=10).__match_args__, ('a',))

    def test_KW_ONLY(self):
        # Fields after the KW_ONLY marker become keyword-only.
        @dataclass
        class A:
            a: int
            _: KW_ONLY
            b: int
            c: int
        A(3, c=5, b=4)
        msg = "takes 2 positional arguments but 4 were given"
        with self.assertRaisesRegex(TypeError, msg):
            A(3, 4, 5)

        @dataclass(kw_only=True)
        class B:
            a: int
            _: KW_ONLY
            b: int
            c: int
        B(a=3, b=4, c=5)
        msg = "takes 1 positional argument but 4 were given"
        with self.assertRaisesRegex(TypeError, msg):
            B(3, 4, 5)

        # Explicitly make a field that follows KW_ONLY be non-keyword-only.
        @dataclass
        class C:
            a: int
            _: KW_ONLY
            b: int
            c: int = field(kw_only=False)
        c = C(1, 2, b=3)
        self.assertEqual(c.a, 1)
        self.assertEqual(c.b, 3)
        self.assertEqual(c.c, 2)
        c = C(1, b=3, c=2)
        self.assertEqual(c.a, 1)
        self.assertEqual(c.b, 3)
        self.assertEqual(c.c, 2)
        c = C(1, b=3, c=2)
        self.assertEqual(c.a, 1)
        self.assertEqual(c.b, 3)
        self.assertEqual(c.c, 2)
        c = C(c=2, b=3, a=1)
        self.assertEqual(c.a, 1)
        self.assertEqual(c.b, 3)
        self.assertEqual(c.c, 2)

    def test_KW_ONLY_as_string(self):
        # The marker also works when given as a string annotation.
        @dataclass
        class A:
            a: int
            _: 'dataclasses.KW_ONLY'
            b: int
            c: int
        A(3, c=5, b=4)
        msg = "takes 2 positional arguments but 4 were given"
        with self.assertRaisesRegex(TypeError, msg):
            A(3, 4, 5)

    def test_KW_ONLY_twice(self):
        # Only one KW_ONLY marker is allowed per dataclass definition.
        msg = "'Y' is KW_ONLY, but KW_ONLY has already been specified"

        with self.assertRaisesRegex(TypeError, msg):
            @dataclass
            class A:
                a: int
                X: KW_ONLY
                Y: KW_ONLY
                b: int
                c: int

        with self.assertRaisesRegex(TypeError, msg):
            @dataclass
            class A:
                a: int
                X: KW_ONLY
                b: int
                Y: KW_ONLY
                c: int

        with self.assertRaisesRegex(TypeError, msg):
            @dataclass
            class A:
                a: int
                X: KW_ONLY
                b: int
                c: int
                Y: KW_ONLY

        # But this usage is okay, since it's not using KW_ONLY.
        @dataclass
        class A:
            a: int
            _: KW_ONLY
            b: int
            c: int = field(kw_only=True)

        # And if inheriting, it's okay.
        @dataclass
        class A:
            a: int
            _: KW_ONLY
            b: int
            c: int
        @dataclass
        class B(A):
            _: KW_ONLY
            d: int

        # Make sure the error is raised in a derived class.
        with self.assertRaisesRegex(TypeError, msg):
            @dataclass
            class A:
                a: int
                _: KW_ONLY
                b: int
                c: int
            @dataclass
            class B(A):
                X: KW_ONLY
                d: int
                Y: KW_ONLY

    def test_post_init(self):
        # InitVars after KW_ONLY are passed to __post_init__ by keyword.
        @dataclass
        class A:
            a: int
            _: KW_ONLY
            b: InitVar[int]
            c: int
            d: InitVar[int]
            def __post_init__(self, b, d):
                raise CustomError(f'{b=} {d=}')
        with self.assertRaisesRegex(CustomError, 'b=3 d=4'):
            A(1, c=2, b=3, d=4)

        @dataclass
        class B:
            a: int
            _: KW_ONLY
            b: InitVar[int]
            c: int
            d: InitVar[int]
            def __post_init__(self, b, d):
                self.a = b
                self.c = d
        b = B(1, c=2, b=3, d=4)
        self.assertEqual(asdict(b), {'a': 3, 'c': 4})

    def test_defaults(self):
        # For kwargs, make sure we can have defaults after non-defaults.
        @dataclass
        class A:
            a: int = 0
            _: KW_ONLY
            b: int
            c: int = 1
            d: int

        a = A(d=4, b=3)
        self.assertEqual(a.a, 0)
        self.assertEqual(a.b, 3)
        self.assertEqual(a.c, 1)
        self.assertEqual(a.d, 4)

        # Make sure we still check for non-kwarg non-defaults not following
        # defaults.
        err_regex = "non-default argument 'z' follows default argument"
        with self.assertRaisesRegex(TypeError, err_regex):
            @dataclass
            class A:
                a: int = 0
                z: int
                _: KW_ONLY
                b: int
                c: int = 1
                d: int

    def test_make_dataclass(self):
        A = make_dataclass("A", ['a'], kw_only=True)
        self.assertTrue(fields(A)[0].kw_only)

        B = make_dataclass("B",
                           ['a', ('b', int, field(kw_only=False))],
                           kw_only=True)
        self.assertTrue(fields(B)[0].kw_only)
        self.assertFalse(fields(B)[1].kw_only)
+
+
if __name__ == '__main__':
    # Allow running this test file directly as a script.
    unittest.main()
diff --git a/Tools/make_dataclass_tests.py b/Tools/make_dataclass_tests.py
new file mode 100644
index 000000000..dc38eee70
--- /dev/null
+++ b/Tools/make_dataclass_tests.py
@@ -0,0 +1,443 @@
+# Used to generate tests/run/test_dataclasses.pyx by translating the CPython test suite
+# dataclass file. Initially run using Python 3.10 - this file is not designed to be
+# backwards compatible since it will be run manually and infrequently.
+
+import ast
+import os.path
+import sys
+
# Names used by the CPython test file that are not available when the
# generated tests run under Cython; tests referencing them are rewritten
# or skipped by this script.
unavailable_functions = frozenset(
    {
        "dataclass_textanno",  # part of CPython test module
        "dataclass_module_1",  # part of CPython test module
        "make_dataclass",  # not implemented in Cython dataclasses (probably won't be implemented)
    }
)
+
# Tests to omit from the generated .pyx file.
# A 1-tuple ("TestClass",) skips the entire class; a 2-tuple
# ("TestClass", "test_name") skips a single test method.
skip_tests = frozenset(
    {
        # needs Cython compile
        # ====================
        ("TestCase", "test_field_default_default_factory_error"),
        ("TestCase", "test_two_fields_one_default"),
        ("TestCase", "test_overwrite_hash"),
        ("TestCase", "test_eq_order"),
        ("TestCase", "test_no_unhashable_default"),
        ("TestCase", "test_disallowed_mutable_defaults"),
        ("TestCase", "test_classvar_default_factory"),
        ("TestCase", "test_field_metadata_mapping"),
        ("TestFieldNoAnnotation", "test_field_without_annotation"),
        (
            "TestFieldNoAnnotation",
            "test_field_without_annotation_but_annotation_in_base",
        ),
        (
            "TestFieldNoAnnotation",
            "test_field_without_annotation_but_annotation_in_base_not_dataclass",
        ),
        ("TestOrdering", "test_overwriting_order"),
        ("TestHash", "test_hash_rules"),
        ("TestHash", "test_hash_no_args"),
        ("TestFrozen", "test_inherit_nonfrozen_from_empty_frozen"),
        ("TestFrozen", "test_inherit_nonfrozen_from_frozen"),
        ("TestFrozen", "test_inherit_frozen_from_nonfrozen"),
        ("TestFrozen", "test_overwriting_frozen"),
        ("TestSlots", "test_add_slots_when_slots_exists"),
        ("TestSlots", "test_cant_inherit_from_iterator_slots"),
        ("TestSlots", "test_weakref_slot_without_slot"),
        ("TestKeywordArgs", "test_no_classvar_kwarg"),
        ("TestKeywordArgs", "test_KW_ONLY_twice"),
        ("TestKeywordArgs", "test_defaults"),
        # uses local variable in class definition
        ("TestCase", "test_default_factory"),
        ("TestCase", "test_default_factory_with_no_init"),
        ("TestCase", "test_field_default"),
        ("TestCase", "test_function_annotations"),
        ("TestDescriptors", "test_lookup_on_instance"),
        ("TestCase", "test_default_factory_not_called_if_value_given"),
        ("TestCase", "test_class_attrs"),
        ("TestCase", "test_hash_field_rules"),
        ("TestStringAnnotations",),  # almost all the texts here use local variables
        # Currently unsupported
        # =====================
        (
            "TestOrdering",
            "test_functools_total_ordering",
        ),  # combination of cython dataclass and total_ordering
        ("TestCase", "test_missing_default_factory"),  # we're MISSING MISSING
        ("TestCase", "test_missing_default"),  # MISSING
        ("TestCase", "test_missing_repr"),  # MISSING
        ("TestSlots",),  # __slots__ isn't understood
        ("TestMatchArgs",),
        ("TestKeywordArgs", "test_field_marked_as_kwonly"),
        ("TestKeywordArgs", "test_match_args"),
        ("TestKeywordArgs", "test_KW_ONLY"),
        ("TestKeywordArgs", "test_KW_ONLY_as_string"),
        ("TestKeywordArgs", "test_post_init"),
        (
            "TestCase",
            "test_class_var_frozen",
        ),  # __annotations__ not present on cdef classes https://github.com/cython/cython/issues/4519
        ("TestCase", "test_dont_include_other_annotations"),  # __annotations__
        ("TestDocString",),  # don't think cython dataclasses currently set __doc__
        # either cython.dataclasses.field or cython.dataclasses.dataclass called directly as functions
        # (will probably never be supported)
        ("TestCase", "test_field_repr"),
        ("TestCase", "test_dynamic_class_creation"),
        ("TestCase", "test_dynamic_class_creation_using_field"),
        # Requires inheritance from non-cdef class
        ("TestCase", "test_is_dataclass_genericalias"),
        ("TestCase", "test_generic_extending"),
        ("TestCase", "test_generic_dataclasses"),
        ("TestCase", "test_generic_dynamic"),
        ("TestInit", "test_inherit_from_protocol"),
        ("TestAbstract", "test_abc_implementation"),
        ("TestAbstract", "test_maintain_abc"),
        # Requires multiple inheritance from extension types
        ("TestCase", "test_post_init_not_auto_added"),
        # Refers to nonlocal from enclosing function
        (
            "TestCase",
            "test_post_init_staticmethod",
        ),  # TODO replicate the gist of the test elsewhere
        # PEP487 isn't support in Cython
        ("TestDescriptors", "test_non_descriptor"),
        ("TestDescriptors", "test_set_name"),
        ("TestDescriptors", "test_setting_field_calls_set"),
        ("TestDescriptors", "test_setting_uninitialized_descriptor_field"),
        # Looks up __dict__, which cdef classes don't typically have
        ("TestCase", "test_init_false_no_default"),
        ("TestCase", "test_init_var_inheritance"),  # __dict__ again
        ("TestCase", "test_base_has_init"),
        ("TestInit", "test_base_has_init"),  # needs __dict__ for vars
        # Requires arbitrary attributes to be writeable
        ("TestCase", "test_post_init_super"),
        ('TestCase', 'test_init_in_order'),
        # Cython being strict about argument types - expected difference
        ("TestDescriptors", "test_getting_field_calls_get"),
        ("TestDescriptors", "test_init_calls_set"),
        ("TestHash", "test_eq_only"),
        # I think an expected difference with cdef classes - the property will be in the dict
        ("TestCase", "test_items_in_dicts"),
        # These tests are probably fine, but the string substitution in this file doesn't get it right
        ("TestRepr", "test_repr"),
        ("TestCase", "test_not_in_repr"),
        ('TestRepr', 'test_no_repr'),
        # class variable doesn't exist in Cython so uninitialized variable appears differently - for now this is deliberate
        ('TestInit', 'test_no_init'),
        # I believe the test works but the ordering functions do appear in the class dict (and default slot wrappers which
        # just raise NotImplementedError)
        ('TestOrdering', 'test_no_order'),
        # not possible to add attributes on extension types
        ("TestCase", "test_post_init_classmethod"),
        # Cannot redefine the same field in a base dataclass (tested in dataclass_e6)
        ("TestCase", "test_field_order"),
        (
            "TestCase",
            "test_overwrite_fields_in_derived_class",
        ),
        # Bugs
        # ======
        # not specifically a dataclass issue - a C int crashes classvar
        ("TestCase", "test_class_var"),
        (
            "TestFrozen",
        ),  # raises AttributeError, not FrozenInstanceError (may be hard to fix)
        ('TestCase', 'test_post_init'),  # Works except for AttributeError instead of FrozenInstanceError
        ("TestReplace", "test_frozen"),  # AttributeError not FrozenInstanceError
        (
            "TestCase",
            "test_dataclasses_qualnames",
        ),  # doesn't define __setattr__ and just relies on Cython to enforce readonly properties
        ("TestCase", "test_compare_subclasses"),  # wrong comparison
        ("TestCase", "test_simple_compare"),  # wrong comparison
        (
            "TestCase",
            "test_field_named_self",
        ),  # I think just an error in inspecting the signature
        (
            "TestCase",
            "test_init_var_default_factory",
        ),  # should be raising a compile error
        ("TestCase", "test_init_var_no_default"),  # should be raising a compile error
        ("TestCase", "test_init_var_with_default"),  # not sure...
        ("TestReplace", "test_initvar_with_default_value"),  # needs investigating
        # Maybe bugs?
        # ==========
        # non-default argument 'z' follows default argument in dataclass __init__ - this message looks right to me!
        ("TestCase", "test_class_marker"),
        # cython.dataclasses.field parameter 'metadata' must be a literal value - possibly not something we can support?
        ("TestCase", "test_field_metadata_custom_mapping"),
        (
            "TestCase",
            "test_class_var_default_factory",
        ),  # possibly to do with ClassVar being assigned a field
        (
            "TestCase",
            "test_class_var_with_default",
        ),  # possibly to do with ClassVar being assigned a field
        (
            "TestDescriptors",
        ),  # mostly don't work - I think this may be a limitation of cdef classes but needs investigating
    }
)
+
# Maps (class name, test name) to the minimum Python version, as a
# (major, minor) tuple, required to run that test.
version_specific_skips = {
    # The version numbers are the first version that the test should be run on
    ("TestCase", "test_init_var_preserve_type"): (
        3,
        10,
    ),  # needs language support for | operator on types
}
+
class DataclassInDecorators(ast.NodeVisitor):
    """AST visitor that records whether the name ``dataclass`` occurs
    anywhere in the visited tree (e.g. a decorator expression)."""

    found = False  # class-level default; shadowed on the instance once seen

    def visit_Name(self, node):
        """Latch `found` when the searched-for name appears."""
        if node.id == "dataclass":
            self.found = True
        return self.generic_visit(node)

    def generic_visit(self, node):
        """Stop descending into the tree as soon as a match was found."""
        if self.found:
            return None  # nothing more to do
        return super().generic_visit(node)
+
+
def dataclass_in_decorators(decorator_list):
    """Return True when any decorator expression mentions ``dataclass``."""
    finder = DataclassInDecorators()
    for decorator in decorator_list:
        finder.visit(decorator)
    # `found` latches once set (generic_visit stops descending after a hit),
    # so checking it after the loop is equivalent to an early return.
    return finder.found
+
+
class SubstituteNameString(ast.NodeTransformer):
    """Rewrites "<locals>"-qualified class names inside string constants.

    `substitutions` maps the original nested class name to the new
    module-level name it was hoisted to, e.g. "C" -> "C_test_frozen".
    """

    def __init__(self, substitutions):
        super().__init__()
        self.substitutions = substitutions

    def visit_Constant(self, node):
        # attempt to handle some difference in class names
        # (note: requires Python>=3.8)
        if isinstance(node.value, str):
            if node.value.find("<locals>") != -1:
                import re

                # BUG FIX: regex patterns are now raw strings; "\w" inside a
                # plain string literal is an invalid escape sequence
                # (DeprecationWarning; SyntaxWarning on Python 3.12+).
                new_value = new_value2 = re.sub(r"[\w.]*<locals>", "", node.value)
                for key, value in self.substitutions.items():
                    new_value2 = re.sub(rf"(?<![\w])[.]{key}(?![\w])", value, new_value2)
                if new_value != new_value2:
                    node.value = new_value2
        return node
+
+
class SubstituteName(SubstituteNameString):
    """Like SubstituteNameString, but also renames Name nodes that are read
    (loads); assignment targets keep their original name."""

    def visit_Name(self, node):
        # Leave left-hand sides of assignments alone.
        if isinstance(node.ctx, ast.Store):
            return node
        new_id = self.substitutions.get(node.id)
        if new_id is None:
            return node
        return ast.Name(id=new_id, ctx=node.ctx)
+
+
class IdentifyCdefClasses(ast.NodeVisitor):
    """Collect nested classes that must be generated as Cython cdef classes.

    A nested class is marked when it is decorated with @dataclass; the mark
    is then propagated to its first base class (recursively) so that the
    emitted inheritance hierarchy stays consistent.
    """

    def __init__(self):
        super().__init__()
        self.top_level_class = True  # True while visiting at module level
        self.classes = {}  # name -> ClassDef of nested classes seen so far
        self.cdef_classes = set()  # ClassDef nodes to be emitted as cdef classes

    def visit_ClassDef(self, node):
        # Save/restore whether *this* class is top-level; anything we recurse
        # into is by definition nested.
        top_level_class, self.top_level_class = self.top_level_class, False
        try:
            if not top_level_class:
                self.classes[node.name] = node
                if dataclass_in_decorators(node.decorator_list):
                    self.handle_cdef_class(node)
                self.generic_visit(node)  # any nested classes in it?
            else:
                self.generic_visit(node)
        finally:
            self.top_level_class = top_level_class

    def visit_FunctionDef(self, node):
        # Classes defined in different functions cannot see each other, so
        # give each function body a fresh name->ClassDef scope.
        classes, self.classes = self.classes, {}
        self.generic_visit(node)
        self.classes = classes

    def handle_cdef_class(self, cls_node):
        # Mark cls_node (idempotently), then propagate to its first base.
        if cls_node not in self.cdef_classes:
            self.cdef_classes.add(cls_node)
            # go back through previous classes we've seen and pick out any first bases
            if cls_node.bases and isinstance(cls_node.bases[0], ast.Name):
                base0_node = self.classes.get(cls_node.bases[0].id)
                if base0_node:
                    self.handle_cdef_class(base0_node)
+
+
+class ExtractDataclassesToTopLevel(ast.NodeTransformer):
    def __init__(self, cdef_classes_set):
        super().__init__()
        self.nested_name = []  # stack of enclosing class/function names
        self.current_function_global_classes = []  # classes hoisted from the current function
        self.global_classes = []  # all classes moved to module level so far
        self.cdef_classes_set = cdef_classes_set  # ClassDef nodes to decorate with @cclass
        self.used_names = set()  # module-level names already taken
        self.collected_substitutions = {}  # old class name -> new global name
        self.uses_unavailable_name = False  # set when a known-missing name is referenced
        self.top_level_class = True  # whether the visitor is currently at module level
+
+ def visit_ClassDef(self, node):
+ if not self.top_level_class:
+ # Include any non-toplevel class in this to be able
+ # to test inheritance.
+
+ self.generic_visit(node) # any nested classes in it?
+ if not node.body:
+ node.body.append(ast.Pass)
+
+ # First, make it a C class.
+ if node in self.cdef_classes_set:
+ node.decorator_list.append(ast.Name(id="cclass", ctx=ast.Load()))
+ # otherwise move it to the global scope, but don't make it cdef
+ # change the name
+ old_name = node.name
+ new_name = "_".join([node.name] + self.nested_name)
+ while new_name in self.used_names:
+ new_name = new_name + "_"
+ node.name = new_name
+ self.current_function_global_classes.append(node)
+ self.used_names.add(new_name)
+ # hmmmm... possibly there's a few cases where there's more than one name?
+ self.collected_substitutions[old_name] = node.name
+
+ return ast.Assign(
+ targets=[ast.Name(id=old_name, ctx=ast.Store())],
+ value=ast.Name(id=new_name, ctx=ast.Load()),
+ lineno=-1,
+ )
+ else:
+ top_level_class, self.top_level_class = self.top_level_class, False
+ self.nested_name.append(node.name)
+ if tuple(self.nested_name) in skip_tests:
+ self.top_level_class = top_level_class
+ self.nested_name.pop()
+ return None
+ self.generic_visit(node)
+ self.nested_name.pop()
+ if not node.body:
+ node.body.append(ast.Pass())
+ self.top_level_class = top_level_class
+ return node
+
+ def visit_FunctionDef(self, node):
+ self.nested_name.append(node.name)
+ if tuple(self.nested_name) in skip_tests:
+ self.nested_name.pop()
+ return None
+ if tuple(self.nested_name) in version_specific_skips:
+ version = version_specific_skips[tuple(self.nested_name)]
+ decorator = ast.parse(
+ f"skip_on_versions_below({version})", mode="eval"
+ ).body
+ node.decorator_list.append(decorator)
+ collected_subs, self.collected_substitutions = self.collected_substitutions, {}
+ uses_unavailable_name, self.uses_unavailable_name = (
+ self.uses_unavailable_name,
+ False,
+ )
+ current_func_globs, self.current_function_global_classes = (
+ self.current_function_global_classes,
+ [],
+ )
+
+ # visit once to work out what the substitutions should be
+ self.generic_visit(node)
+ if self.collected_substitutions:
+ # replace strings in this function
+ node = SubstituteNameString(self.collected_substitutions).visit(node)
+ replacer = SubstituteName(self.collected_substitutions)
+ # replace any base classes
+ for global_class in self.current_function_global_classes:
+ global_class = replacer.visit(global_class)
+ self.global_classes.append(self.current_function_global_classes)
+
+ self.nested_name.pop()
+ self.collected_substitutions = collected_subs
+ if self.uses_unavailable_name:
+ node = None
+ self.uses_unavailable_name = uses_unavailable_name
+ self.current_function_global_classes = current_func_globs
+ return node
+
+ def visit_Name(self, node):
+ if node.id in unavailable_functions:
+ self.uses_unavailable_name = True
+ return self.generic_visit(node)
+
+ def visit_Import(self, node):
+ return None # drop imports, we add these into the text ourself
+
+ def visit_ImportFrom(self, node):
+ return None # drop imports, we add these into the text ourself
+
+ def visit_Call(self, node):
+ if (
+ isinstance(node.func, ast.Attribute)
+ and node.func.attr == "assertRaisesRegex"
+ ):
+ # we end up with a bunch of subtle name changes that are very hard to correct for
+ # therefore, replace with "assertRaises"
+ node.func.attr = "assertRaises"
+ node.args.pop()
+ return self.generic_visit(node)
+
+ def visit_Module(self, node):
+ self.generic_visit(node)
+ node.body[0:0] = self.global_classes
+ return node
+
+ def visit_AnnAssign(self, node):
+ # string annotations are forward declarations but the string will be wrong
+ # (because we're renaming the class)
+ if (isinstance(node.annotation, ast.Constant) and
+ isinstance(node.annotation.value, str)):
+ # although it'd be good to resolve these declarations, for the
+ # sake of the tests they only need to be "object"
+ node.annotation = ast.Name(id="object", ctx=ast.Load)
+
+ return node
+
+
def main():
    """Generate tests/run/test_dataclasses.pyx from the CPython dataclass
    test suite by hoisting nested dataclasses and marking them as cdef
    classes, then unparsing the transformed module."""
    script_dir = os.path.split(sys.argv[0])[0]
    filename = "test_dataclasses.py"
    source_path = os.path.join(script_dir, "dataclass_test_data", filename)
    with open(source_path, "r") as source_file:
        tree = ast.parse(source_file.read(), filename)

    # Pass 1: find which classes must become cdef classes.
    finder = IdentifyCdefClasses()
    finder.visit(tree)
    # Pass 2: hoist/rename those classes and patch up references.
    tree = ExtractDataclassesToTopLevel(finder.cdef_classes).visit(tree)

    output_path = os.path.join(script_dir, "..", "tests", "run", filename + "x")
    with open(output_path, "w") as out:
        header_lines = [
            "# AUTO-GENERATED BY Tools/make_dataclass_tests.py",
            "# DO NOT EDIT",
            "",
            # the directive doesn't get applied outside the include if it's put
            # in the pxi file
            "# cython: language_level=3",
            # any extras Cython needs to add go in this include file
            'include "test_dataclasses.pxi"',
            "",
        ]
        for line in header_lines:
            print(line, file=out)
        print(ast.unparse(tree), file=out)


if __name__ == "__main__":
    main()
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 6787b98cc..000000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,138 +0,0 @@
-# https://ci.appveyor.com/project/cython/cython
-
-environment:
-
- global:
- # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
- # /E:ON and /V:ON options are not enabled in the batch script interpreter
- # See: https://stackoverflow.com/questions/11267463/compiling-python-modules-on-windows-x64/13751649#13751649
- WITH_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd"
- BACKEND: c
- PARALLEL: "-j4"
- EXTRA_CFLAGS: ""
-
- matrix:
- - PYTHON: "C:\\Python27"
- PYTHON_VERSION: "2.7"
- PYTHON_ARCH: "32"
- PYTHONIOENCODING: "utf-8"
- PARALLEL: ""
-
- - PYTHON: "C:\\Python27-x64"
- PYTHON_VERSION: "2.7"
- PYTHON_ARCH: "64"
- PYTHONIOENCODING: "utf-8"
- PARALLEL: ""
-
- - PYTHON: "C:\\Python39"
- PYTHON_VERSION: "3.9"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python39-x64"
- PYTHON_VERSION: "3.9"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python38"
- PYTHON_VERSION: "3.8"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python38-x64"
- PYTHON_VERSION: "3.8"
- PYTHON_ARCH: "64"
- EXTRA_CFLAGS: "-DCYTHON_USE_TYPE_SPECS=1"
-
- - PYTHON: "C:\\Python38-x64"
- PYTHON_VERSION: "3.8"
- PYTHON_ARCH: "64"
- BACKEND: c,cpp
-
- - PYTHON: "C:\\Python37"
- PYTHON_VERSION: "3.7"
- PYTHON_ARCH: "32"
- BACKEND: c,cpp
-
- - PYTHON: "C:\\Python37-x64"
- PYTHON_VERSION: "3.7"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python37-x64"
- PYTHON_VERSION: "3.7"
- PYTHON_ARCH: "64"
- EXTRA_CFLAGS: "-DCYTHON_USE_TYPE_SPECS=1"
-
- - PYTHON: "C:\\Python37-x64"
- PYTHON_VERSION: "3.7"
- PYTHON_ARCH: "64"
- BACKEND: cpp
-
- - PYTHON: "C:\\Python36"
- PYTHON_VERSION: "3.6"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python36-x64"
- PYTHON_VERSION: "3.6"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python35"
- PYTHON_VERSION: "3.5"
- PYTHON_ARCH: "32"
-
- - PYTHON: "C:\\Python35-x64"
- PYTHON_VERSION: "3.5"
- PYTHON_ARCH: "64"
-
- - PYTHON: "C:\\Python34"
- PYTHON_VERSION: "3.4"
- PYTHON_ARCH: "32"
- PARALLEL: ""
-
- - PYTHON: "C:\\Python34-x64"
- PYTHON_VERSION: "3.4"
- PYTHON_ARCH: "64"
- PARALLEL: ""
-
- - PYTHON: "C:\\Python27-x64"
- PYTHON_VERSION: "2.7"
- PYTHON_ARCH: "64"
- BACKEND: cpp
- PYTHONIOENCODING: "utf-8"
- PARALLEL: ""
-
-clone_depth: 5
-
-branches:
- only:
- - master
- - release
- - 0.29.x
-
-init:
- - "ECHO Python %PYTHON_VERSION% (%PYTHON_ARCH%bit) from %PYTHON%"
-
-install:
- - "powershell appveyor\\install.ps1"
- - "%PYTHON%\\python.exe --version"
- - "%PYTHON%\\Scripts\\pip.exe --version"
- - "%PYTHON%\\Scripts\\wheel.exe version"
-
-build: off
-build_script:
- - "%WITH_ENV% %PYTHON%\\python.exe setup.py build_ext %PARALLEL%"
- - "%WITH_ENV% %PYTHON%\\python.exe setup.py build_ext --inplace"
- - "%WITH_ENV% %PYTHON%\\python.exe setup.py bdist_wheel"
-
-test: off
-test_script:
- - "%PYTHON%\\Scripts\\pip.exe install -r test-requirements.txt"
- - "%PYTHON%\\Scripts\\pip.exe install win_unicode_console"
- - "set CFLAGS=/Od /W3 %EXTRA_CFLAGS%"
- - "%WITH_ENV% %PYTHON%\\python.exe runtests.py -vv --backend=%BACKEND% --no-code-style -j5"
-
-artifacts:
- - path: dist\*
-
-cache:
- - C:\Downloads\Cython -> appveyor\install.ps1
-
-#on_success:
-# - TODO: upload the content of dist/*.whl to a public wheelhouse
diff --git a/docs/examples/tutorial/clibraries/queue.py b/docs/examples/tutorial/clibraries/queue.py
index 45529fa94..e99b9b32c 100644
--- a/docs/examples/tutorial/clibraries/queue.py
+++ b/docs/examples/tutorial/clibraries/queue.py
@@ -2,7 +2,7 @@ from cython.cimports import cqueue
@cython.cclass
class Queue:
- _c_queue = cython.declare(cython.pointer(cqueue.Queue))
+ _c_queue: cython.pointer(cqueue.Queue)
def __cinit__(self):
self._c_queue = cqueue.queue_new()
diff --git a/docs/examples/tutorial/embedding/embedded.pyx b/docs/examples/tutorial/embedding/embedded.pyx
index 26704d45f..2ed823945 100644
--- a/docs/examples/tutorial/embedding/embedded.pyx
+++ b/docs/examples/tutorial/embedding/embedded.pyx
@@ -1,8 +1,9 @@
# embedded.pyx
-# The following two lines are for test purposed only, please ignore them.
+# The following two lines are for test purposes only, please ignore them.
# distutils: sources = embedded_main.c
# tag: py3only
+# tag: no-cpp
TEXT_TO_SAY = 'Hello from Python!'
diff --git a/docs/examples/tutorial/pure/disabled_annotations.py b/docs/examples/tutorial/pure/disabled_annotations.py
new file mode 100644
index 000000000..c92b4cf8e
--- /dev/null
+++ b/docs/examples/tutorial/pure/disabled_annotations.py
@@ -0,0 +1,33 @@
+import cython
+
+@cython.annotation_typing(False)
+def function_without_typing(a: int, b: int) -> int:
+ """Cython is ignoring annotations in this function"""
+ c: int = a + b
+ return c * a
+
+
+@cython.annotation_typing(False)
+@cython.cclass
+class NotAnnotatedClass:
+ """Cython is ignoring annotatons in this class except annotated_method"""
+ d: dict
+
+ def __init__(self, dictionary: dict):
+ self.d = dictionary
+
+ @cython.annotation_typing(True)
+ def annotated_method(self, key: str, a: cython.int, b: cython.int):
+ prefixed_key: str = 'prefix_' + key
+ self.d[prefixed_key] = a + b
+
+
+def annotated_function(a: cython.int, b: cython.int):
+ s: cython.int = a + b
+ with cython.annotation_typing(False):
+ # Cython is ignoring annotations within this code block
+ c: list = []
+ c.append(a)
+ c.append(b)
+ c.append(s)
+ return c
diff --git a/docs/examples/userguide/buffer/matrix.py b/docs/examples/userguide/buffer/matrix.py
new file mode 100644
index 000000000..79a3d3f12
--- /dev/null
+++ b/docs/examples/userguide/buffer/matrix.py
@@ -0,0 +1,15 @@
+# distutils: language = c++
+
+from cython.cimports.libcpp.vector import vector
+
+@cython.cclass
+class Matrix:
+ ncols: cython.unsigned
+ v: vector[cython.float]
+
+ def __cinit__(self, ncols: cython.unsigned):
+ self.ncols = ncols
+
+ def add_row(self):
+ """Adds a row, initially zero-filled."""
+ self.v.resize(self.v.size() + self.ncols)
diff --git a/docs/examples/userguide/buffer/matrix.pyx b/docs/examples/userguide/buffer/matrix.pyx
index ca597c2f2..f2547f6c3 100644
--- a/docs/examples/userguide/buffer/matrix.pyx
+++ b/docs/examples/userguide/buffer/matrix.pyx
@@ -1,9 +1,8 @@
# distutils: language = c++
-# matrix.pyx
-
from libcpp.vector cimport vector
+
cdef class Matrix:
cdef unsigned ncols
cdef vector[float] v
diff --git a/docs/examples/userguide/buffer/matrix_with_buffer.py b/docs/examples/userguide/buffer/matrix_with_buffer.py
new file mode 100644
index 000000000..34ccc6591
--- /dev/null
+++ b/docs/examples/userguide/buffer/matrix_with_buffer.py
@@ -0,0 +1,48 @@
+# distutils: language = c++
+from cython.cimports.cpython import Py_buffer
+from cython.cimports.libcpp.vector import vector
+
+@cython.cclass
+class Matrix:
+ ncols: cython.Py_ssize_t
+ shape: cython.Py_ssize_t[2]
+ strides: cython.Py_ssize_t[2]
+ v: vector[cython.float]
+
+ def __cinit__(self, ncols: cython.Py_ssize_t):
+ self.ncols = ncols
+
+ def add_row(self):
+ """Adds a row, initially zero-filled."""
+ self.v.resize(self.v.size() + self.ncols)
+
+ def __getbuffer__(self, buffer: cython.pointer(Py_buffer), flags: cython.int):
+ itemsize: cython.Py_ssize_t = cython.sizeof(self.v[0])
+
+ self.shape[0] = self.v.size() // self.ncols
+ self.shape[1] = self.ncols
+
+ # Stride 1 is the distance, in bytes, between two items in a row;
+ # this is the distance between two adjacent items in the vector.
+ # Stride 0 is the distance between the first elements of adjacent rows.
+ self.strides[1] = cython.cast(cython.Py_ssize_t, (
+ cython.cast(cython.p_char, cython.address(self.v[1]))
+ - cython.cast(cython.p_char, cython.address(self.v[0]))
+ )
+ )
+ self.strides[0] = self.ncols * self.strides[1]
+
+ buffer.buf = cython.cast(cython.p_char, cython.address(self.v[0]))
+ buffer.format = 'f' # float
+ buffer.internal = cython.NULL # see References
+ buffer.itemsize = itemsize
+ buffer.len = self.v.size() * itemsize # product(shape) * itemsize
+ buffer.ndim = 2
+ buffer.obj = self
+ buffer.readonly = 0
+ buffer.shape = self.shape
+ buffer.strides = self.strides
+ buffer.suboffsets = cython.NULL # for pointer arrays only
+
+ def __releasebuffer__(self, buffer: cython.pointer(Py_buffer)):
+ pass
diff --git a/docs/examples/userguide/buffer/matrix_with_buffer.pyx b/docs/examples/userguide/buffer/matrix_with_buffer.pyx
index c355f0fe8..16239d199 100644
--- a/docs/examples/userguide/buffer/matrix_with_buffer.pyx
+++ b/docs/examples/userguide/buffer/matrix_with_buffer.pyx
@@ -1,8 +1,8 @@
# distutils: language = c++
-
from cpython cimport Py_buffer
from libcpp.vector cimport vector
+
cdef class Matrix:
cdef Py_ssize_t ncols
cdef Py_ssize_t shape[2]
@@ -19,7 +19,7 @@ cdef class Matrix:
def __getbuffer__(self, Py_buffer *buffer, int flags):
cdef Py_ssize_t itemsize = sizeof(self.v[0])
- self.shape[0] = self.v.size() / self.ncols
+ self.shape[0] = self.v.size() // self.ncols
self.shape[1] = self.ncols
# Stride 1 is the distance, in bytes, between two items in a row;
@@ -27,6 +27,9 @@ cdef class Matrix:
# Stride 0 is the distance between the first elements of adjacent rows.
self.strides[1] = <Py_ssize_t>( <char *>&(self.v[1])
- <char *>&(self.v[0]))
+
+
+
self.strides[0] = self.ncols * self.strides[1]
buffer.buf = <char *>&(self.v[0])
diff --git a/docs/examples/userguide/buffer/view_count.py b/docs/examples/userguide/buffer/view_count.py
new file mode 100644
index 000000000..6a0554abc
--- /dev/null
+++ b/docs/examples/userguide/buffer/view_count.py
@@ -0,0 +1,30 @@
+# distutils: language = c++
+
+from cython.cimports.cpython import Py_buffer
+from cython.cimports.libcpp.vector import vector
+
+@cython.cclass
+class Matrix:
+
+ view_count: cython.int
+
+ ncols: cython.Py_ssize_t
+ v: vector[cython.float]
+ # ...
+
+ def __cinit__(self, ncols: cython.Py_ssize_t):
+ self.ncols = ncols
+ self.view_count = 0
+
+ def add_row(self):
+ if self.view_count > 0:
+ raise ValueError("can't add row while being viewed")
+ self.v.resize(self.v.size() + self.ncols)
+
+ def __getbuffer__(self, buffer: cython.pointer(Py_buffer), flags: cython.int):
+ # ... as before
+
+ self.view_count += 1
+
+ def __releasebuffer__(self, buffer: cython.pointer(Py_buffer)):
+ self.view_count -= 1
diff --git a/docs/examples/userguide/buffer/view_count.pyx b/docs/examples/userguide/buffer/view_count.pyx
index 8027f3ee9..8c4b1d524 100644
--- a/docs/examples/userguide/buffer/view_count.pyx
+++ b/docs/examples/userguide/buffer/view_count.pyx
@@ -3,6 +3,7 @@
from cpython cimport Py_buffer
from libcpp.vector cimport vector
+
cdef class Matrix:
cdef int view_count
@@ -26,4 +27,4 @@ cdef class Matrix:
self.view_count += 1
def __releasebuffer__(self, Py_buffer *buffer):
- self.view_count -= 1 \ No newline at end of file
+ self.view_count -= 1
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle.py b/docs/examples/userguide/early_binding_for_speed/rectangle.py
new file mode 100644
index 000000000..cd534d051
--- /dev/null
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle.py
@@ -0,0 +1,22 @@
+@cython.cclass
+class Rectangle:
+ x0: cython.int
+ y0: cython.int
+ x1: cython.int
+ y1: cython.int
+
+ def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int):
+ self.x0 = x0
+ self.y0 = y0
+ self.x1 = x1
+ self.y1 = y1
+
+ def area(self):
+ area = (self.x1 - self.x0) * (self.y1 - self.y0)
+ if area < 0:
+ area = -area
+ return area
+
+def rectArea(x0, y0, x1, y1):
+ rect = Rectangle(x0, y0, x1, y1)
+ return rect.area()
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle.pyx
index de70b0263..b58f6534b 100644
--- a/docs/examples/userguide/early_binding_for_speed/rectangle.pyx
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle.pyx
@@ -1,7 +1,10 @@
+
cdef class Rectangle:
cdef int x0, y0
cdef int x1, y1
+
+
def __init__(self, int x0, int y0, int x1, int y1):
self.x0 = x0
self.y0 = y0
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py
new file mode 100644
index 000000000..ee2a14fb8
--- /dev/null
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.py
@@ -0,0 +1,26 @@
+@cython.cclass
+class Rectangle:
+ x0: cython.int
+ y0: cython.int
+ x1: cython.int
+ y1: cython.int
+
+ def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int):
+ self.x0 = x0
+ self.y0 = y0
+ self.x1 = x1
+ self.y1 = y1
+
+ @cython.cfunc
+ def _area(self) -> cython.int:
+ area: cython.int = (self.x1 - self.x0) * (self.y1 - self.y0)
+ if area < 0:
+ area = -area
+ return area
+
+ def area(self):
+ return self._area()
+
+def rectArea(x0, y0, x1, y1):
+ rect: Rectangle = Rectangle(x0, y0, x1, y1)
+ return rect._area()
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
index 1933326d2..3b64d766b 100644
--- a/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
@@ -1,13 +1,17 @@
+
cdef class Rectangle:
cdef int x0, y0
cdef int x1, y1
+
+
def __init__(self, int x0, int y0, int x1, int y1):
self.x0 = x0
self.y0 = y0
self.x1 = x1
self.y1 = y1
+
cdef int _area(self):
cdef int area = (self.x1 - self.x0) * (self.y1 - self.y0)
if area < 0:
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py
new file mode 100644
index 000000000..670f340a4
--- /dev/null
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.py
@@ -0,0 +1,23 @@
+@cython.cclass
+class Rectangle:
+ x0: cython.int
+ y0: cython.int
+ x1: cython.int
+ y1: cython.int
+
+ def __init__(self, x0: cython.int, y0: cython.int, x1: cython.int, y1: cython.int):
+ self.x0 = x0
+ self.y0 = y0
+ self.x1 = x1
+ self.y1 = y1
+
+ @cython.ccall
+ def area(self)-> cython.int:
+ area: cython.int = (self.x1 - self.x0) * (self.y1 - self.y0)
+ if area < 0:
+ area = -area
+ return area
+
+def rectArea(x0, y0, x1, y1):
+ rect: Rectangle = Rectangle(x0, y0, x1, y1)
+ return rect.area()
diff --git a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
index f8b7d86a8..53f2a8ad2 100644
--- a/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
+++ b/docs/examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
@@ -1,15 +1,19 @@
+
cdef class Rectangle:
cdef int x0, y0
cdef int x1, y1
+
+
def __init__(self, int x0, int y0, int x1, int y1):
self.x0 = x0
self.y0 = y0
self.x1 = x1
self.y1 = y1
+
cpdef int area(self):
- area = (self.x1 - self.x0) * (self.y1 - self.y0)
+ cdef int area = (self.x1 - self.x0) * (self.y1 - self.y0)
if area < 0:
area = -area
return area
diff --git a/docs/examples/userguide/extension_types/cheesy.py b/docs/examples/userguide/extension_types/cheesy.py
new file mode 100644
index 000000000..0995c3993
--- /dev/null
+++ b/docs/examples/userguide/extension_types/cheesy.py
@@ -0,0 +1,36 @@
+import cython
+
+@cython.cclass
+class CheeseShop:
+
+ cheeses: object
+
+ def __cinit__(self):
+ self.cheeses = []
+
+ @property
+ def cheese(self):
+ return "We don't have: %s" % self.cheeses
+
+ @cheese.setter
+ def cheese(self, value):
+ self.cheeses.append(value)
+
+ @cheese.deleter
+ def cheese(self):
+ del self.cheeses[:]
+
+# Test input
+from cheesy import CheeseShop
+
+shop = CheeseShop()
+print(shop.cheese)
+
+shop.cheese = "camembert"
+print(shop.cheese)
+
+shop.cheese = "cheddar"
+print(shop.cheese)
+
+del shop.cheese
+print(shop.cheese)
diff --git a/docs/examples/userguide/extension_types/cheesy.pyx b/docs/examples/userguide/extension_types/cheesy.pyx
new file mode 100644
index 000000000..2859d848f
--- /dev/null
+++ b/docs/examples/userguide/extension_types/cheesy.pyx
@@ -0,0 +1,36 @@
+
+
+
+cdef class CheeseShop:
+
+ cdef object cheeses
+
+ def __cinit__(self):
+ self.cheeses = []
+
+ @property
+ def cheese(self):
+ return "We don't have: %s" % self.cheeses
+
+ @cheese.setter
+ def cheese(self, value):
+ self.cheeses.append(value)
+
+ @cheese.deleter
+ def cheese(self):
+ del self.cheeses[:]
+
+# Test input
+from cheesy import CheeseShop
+
+shop = CheeseShop()
+print(shop.cheese)
+
+shop.cheese = "camembert"
+print(shop.cheese)
+
+shop.cheese = "cheddar"
+print(shop.cheese)
+
+del shop.cheese
+print(shop.cheese)
diff --git a/docs/examples/userguide/extension_types/dataclass.py b/docs/examples/userguide/extension_types/dataclass.py
new file mode 100644
index 000000000..d8ed68666
--- /dev/null
+++ b/docs/examples/userguide/extension_types/dataclass.py
@@ -0,0 +1,21 @@
+import cython
+try:
+ import typing
+ import dataclasses
+except ImportError:
+ pass # The modules don't actually have to exist for Cython to use them as annotations
+
+@cython.dataclasses.dataclass
+@cython.cclass
+class MyDataclass:
+ # fields can be declared using annotations
+ a: cython.int = 0
+ b: double = cython.dataclasses.field(default_factory = lambda: 10, repr=False)
+
+
+ c: str = 'hello'
+
+
+ # typing.InitVar and typing.ClassVar also work
+ d: dataclasses.InitVar[double] = 5
+ e: typing.ClassVar[list] = []
diff --git a/docs/examples/userguide/extension_types/dataclass.pyx b/docs/examples/userguide/extension_types/dataclass.pyx
index 0529890ba..b03d5f7b1 100644
--- a/docs/examples/userguide/extension_types/dataclass.pyx
+++ b/docs/examples/userguide/extension_types/dataclass.pyx
@@ -5,6 +5,7 @@ try:
except ImportError:
pass # The modules don't actually have to exists for Cython to use them as annotations
+
@cython.dataclasses.dataclass
cdef class MyDataclass:
# fields can be declared using annotations
@@ -16,5 +17,5 @@ cdef class MyDataclass:
c = "hello" # assignment of default value on a separate line
# typing.InitVar and typing.ClassVar also work
- d: dataclasses.InitVar[double] = 5
+ d: dataclasses.InitVar[cython.double] = 5
e: typing.ClassVar[list] = []
diff --git a/docs/examples/userguide/extension_types/dict_animal.py b/docs/examples/userguide/extension_types/dict_animal.py
new file mode 100644
index 000000000..a36dd3f89
--- /dev/null
+++ b/docs/examples/userguide/extension_types/dict_animal.py
@@ -0,0 +1,12 @@
+@cython.cclass
+class Animal:
+
+ number_of_legs: cython.int
+ __dict__: dict
+
+ def __cinit__(self, number_of_legs: cython.int):
+ self.number_of_legs = number_of_legs
+
+
+dog = Animal(4)
+dog.has_tail = True
diff --git a/docs/examples/userguide/extension_types/dict_animal.pyx b/docs/examples/userguide/extension_types/dict_animal.pyx
index 1aa0ccc11..575b835e9 100644
--- a/docs/examples/userguide/extension_types/dict_animal.pyx
+++ b/docs/examples/userguide/extension_types/dict_animal.pyx
@@ -1,9 +1,10 @@
+
cdef class Animal:
cdef int number_of_legs
cdef dict __dict__
- def __cinit__(self, int number_of_legs):
+ def __init__(self, int number_of_legs):
self.number_of_legs = number_of_legs
diff --git a/docs/examples/userguide/extension_types/extendable_animal.py b/docs/examples/userguide/extension_types/extendable_animal.py
new file mode 100644
index 000000000..2eef69460
--- /dev/null
+++ b/docs/examples/userguide/extension_types/extendable_animal.py
@@ -0,0 +1,15 @@
+@cython.cclass
+class Animal:
+
+ number_of_legs: cython.int
+
+ def __cinit__(self, number_of_legs: cython.int):
+ self.number_of_legs = number_of_legs
+
+
+class ExtendableAnimal(Animal): # Note that we use class, not cdef class
+ pass
+
+
+dog = ExtendableAnimal(4)
+dog.has_tail = True
diff --git a/docs/examples/userguide/extension_types/extendable_animal.pyx b/docs/examples/userguide/extension_types/extendable_animal.pyx
index 701a93148..2ec165421 100644
--- a/docs/examples/userguide/extension_types/extendable_animal.pyx
+++ b/docs/examples/userguide/extension_types/extendable_animal.pyx
@@ -1,8 +1,9 @@
+
cdef class Animal:
cdef int number_of_legs
- def __cinit__(self, int number_of_legs):
+ def __init__(self, int number_of_legs):
self.number_of_legs = number_of_legs
@@ -11,4 +12,4 @@ class ExtendableAnimal(Animal): # Note that we use class, not cdef class
dog = ExtendableAnimal(4)
-dog.has_tail = True \ No newline at end of file
+dog.has_tail = True
diff --git a/docs/examples/userguide/extension_types/owned_pointer.py b/docs/examples/userguide/extension_types/owned_pointer.py
new file mode 100644
index 000000000..1c235a883
--- /dev/null
+++ b/docs/examples/userguide/extension_types/owned_pointer.py
@@ -0,0 +1,17 @@
+import cython
+from cython.cimports.libc.stdlib import free
+
+@cython.cclass
+class OwnedPointer:
+ ptr: cython.pointer(cython.void)
+
+ def __dealloc__(self):
+ if self.ptr is not cython.NULL:
+ free(self.ptr)
+
+ @staticmethod
+ @cython.cfunc
+ def create(ptr: cython.pointer(cython.void)):
+ p = OwnedPointer()
+ p.ptr = ptr
+ return p
diff --git a/docs/examples/userguide/extension_types/owned_pointer.pyx b/docs/examples/userguide/extension_types/owned_pointer.pyx
new file mode 100644
index 000000000..98b61d91c
--- /dev/null
+++ b/docs/examples/userguide/extension_types/owned_pointer.pyx
@@ -0,0 +1,17 @@
+
+from libc.stdlib cimport free
+
+
+cdef class OwnedPointer:
+ cdef void* ptr
+
+ def __dealloc__(self):
+ if self.ptr is not NULL:
+ free(self.ptr)
+
+
+ @staticmethod
+ cdef create(void* ptr):
+ p = OwnedPointer()
+ p.ptr = ptr
+ return p
diff --git a/docs/examples/userguide/extension_types/penguin.py b/docs/examples/userguide/extension_types/penguin.py
new file mode 100644
index 000000000..6db8eba16
--- /dev/null
+++ b/docs/examples/userguide/extension_types/penguin.py
@@ -0,0 +1,14 @@
+import cython
+
+@cython.cclass
+class Penguin:
+ food: object
+
+ def __cinit__(self, food):
+ self.food = food
+
+ def __init__(self, food):
+ print("eating!")
+
+normal_penguin = Penguin('fish')
+fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() !
diff --git a/docs/examples/userguide/extension_types/penguin.pyx b/docs/examples/userguide/extension_types/penguin.pyx
new file mode 100644
index 000000000..b890c9ffd
--- /dev/null
+++ b/docs/examples/userguide/extension_types/penguin.pyx
@@ -0,0 +1,14 @@
+
+
+
+cdef class Penguin:
+ cdef object food
+
+ def __cinit__(self, food):
+ self.food = food
+
+ def __init__(self, food):
+ print("eating!")
+
+normal_penguin = Penguin('fish')
+fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() !
diff --git a/docs/examples/userguide/extension_types/penguin2.py b/docs/examples/userguide/extension_types/penguin2.py
new file mode 100644
index 000000000..063563d16
--- /dev/null
+++ b/docs/examples/userguide/extension_types/penguin2.py
@@ -0,0 +1,12 @@
+import cython
+
+@cython.freelist(8)
+@cython.cclass
+class Penguin:
+ food: object
+ def __cinit__(self, food):
+ self.food = food
+
+penguin = Penguin('fish 1')
+penguin = None
+penguin = Penguin('fish 2') # does not need to allocate memory!
diff --git a/docs/examples/userguide/extension_types/penguin2.pyx b/docs/examples/userguide/extension_types/penguin2.pyx
new file mode 100644
index 000000000..726aeef8e
--- /dev/null
+++ b/docs/examples/userguide/extension_types/penguin2.pyx
@@ -0,0 +1,12 @@
+cimport cython
+
+
+@cython.freelist(8)
+cdef class Penguin:
+ cdef object food
+ def __cinit__(self, food):
+ self.food = food
+
+penguin = Penguin('fish 1')
+penguin = None
+penguin = Penguin('fish 2') # does not need to allocate memory!
diff --git a/docs/examples/userguide/extension_types/pets.py b/docs/examples/userguide/extension_types/pets.py
new file mode 100644
index 000000000..fc6497cb0
--- /dev/null
+++ b/docs/examples/userguide/extension_types/pets.py
@@ -0,0 +1,22 @@
+import cython
+
+@cython.cclass
+class Parrot:
+
+ @cython.cfunc
+ def describe(self) -> cython.void:
+ print("This parrot is resting.")
+
+@cython.cclass
+class Norwegian(Parrot):
+
+ @cython.cfunc
+ def describe(self) -> cython.void:
+ Parrot.describe(self)
+ print("Lovely plumage!")
+
+cython.declare(p1=Parrot, p2=Parrot)
+p1 = Parrot()
+p2 = Norwegian()
+print("p2:")
+p2.describe()
diff --git a/docs/examples/userguide/extension_types/pets.pyx b/docs/examples/userguide/extension_types/pets.pyx
new file mode 100644
index 000000000..bb06e059d
--- /dev/null
+++ b/docs/examples/userguide/extension_types/pets.pyx
@@ -0,0 +1,22 @@
+
+
+cdef class Parrot:
+
+
+
+ cdef void describe(self):
+ print("This parrot is resting.")
+
+
+cdef class Norwegian(Parrot):
+
+
+ cdef void describe(self):
+ Parrot.describe(self)
+ print("Lovely plumage!")
+
+cdef Parrot p1, p2
+p1 = Parrot()
+p2 = Norwegian()
+print("p2:")
+p2.describe()
diff --git a/docs/examples/userguide/extension_types/python_access.py b/docs/examples/userguide/extension_types/python_access.py
new file mode 100644
index 000000000..27478f50c
--- /dev/null
+++ b/docs/examples/userguide/extension_types/python_access.py
@@ -0,0 +1,7 @@
+import cython
+
+@cython.cclass
+class Shrubbery:
+ width = cython.declare(cython.int, visibility='public')
+ height = cython.declare(cython.int, visibility='public')
+ depth = cython.declare(cython.float, visibility='readonly')
diff --git a/docs/examples/userguide/extension_types/python_access.pyx b/docs/examples/userguide/extension_types/python_access.pyx
index 6d5225ec0..db11de63c 100644
--- a/docs/examples/userguide/extension_types/python_access.pyx
+++ b/docs/examples/userguide/extension_types/python_access.pyx
@@ -1,3 +1,7 @@
+
+
+
cdef class Shrubbery:
cdef public int width, height
+
cdef readonly float depth
diff --git a/docs/examples/userguide/extension_types/shrubbery.py b/docs/examples/userguide/extension_types/shrubbery.py
index 075664527..0e624a1d2 100644
--- a/docs/examples/userguide/extension_types/shrubbery.py
+++ b/docs/examples/userguide/extension_types/shrubbery.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
@cython.cclass
class Shrubbery:
width: cython.int
diff --git a/docs/examples/userguide/extension_types/shrubbery.pyx b/docs/examples/userguide/extension_types/shrubbery.pyx
index b74dfbd1b..8c4e58776 100644
--- a/docs/examples/userguide/extension_types/shrubbery.pyx
+++ b/docs/examples/userguide/extension_types/shrubbery.pyx
@@ -1,6 +1,4 @@
from __future__ import print_function
-
-
cdef class Shrubbery:
cdef int width
cdef int height
diff --git a/docs/examples/userguide/extension_types/shrubbery_2.py b/docs/examples/userguide/extension_types/shrubbery_2.py
new file mode 100644
index 000000000..d6b722500
--- /dev/null
+++ b/docs/examples/userguide/extension_types/shrubbery_2.py
@@ -0,0 +1,10 @@
+import cython
+from cython.cimports.my_module import Shrubbery
+
+@cython.cfunc
+def another_shrubbery(sh1: Shrubbery) -> Shrubbery:
+ sh2: Shrubbery
+ sh2 = Shrubbery()
+ sh2.width = sh1.width
+ sh2.height = sh1.height
+ return sh2
diff --git a/docs/examples/userguide/extension_types/shrubbery_2.pyx b/docs/examples/userguide/extension_types/shrubbery_2.pyx
index d05d28243..4a7782735 100644
--- a/docs/examples/userguide/extension_types/shrubbery_2.pyx
+++ b/docs/examples/userguide/extension_types/shrubbery_2.pyx
@@ -1,5 +1,7 @@
+
from my_module cimport Shrubbery
+
cdef Shrubbery another_shrubbery(Shrubbery sh1):
cdef Shrubbery sh2
sh2 = Shrubbery()
diff --git a/docs/examples/userguide/extension_types/widen_shrubbery.py b/docs/examples/userguide/extension_types/widen_shrubbery.py
new file mode 100644
index 000000000..f69f4dc96
--- /dev/null
+++ b/docs/examples/userguide/extension_types/widen_shrubbery.py
@@ -0,0 +1,6 @@
+import cython
+from cython.cimports.my_module import Shrubbery
+
+@cython.cfunc
+def widen_shrubbery(sh: Shrubbery, extra_width):
+ sh.width = sh.width + extra_width
diff --git a/docs/examples/userguide/extension_types/widen_shrubbery.pyx b/docs/examples/userguide/extension_types/widen_shrubbery.pyx
index a312fbfd9..c6f58f00c 100644
--- a/docs/examples/userguide/extension_types/widen_shrubbery.pyx
+++ b/docs/examples/userguide/extension_types/widen_shrubbery.pyx
@@ -1,4 +1,6 @@
+
from my_module cimport Shrubbery
+
cdef widen_shrubbery(Shrubbery sh, extra_width):
sh.width = sh.width + extra_width
diff --git a/docs/examples/userguide/extension_types/wrapper_class.py b/docs/examples/userguide/extension_types/wrapper_class.py
new file mode 100644
index 000000000..b625ffebd
--- /dev/null
+++ b/docs/examples/userguide/extension_types/wrapper_class.py
@@ -0,0 +1,65 @@
+import cython
+from cython.cimports.libc.stdlib import malloc, free
+
+# Example C struct
+my_c_struct = cython.struct(
+ a = cython.int,
+ b = cython.int,
+)
+
+@cython.cclass
+class WrapperClass:
+ """A wrapper class for a C/C++ data structure"""
+ _ptr: cython.pointer(my_c_struct)
+ ptr_owner: cython.bint
+
+ def __cinit__(self):
+ self.ptr_owner = False
+
+ def __dealloc__(self):
+ # De-allocate if not null and flag is set
+ if self._ptr is not cython.NULL and self.ptr_owner is True:
+ free(self._ptr)
+ self._ptr = cython.NULL
+
+ def __init__(self):
+ # Prevent accidental instantiation from normal Python code
+ # since we cannot pass a struct pointer into a Python constructor.
+ raise TypeError("This class cannot be instantiated directly.")
+
+ # Extension class properties
+ @property
+ def a(self):
+ return self._ptr.a if self._ptr is not cython.NULL else None
+
+ @property
+ def b(self):
+ return self._ptr.b if self._ptr is not cython.NULL else None
+
+ @staticmethod
+ @cython.cfunc
+ def from_ptr(_ptr: cython.pointer(my_c_struct), owner: cython.bint=False) -> WrapperClass:
+ """Factory function to create WrapperClass objects from
+ given my_c_struct pointer.
+
+ Setting ``owner`` flag to ``True`` causes
+ the extension type to ``free`` the structure pointed to by ``_ptr``
+ when the wrapper object is deallocated."""
+ # Fast call to __new__() that bypasses the __init__() constructor.
+ wrapper: WrapperClass = WrapperClass.__new__(WrapperClass)
+ wrapper._ptr = _ptr
+ wrapper.ptr_owner = owner
+ return wrapper
+
+ @staticmethod
+ @cython.cfunc
+ def new_struct() -> WrapperClass:
+ """Factory function to create WrapperClass objects with
+ newly allocated my_c_struct"""
+ _ptr: cython.pointer(my_c_struct) = cython.cast(
+ cython.pointer(my_c_struct), malloc(cython.sizeof(my_c_struct)))
+ if _ptr is cython.NULL:
+ raise MemoryError
+ _ptr.a = 0
+ _ptr.b = 0
+ return WrapperClass.from_ptr(_ptr, owner=True)
diff --git a/docs/examples/userguide/extension_types/wrapper_class.pyx b/docs/examples/userguide/extension_types/wrapper_class.pyx
new file mode 100644
index 000000000..e2a0c3ff2
--- /dev/null
+++ b/docs/examples/userguide/extension_types/wrapper_class.pyx
@@ -0,0 +1,65 @@
+
+from libc.stdlib cimport malloc, free
+
+# Example C struct
+ctypedef struct my_c_struct:
+ int a
+ int b
+
+
+
+cdef class WrapperClass:
+ """A wrapper class for a C/C++ data structure"""
+ cdef my_c_struct *_ptr
+ cdef bint ptr_owner
+
+ def __cinit__(self):
+ self.ptr_owner = False
+
+ def __dealloc__(self):
+ # De-allocate if not null and flag is set
+ if self._ptr is not NULL and self.ptr_owner is True:
+ free(self._ptr)
+ self._ptr = NULL
+
+ def __init__(self):
+ # Prevent accidental instantiation from normal Python code
+ # since we cannot pass a struct pointer into a Python constructor.
+ raise TypeError("This class cannot be instantiated directly.")
+
+ # Extension class properties
+ @property
+ def a(self):
+ return self._ptr.a if self._ptr is not NULL else None
+
+ @property
+ def b(self):
+ return self._ptr.b if self._ptr is not NULL else None
+
+
+ @staticmethod
+ cdef WrapperClass from_ptr(my_c_struct *_ptr, bint owner=False):
+ """Factory function to create WrapperClass objects from
+ given my_c_struct pointer.
+
+ Setting ``owner`` flag to ``True`` causes
+ the extension type to ``free`` the structure pointed to by ``_ptr``
+ when the wrapper object is deallocated."""
+ # Fast call to __new__() that bypasses the __init__() constructor.
+ cdef WrapperClass wrapper = WrapperClass.__new__(WrapperClass)
+ wrapper._ptr = _ptr
+ wrapper.ptr_owner = owner
+ return wrapper
+
+
+ @staticmethod
+ cdef WrapperClass new_struct():
+ """Factory function to create WrapperClass objects with
+ newly allocated my_c_struct"""
+ cdef my_c_struct *_ptr = <my_c_struct *>malloc(sizeof(my_c_struct))
+
+ if _ptr is NULL:
+ raise MemoryError
+ _ptr.a = 0
+ _ptr.b = 0
+ return WrapperClass.from_ptr(_ptr, owner=True)
diff --git a/docs/examples/userguide/language_basics/struct_union_enum.pyx b/docs/examples/userguide/language_basics/enum.pyx
index af9b06d9a..1b5f5d614 100644
--- a/docs/examples/userguide/language_basics/struct_union_enum.pyx
+++ b/docs/examples/userguide/language_basics/enum.pyx
@@ -1,11 +1,3 @@
-cdef struct Grail:
- int age
- float volume
-
-cdef union Food:
- char *spam
- float *eggs
-
cdef enum CheeseType:
cheddar, edam,
camembert
@@ -14,3 +6,6 @@ cdef enum CheeseState:
hard = 1
soft = 2
runny = 3
+
+print(CheeseType.cheddar)
+print(CheeseState.hard)
diff --git a/docs/examples/userguide/language_basics/function_pointer.pyx b/docs/examples/userguide/language_basics/function_pointer.pyx
new file mode 100644
index 000000000..b345c62b4
--- /dev/null
+++ b/docs/examples/userguide/language_basics/function_pointer.pyx
@@ -0,0 +1,8 @@
+cdef int(*ptr_add)(int, int)
+
+cdef int add(int a, int b):
+ return a + b
+
+ptr_add = add
+
+print(ptr_add(1, 3))
diff --git a/docs/examples/userguide/language_basics/function_pointer_struct.pyx b/docs/examples/userguide/language_basics/function_pointer_struct.pyx
new file mode 100644
index 000000000..5ef618961
--- /dev/null
+++ b/docs/examples/userguide/language_basics/function_pointer_struct.pyx
@@ -0,0 +1,9 @@
+cdef struct Bar:
+ int sum(int a, int b)
+
+cdef int add(int a, int b):
+ return a + b
+
+cdef Bar bar = Bar(add)
+
+print(bar.sum(1, 2))
diff --git a/docs/examples/userguide/language_basics/struct.py b/docs/examples/userguide/language_basics/struct.py
new file mode 100644
index 000000000..32b6b252a
--- /dev/null
+++ b/docs/examples/userguide/language_basics/struct.py
@@ -0,0 +1,7 @@
+Grail = cython.struct(
+ age=cython.int,
+ volume=cython.float)
+
+def main():
+ grail: Grail = Grail(5, 3.0)
+ print(grail.age, grail.volume)
diff --git a/docs/examples/userguide/language_basics/struct.pyx b/docs/examples/userguide/language_basics/struct.pyx
new file mode 100644
index 000000000..3ef79172b
--- /dev/null
+++ b/docs/examples/userguide/language_basics/struct.pyx
@@ -0,0 +1,7 @@
+cdef struct Grail:
+ int age
+ float volume
+
+def main():
+ cdef Grail grail = Grail(5, 3.0)
+ print(grail.age, grail.volume)
diff --git a/docs/examples/userguide/language_basics/struct_union_enum.py b/docs/examples/userguide/language_basics/struct_union_enum.py
deleted file mode 100644
index b78c0aa02..000000000
--- a/docs/examples/userguide/language_basics/struct_union_enum.py
+++ /dev/null
@@ -1,7 +0,0 @@
-Grail = cython.struct(
- age=cython.int,
- volume=cython.float)
-
-Food = cython.union(
- spam=cython.p_char,
- eggs=cython.p_float)
diff --git a/docs/examples/userguide/language_basics/union.py b/docs/examples/userguide/language_basics/union.py
new file mode 100644
index 000000000..efcda358b
--- /dev/null
+++ b/docs/examples/userguide/language_basics/union.py
@@ -0,0 +1,9 @@
+Food = cython.union(
+ spam=cython.p_char,
+ eggs=cython.p_float)
+
+def main():
+ arr: cython.p_float = [1.0, 2.0]
+ spam: Food = Food(spam='b')
+ eggs: Food = Food(eggs=arr)
+ print(spam.spam, eggs.eggs[0])
diff --git a/docs/examples/userguide/language_basics/union.pyx b/docs/examples/userguide/language_basics/union.pyx
new file mode 100644
index 000000000..e05f63fcc
--- /dev/null
+++ b/docs/examples/userguide/language_basics/union.pyx
@@ -0,0 +1,9 @@
+cdef union Food:
+ char *spam
+ float *eggs
+
+def main():
+ cdef float *arr = [1.0, 2.0]
+ cdef Food spam = Food(spam='b')
+ cdef Food eggs = Food(eggs=arr)
+ print(spam.spam, eggs.eggs[0])
diff --git a/docs/examples/userguide/parallelism/breaking_loop.py b/docs/examples/userguide/parallelism/breaking_loop.py
new file mode 100644
index 000000000..00d0225b5
--- /dev/null
+++ b/docs/examples/userguide/parallelism/breaking_loop.py
@@ -0,0 +1,15 @@
+from cython.parallel import prange
+
+@cython.exceptval(-1)
+@cython.cfunc
+def func(n: cython.Py_ssize_t) -> cython.int:
+ i: cython.Py_ssize_t
+
+ for i in prange(n, nogil=True):
+ if i == 8:
+ with cython.gil:
+ raise Exception()
+ elif i == 4:
+ break
+ elif i == 2:
+ return i
diff --git a/docs/examples/userguide/parallelism/breaking_loop.pyx b/docs/examples/userguide/parallelism/breaking_loop.pyx
index 2cf562edf..e7445082d 100644
--- a/docs/examples/userguide/parallelism/breaking_loop.pyx
+++ b/docs/examples/userguide/parallelism/breaking_loop.pyx
@@ -1,5 +1,7 @@
from cython.parallel import prange
+
+
cdef int func(Py_ssize_t n) except -1:
cdef Py_ssize_t i
diff --git a/docs/examples/userguide/parallelism/cimport_openmp.py b/docs/examples/userguide/parallelism/cimport_openmp.py
new file mode 100644
index 000000000..9288a4381
--- /dev/null
+++ b/docs/examples/userguide/parallelism/cimport_openmp.py
@@ -0,0 +1,11 @@
+# tag: openmp
+
+from cython.parallel import parallel
+from cython.cimports.openmp import omp_set_dynamic, omp_get_num_threads
+
+num_threads = cython.declare(cython.int)
+
+omp_set_dynamic(1)
+with cython.nogil, parallel():
+ num_threads = omp_get_num_threads()
+ # ...
diff --git a/docs/examples/userguide/parallelism/cimport_openmp.pyx b/docs/examples/userguide/parallelism/cimport_openmp.pyx
index 797936fe7..54d5f18db 100644
--- a/docs/examples/userguide/parallelism/cimport_openmp.pyx
+++ b/docs/examples/userguide/parallelism/cimport_openmp.pyx
@@ -1,6 +1,4 @@
# tag: openmp
-# You can ignore the previous line.
-# It's for internal testing of the Cython documentation.
from cython.parallel cimport parallel
cimport openmp
diff --git a/docs/examples/userguide/parallelism/memoryview_sum.py b/docs/examples/userguide/parallelism/memoryview_sum.py
new file mode 100644
index 000000000..6cff5d587
--- /dev/null
+++ b/docs/examples/userguide/parallelism/memoryview_sum.py
@@ -0,0 +1,7 @@
+from cython.parallel import prange
+
+def func(x: cython.double[:], alpha: cython.double):
+ i: cython.Py_ssize_t
+
+ for i in prange(x.shape[0], nogil=True):
+ x[i] = alpha * x[i]
diff --git a/docs/examples/userguide/parallelism/memoryview_sum.pyx b/docs/examples/userguide/parallelism/memoryview_sum.pyx
new file mode 100644
index 000000000..bdc1c9feb
--- /dev/null
+++ b/docs/examples/userguide/parallelism/memoryview_sum.pyx
@@ -0,0 +1,7 @@
+from cython.parallel import prange
+
+def func(double[:] x, double alpha):
+ cdef Py_ssize_t i
+
+ for i in prange(x.shape[0], nogil=True):
+ x[i] = alpha * x[i]
diff --git a/docs/examples/userguide/parallelism/parallel.py b/docs/examples/userguide/parallelism/parallel.py
new file mode 100644
index 000000000..0fb62d10f
--- /dev/null
+++ b/docs/examples/userguide/parallelism/parallel.py
@@ -0,0 +1,30 @@
+from cython.parallel import parallel, prange
+from cython.cimports.libc.stdlib import abort, malloc, free
+
+@cython.nogil
+@cython.cfunc
+def func(buf: cython.p_int) -> cython.void:
+ pass
+ # ...
+
+idx = cython.declare(cython.Py_ssize_t)
+i = cython.declare(cython.Py_ssize_t)
+j = cython.declare(cython.Py_ssize_t)
+n = cython.declare(cython.Py_ssize_t, 100)
+local_buf = cython.declare(p_int)
+size = cython.declare(cython.size_t, 10)
+
+with cython.nogil, parallel():
+ local_buf: cython.p_int = cython.cast(cython.p_int, malloc(cython.sizeof(cython.int) * size))
+ if local_buf is cython.NULL:
+ abort()
+
+ # populate our local buffer in a sequential loop
+ for i in range(size):
+ local_buf[i] = i * 2
+
+ # share the work using the thread-local buffer(s)
+ for j in prange(n, schedule='guided'):
+ func(local_buf)
+
+ free(local_buf)
diff --git a/docs/examples/userguide/parallelism/parallel.pyx b/docs/examples/userguide/parallelism/parallel.pyx
new file mode 100644
index 000000000..2a952d537
--- /dev/null
+++ b/docs/examples/userguide/parallelism/parallel.pyx
@@ -0,0 +1,30 @@
+from cython.parallel import parallel, prange
+from libc.stdlib cimport abort, malloc, free
+
+
+
+cdef void func(int *buf) nogil:
+ pass
+ # ...
+
+cdef Py_ssize_t idx, i, j, n = 100
+cdef int * local_buf
+cdef size_t size = 10
+
+
+
+
+with nogil, parallel():
+ local_buf = <int *> malloc(sizeof(int) * size)
+ if local_buf is NULL:
+ abort()
+
+ # populate our local buffer in a sequential loop
+ for i in range(size):
+ local_buf[i] = i * 2
+
+ # share the work using the thread-local buffer(s)
+ for j in prange(n, schedule='guided'):
+ func(local_buf)
+
+ free(local_buf)
diff --git a/docs/examples/userguide/parallelism/setup_py.py b/docs/examples/userguide/parallelism/setup_py.py
new file mode 100644
index 000000000..85a037dc5
--- /dev/null
+++ b/docs/examples/userguide/parallelism/setup_py.py
@@ -0,0 +1,16 @@
+from setuptools import Extension, setup
+from Cython.Build import cythonize
+
+ext_modules = [
+ Extension(
+ "hello",
+ ["hello.py"],
+ extra_compile_args=['-fopenmp'],
+ extra_link_args=['-fopenmp'],
+ )
+]
+
+setup(
+ name='hello-parallel-world',
+ ext_modules=cythonize(ext_modules),
+)
diff --git a/docs/examples/userguide/parallelism/setup.py b/docs/examples/userguide/parallelism/setup_pyx.py
index fe6d0a64c..fe6d0a64c 100644
--- a/docs/examples/userguide/parallelism/setup.py
+++ b/docs/examples/userguide/parallelism/setup_pyx.py
diff --git a/docs/examples/userguide/parallelism/simple_sum.py b/docs/examples/userguide/parallelism/simple_sum.py
new file mode 100644
index 000000000..f952a8556
--- /dev/null
+++ b/docs/examples/userguide/parallelism/simple_sum.py
@@ -0,0 +1,10 @@
+from cython.parallel import prange
+
+i = cython.declare(cython.int)
+n = cython.declare(cython.int, 30)
+sum = cython.declare(cython.int, 0)
+
+for i in prange(n, nogil=True):
+ sum += i
+
+print(sum)
diff --git a/docs/examples/userguide/sharing_declarations/landscaping.py b/docs/examples/userguide/sharing_declarations/landscaping.py
new file mode 100644
index 000000000..2d2c4b5b7
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/landscaping.py
@@ -0,0 +1,7 @@
+from cython.cimports.shrubbing import Shrubbery
+import shrubbing
+
+def main():
+ sh: Shrubbery
+ sh = shrubbing.standard_shrubbery()
+ print("Shrubbery size is", sh.width, 'x', sh.length)
diff --git a/docs/examples/userguide/sharing_declarations/lunch.py b/docs/examples/userguide/sharing_declarations/lunch.py
new file mode 100644
index 000000000..df56913eb
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/lunch.py
@@ -0,0 +1,5 @@
+import cython
+from cython.cimports.c_lunch import eject_tomato as c_eject_tomato
+
+def eject_tomato(speed: cython.float):
+ c_eject_tomato(speed)
diff --git a/docs/examples/userguide/sharing_declarations/lunch.pyx b/docs/examples/userguide/sharing_declarations/lunch.pyx
index 8b0911510..fea5e4c87 100644
--- a/docs/examples/userguide/sharing_declarations/lunch.pyx
+++ b/docs/examples/userguide/sharing_declarations/lunch.pyx
@@ -1,3 +1,4 @@
+
cimport c_lunch
def eject_tomato(float speed):
diff --git a/docs/examples/userguide/sharing_declarations/restaurant.py b/docs/examples/userguide/sharing_declarations/restaurant.py
new file mode 100644
index 000000000..b4bdb2eba
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/restaurant.py
@@ -0,0 +1,12 @@
+import cython
+from cython.cimports.dishes import spamdish, sausage
+
+@cython.cfunc
+def prepare(d: cython.pointer(spamdish)) -> cython.void:
+ d.oz_of_spam = 42
+ d.filler = sausage
+
+def serve():
+ d: spamdish
+ prepare(cython.address(d))
+ print(f'{d.oz_of_spam} oz spam, filler no. {d.filler}')
diff --git a/docs/examples/userguide/sharing_declarations/restaurant.pyx b/docs/examples/userguide/sharing_declarations/restaurant.pyx
index 3257c681b..f556646dc 100644
--- a/docs/examples/userguide/sharing_declarations/restaurant.pyx
+++ b/docs/examples/userguide/sharing_declarations/restaurant.pyx
@@ -1,4 +1,4 @@
-from __future__ import print_function
+
cimport dishes
from dishes cimport spamdish
diff --git a/docs/examples/userguide/sharing_declarations/setup_py.py b/docs/examples/userguide/sharing_declarations/setup_py.py
new file mode 100644
index 000000000..45ded0ff4
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/setup_py.py
@@ -0,0 +1,4 @@
+from setuptools import setup
+from Cython.Build import cythonize
+
+setup(ext_modules=cythonize(["landscaping.py", "shrubbing.py"]))
diff --git a/docs/examples/userguide/sharing_declarations/setup.py b/docs/examples/userguide/sharing_declarations/setup_pyx.py
index 505b53e9d..505b53e9d 100644
--- a/docs/examples/userguide/sharing_declarations/setup.py
+++ b/docs/examples/userguide/sharing_declarations/setup_pyx.py
diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.py b/docs/examples/userguide/sharing_declarations/shrubbing.py
new file mode 100644
index 000000000..27e20d631
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/shrubbing.py
@@ -0,0 +1,10 @@
+import cython
+
+@cython.cclass
+class Shrubbery:
+ def __cinit__(self, w: cython.int, l: cython.int):
+ self.width = w
+ self.length = l
+
+def standard_shrubbery():
+ return Shrubbery(3, 7)
diff --git a/docs/examples/userguide/sharing_declarations/shrubbing.pyx b/docs/examples/userguide/sharing_declarations/shrubbing.pyx
index bb97e7e77..91235e5ec 100644
--- a/docs/examples/userguide/sharing_declarations/shrubbing.pyx
+++ b/docs/examples/userguide/sharing_declarations/shrubbing.pyx
@@ -1,5 +1,8 @@
+
+
+
cdef class Shrubbery:
- def __cinit__(self, int w, int l):
+ def __init__(self, int w, int l):
self.width = w
self.length = l
diff --git a/docs/examples/userguide/sharing_declarations/spammery.py b/docs/examples/userguide/sharing_declarations/spammery.py
new file mode 100644
index 000000000..88554be4a
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/spammery.py
@@ -0,0 +1,10 @@
+import cython
+from cython.cimports.volume import cube
+
+def menu(description, size):
+ print(description, ":", cube(size),
+ "cubic metres of spam")
+
+menu("Entree", 1)
+menu("Main course", 3)
+menu("Dessert", 2)
diff --git a/docs/examples/userguide/sharing_declarations/spammery.pyx b/docs/examples/userguide/sharing_declarations/spammery.pyx
index 16cbda06e..da11e737e 100644
--- a/docs/examples/userguide/sharing_declarations/spammery.pyx
+++ b/docs/examples/userguide/sharing_declarations/spammery.pyx
@@ -1,5 +1,4 @@
-from __future__ import print_function
-
+
from volume cimport cube
def menu(description, size):
diff --git a/docs/examples/userguide/sharing_declarations/volume.py b/docs/examples/userguide/sharing_declarations/volume.py
new file mode 100644
index 000000000..1f6ff9c72
--- /dev/null
+++ b/docs/examples/userguide/sharing_declarations/volume.py
@@ -0,0 +1,2 @@
+def cube(x):
+ return x * x * x
diff --git a/docs/examples/userguide/special_methods/total_ordering.py b/docs/examples/userguide/special_methods/total_ordering.py
new file mode 100644
index 000000000..7d164d6df
--- /dev/null
+++ b/docs/examples/userguide/special_methods/total_ordering.py
@@ -0,0 +1,13 @@
+import cython
+@cython.total_ordering
+@cython.cclass
+class ExtGe:
+ x: cython.int
+
+ def __ge__(self, other):
+ if not isinstance(other, ExtGe):
+ return NotImplemented
+ return self.x >= cython.cast(ExtGe, other).x
+
+ def __eq__(self, other):
+ return isinstance(other, ExtGe) and self.x == cython.cast(ExtGe, other).x
diff --git a/docs/examples/userguide/special_methods/total_ordering.pyx b/docs/examples/userguide/special_methods/total_ordering.pyx
new file mode 100644
index 000000000..06d2ccef7
--- /dev/null
+++ b/docs/examples/userguide/special_methods/total_ordering.pyx
@@ -0,0 +1,13 @@
+import cython
+
+@cython.total_ordering
+cdef class ExtGe:
+ cdef int x
+
+ def __ge__(self, other):
+ if not isinstance(other, ExtGe):
+ return NotImplemented
+ return self.x >= (<ExtGe>other).x
+
+ def __eq__(self, other):
+ return isinstance(other, ExtGe) and self.x == (<ExtGe>other).x
diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx
index e7c4423ef..d8eec16ef 100644
--- a/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx
+++ b/docs/examples/userguide/wrapping_CPlusPlus/rect.pyx
@@ -8,7 +8,7 @@ from Rectangle cimport Rectangle
cdef class PyRectangle:
cdef Rectangle c_rect # Hold a C++ instance which we're wrapping
- def __cinit__(self, int x0, int y0, int x1, int y1):
+ def __init__(self, int x0, int y0, int x1, int y1):
self.c_rect = Rectangle(x0, y0, x1, y1)
def get_area(self):
diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx
index 0c48689e7..ec4b34ab4 100644
--- a/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx
+++ b/docs/examples/userguide/wrapping_CPlusPlus/rect_ptr.pyx
@@ -5,8 +5,14 @@ from Rectangle cimport Rectangle
cdef class PyRectangle:
cdef Rectangle*c_rect # hold a pointer to the C++ instance which we're wrapping
- def __cinit__(self, int x0, int y0, int x1, int y1):
- self.c_rect = new Rectangle(x0, y0, x1, y1)
+ def __cinit__(self):
+ self.c_rect = new Rectangle()
+
+ def __init__(self, int x0, int y0, int x1, int y1):
+ self.c_rect.x0 = x0
+ self.c_rect.y0 = y0
+ self.c_rect.x1 = x1
+ self.c_rect.y1 = y1
def __dealloc__(self):
del self.c_rect
diff --git a/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx b/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx
index 1bac30dec..441292ace 100644
--- a/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx
+++ b/docs/examples/userguide/wrapping_CPlusPlus/rect_with_attributes.pyx
@@ -5,7 +5,7 @@ from Rectangle cimport Rectangle
cdef class PyRectangle:
cdef Rectangle c_rect
- def __cinit__(self, int x0, int y0, int x1, int y1):
+ def __init__(self, int x0, int y0, int x1, int y1):
self.c_rect = Rectangle(x0, y0, x1, y1)
def get_area(self):
diff --git a/docs/src/quickstart/build.rst b/docs/src/quickstart/build.rst
index 5d9e8a307..3cbcfa087 100644
--- a/docs/src/quickstart/build.rst
+++ b/docs/src/quickstart/build.rst
@@ -18,6 +18,10 @@ one may want to read more about
There are several ways to build Cython code:
- Write a setuptools ``setup.py``. This is the normal and recommended way.
+ - Run the ``cythonize`` command-line utility. This is a good approach for
+ compiling a single Cython source file directly to an extension.
+ A source file can be built "in place" (so that the extension module is created
+ next to the source file, ready to be imported) with ``cythonize -i filename.pyx``.
- Use :ref:`Pyximport<pyximport>`, importing Cython ``.pyx`` files as if they
were ``.py`` files (using setuptools to compile and build in the background).
This method is easier than writing a ``setup.py``, but is not very flexible.
diff --git a/docs/src/quickstart/install.rst b/docs/src/quickstart/install.rst
index 8b5f4c350..979d0f178 100644
--- a/docs/src/quickstart/install.rst
+++ b/docs/src/quickstart/install.rst
@@ -15,8 +15,10 @@ according to the system used:
- **Linux** The GNU C Compiler (gcc) is usually present, or easily
available through the package system. On Ubuntu or Debian, for
- instance, the command ``sudo apt-get install build-essential`` will
- fetch everything you need.
+ instance, it is part of the ``build-essential`` package. Next to a
+ C compiler, Cython requires the Python header files. On Ubuntu or
+ Debian, the command ``sudo apt-get install build-essential python3-dev``
+ will fetch everything you need.
- **Mac OS X** To retrieve gcc, one option is to install Apple's
XCode, which can be retrieved from the Mac OS X's install DVDs or
@@ -27,7 +29,11 @@ according to the system used:
built with. This is usually a specific version of Microsoft Visual
C/C++ (MSVC) - see https://wiki.python.org/moin/WindowsCompilers.
MSVC is the only compiler that Cython is currently tested with on
- Windows. A possible alternative is the open source MinGW (a
+ Windows. If you're having difficulty making setuptools detect
+ MSVC then `PyMSVC <https://github.com/kdschlosser/python_msvc>`_
+ aims to solve this.
+
+ A possible alternative is the open source MinGW (a
Windows distribution of gcc). See the appendix for instructions for
setting up MinGW manually. Enthought Canopy and Python(x,y) bundle
MinGW, but some of the configuration steps in the appendix might
diff --git a/docs/src/tutorial/clibraries.rst b/docs/src/tutorial/clibraries.rst
index ddc02f443..3542dbe8e 100644
--- a/docs/src/tutorial/clibraries.rst
+++ b/docs/src/tutorial/clibraries.rst
@@ -125,9 +125,6 @@ Here is a first start for the Queue class:
.. literalinclude:: ../../examples/tutorial/clibraries/queue.py
:caption: queue.py
- .. note:: Currently, Cython contains a bug not allowing using
- annotations with types containing pointers (GitHub issue :issue:`4293`).
-
.. group-tab:: Cython
.. literalinclude:: ../../examples/tutorial/clibraries/queue.pyx
@@ -584,7 +581,6 @@ and check if the queue really is empty or not:
.. code-block:: python
@cython.cfunc
- @cython.exceptval(-1, check=True)
def peek(self) -> cython.int:
value: cython.int = cython.cast(cython.Py_ssize_t, cqueue.queue_peek_head(self._c_queue))
if value == 0:
@@ -598,7 +594,7 @@ and check if the queue really is empty or not:
.. code-block:: cython
- cdef int peek(self) except? -1:
+ cdef int peek(self):
cdef int value = <Py_ssize_t>cqueue.queue_peek_head(self._c_queue)
if value == 0:
# this may mean that the queue is empty, or
@@ -611,39 +607,27 @@ Note how we have effectively created a fast path through the method in
the hopefully common cases that the return value is not ``0``. Only
that specific case needs an additional check if the queue is empty.
-The ``except? -1`` or ``@cython.exceptval(-1, check=True)`` declaration
-in the method signature falls into the
-same category. If the function was a Python function returning a
+If the ``peek`` function was a Python function returning a
Python object value, CPython would simply return ``NULL`` internally
instead of a Python object to indicate an exception, which would
immediately be propagated by the surrounding code. The problem is
that the return type is ``int`` and any ``int`` value is a valid queue
item value, so there is no way to explicitly signal an error to the
-calling code. In fact, without such a declaration, there is no
-obvious way for Cython to know what to return on exceptions and for
-calling code to even know that this method *may* exit with an
-exception.
+calling code.
The only way calling code can deal with this situation is to call
``PyErr_Occurred()`` when returning from a function to check if an
exception was raised, and if so, propagate the exception. This
-obviously has a performance penalty. Cython therefore allows you to
-declare which value it should implicitly return in the case of an
+obviously has a performance penalty. Cython therefore uses a dedicated value
+that it implicitly returns in the case of an
exception, so that the surrounding code only needs to check for an
exception when receiving this exact value.
-We chose to use ``-1`` as the exception return value as we expect it
-to be an unlikely value to be put into the queue. The question mark
-in the ``except? -1`` declaration and ``check=True`` in ``@cython.exceptval``
-indicates that the return value is
-ambiguous (there *may* be a ``-1`` value in the queue, after all) and
-that an additional exception check using ``PyErr_Occurred()`` is
-needed in calling code. Without it, Cython code that calls this
-method and receives the exception return value would silently (and
-sometimes incorrectly) assume that an exception has been raised. In
-any case, all other return values will be passed through almost
+By default, the value ``-1`` is used as the exception return value.
+All other return values will be passed through almost
without a penalty, thus again creating a fast path for 'normal'
-values.
+values. See :ref:`error_return_values` for more details.
+
Now that the ``peek()`` method is implemented, the ``pop()`` method
also needs adaptation. Since it removes a value from the queue,
@@ -657,7 +641,6 @@ removal. Instead, we must test it on entry:
.. code-block:: python
@cython.cfunc
- @cython.exceptval(-1, check=True)
def pop(self) -> cython.int:
if cqueue.queue_is_empty(self._c_queue):
raise IndexError("Queue is empty")
@@ -667,7 +650,7 @@ removal. Instead, we must test it on entry:
.. code-block:: cython
- cdef int pop(self) except? -1:
+ cdef int pop(self):
if cqueue.queue_is_empty(self._c_queue):
raise IndexError("Queue is empty")
return <Py_ssize_t>cqueue.queue_pop_head(self._c_queue)
diff --git a/docs/src/tutorial/cython_tutorial.rst b/docs/src/tutorial/cython_tutorial.rst
index 647ec62b2..e3ab46005 100644
--- a/docs/src/tutorial/cython_tutorial.rst
+++ b/docs/src/tutorial/cython_tutorial.rst
@@ -390,13 +390,13 @@ Now we can ensure that those two programs output the same values::
It's possible to compare the speed now::
- python -m timeit -s 'from primes_python import primes' 'primes(1000)'
+ python -m timeit -s "from primes_python import primes" "primes(1000)"
10 loops, best of 3: 23 msec per loop
- python -m timeit -s 'from primes_python_compiled import primes' 'primes(1000)'
+ python -m timeit -s "from primes_python_compiled import primes" "primes(1000)"
100 loops, best of 3: 11.9 msec per loop
- python -m timeit -s 'from primes import primes' 'primes(1000)'
+ python -m timeit -s "from primes import primes" "primes(1000)"
1000 loops, best of 3: 1.65 msec per loop
The cythonize version of ``primes_python`` is 2 times faster than the Python one,
diff --git a/docs/src/tutorial/embedding.rst b/docs/src/tutorial/embedding.rst
index 3f6325428..819506cde 100644
--- a/docs/src/tutorial/embedding.rst
+++ b/docs/src/tutorial/embedding.rst
@@ -75,3 +75,10 @@ option. Or use the
script to embed multiple modules. See the
`embedding demo program <https://github.com/cython/cython/tree/master/Demos/embed>`_
for a complete example setup.
+
+Be aware that your application will not contain any external dependencies that
+you use (including Python standard library modules) and so may not be truly portable.
+If you want to generate a portable application we recommend using a specialized
+tool (e.g. `PyInstaller <https://pyinstaller.org/en/stable/>`_
+or `cx_freeze <https://cx-freeze.readthedocs.io/en/latest/index.html>`_) to find and
+bundle these dependencies.
diff --git a/docs/src/tutorial/pure.rst b/docs/src/tutorial/pure.rst
index a536f2b31..91a381d1a 100644
--- a/docs/src/tutorial/pure.rst
+++ b/docs/src/tutorial/pure.rst
@@ -29,6 +29,7 @@ In pure mode, you are more or less restricted to code that can be expressed
beyond that can only be done in .pyx files with extended language syntax,
because it depends on features of the Cython compiler.
+.. _augmenting_pxd:
Augmenting .pxd
---------------
@@ -249,6 +250,8 @@ releasing or acquiring the GIL. The condition must be constant (at compile time)
A common use case for conditionally acquiring and releasing the GIL are fused types
that allow different GIL handling depending on the specific type (see :ref:`gil_conditional`).
+.. py:module:: cython.cimports
+
cimports
^^^^^^^^
@@ -344,8 +347,7 @@ PEP-484 type annotations
Python `type hints <https://www.python.org/dev/peps/pep-0484>`_
can be used to declare argument types, as shown in the
-following example. To avoid conflicts with other kinds of annotation
-usages, this can be disabled with the directive ``annotation_typing=False``.
+following example:
.. literalinclude:: ../../examples/tutorial/pure/annotations.py
@@ -375,6 +377,18 @@ declare types of variables in a Python 3.6 compatible way as follows:
There is currently no way to express the visibility of object attributes.
+Disabling annotations
+^^^^^^^^^^^^^^^^^^^^^
+
+To avoid conflicts with other kinds of annotation
+usages, Cython's use of annotations to specify types can be disabled with the
+``annotation_typing`` :ref:`compiler directive<compiler-directives>`. From Cython 3
+you can use this as a decorator or a with statement, as shown in the following example:
+
+.. literalinclude:: ../../examples/tutorial/pure/disabled_annotations.py
+
+
+
``typing`` Module
^^^^^^^^^^^^^^^^^
diff --git a/docs/src/two-syntax-variants-used b/docs/src/two-syntax-variants-used
index af583a0a9..c5cd02cb1 100644
--- a/docs/src/two-syntax-variants-used
+++ b/docs/src/two-syntax-variants-used
@@ -16,3 +16,7 @@
.. code-block:: python
import cython
+
+ If you use the pure Python syntax we strongly recommend you use a recent
+ Cython 3 release, since significant improvements have been made here
+ compared to the 0.29.x releases.
diff --git a/docs/src/userguide/buffer.rst b/docs/src/userguide/buffer.rst
index 08661a184..3687cf2fd 100644
--- a/docs/src/userguide/buffer.rst
+++ b/docs/src/userguide/buffer.rst
@@ -3,6 +3,10 @@
Implementing the buffer protocol
================================
+.. include::
+ ../two-syntax-variants-used
+
+
Cython objects can expose memory buffers to Python code
by implementing the "buffer protocol".
This chapter shows how to implement the protocol
@@ -16,7 +20,15 @@ The following Cython/C++ code implements a matrix of floats,
where the number of columns is fixed at construction time
but rows can be added dynamically.
-.. literalinclude:: ../../examples/userguide/buffer/matrix.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/buffer/matrix.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/buffer/matrix.pyx
There are no methods to do anything productive with the matrices' contents.
We could implement custom ``__getitem__``, ``__setitem__``, etc. for this,
@@ -27,7 +39,15 @@ Implementing the buffer protocol requires adding two methods,
``__getbuffer__`` and ``__releasebuffer__``,
which Cython handles specially.
-.. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/buffer/matrix_with_buffer.pyx
The method ``Matrix.__getbuffer__`` fills a descriptor structure,
called a ``Py_buffer``, that is defined by the Python C-API.
@@ -75,7 +95,15 @@ This is where ``__releasebuffer__`` comes in.
We can add a reference count to each matrix,
and lock it for mutation whenever a view exists.
-.. literalinclude:: ../../examples/userguide/buffer/view_count.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/buffer/view_count.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/buffer/view_count.pyx
Flags
-----
diff --git a/docs/src/userguide/early_binding_for_speed.rst b/docs/src/userguide/early_binding_for_speed.rst
index 9bb8cf724..4a442d973 100644
--- a/docs/src/userguide/early_binding_for_speed.rst
+++ b/docs/src/userguide/early_binding_for_speed.rst
@@ -6,6 +6,9 @@
Early Binding for Speed
**************************
+.. include::
+ ../two-syntax-variants-used
+
As a dynamic language, Python encourages a programming style of considering
classes and objects in terms of their methods and attributes, more than where
they fit into the class hierarchy.
@@ -22,7 +25,15 @@ use of 'early binding' programming techniques.
For example, consider the following (silly) code example:
-.. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle.pyx
In the :func:`rectArea` method, the call to :meth:`rect.area` and the
:meth:`.area` method contain a lot of Python overhead.
@@ -30,7 +41,15 @@ In the :func:`rectArea` method, the call to :meth:`rect.area` and the
However, in Cython, it is possible to eliminate a lot of this overhead in cases
where calls occur within Cython code. For example:
-.. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cdef.pyx
Here, in the Rectangle extension class, we have defined two different area
calculation methods, the efficient :meth:`_area` C method, and the
@@ -46,10 +65,18 @@ dual-access methods - methods that can be efficiently called at C level, but
can also be accessed from pure Python code at the cost of the Python access
overheads. Consider this code:
-.. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/early_binding_for_speed/rectangle_cpdef.pyx
-Here, we just have a single area method, declared as :keyword:`cpdef` to make it
-efficiently callable as a C function, but still accessible from pure Python
+Here, we just have a single area method, declared as :keyword:`cpdef` or with the ``@ccall`` decorator
+to make it efficiently callable as a C function, but still accessible from pure Python
(or late-binding Cython) code.
If within Cython code, we have a variable already 'early-bound' (ie, declared
diff --git a/docs/src/userguide/extension_types.rst b/docs/src/userguide/extension_types.rst
index 678ddf5c8..42d77c378 100644
--- a/docs/src/userguide/extension_types.rst
+++ b/docs/src/userguide/extension_types.rst
@@ -9,20 +9,56 @@ Extension Types
Introduction
==============
+.. include::
+ ../two-syntax-variants-used
+
As well as creating normal user-defined classes with the Python class
statement, Cython also lets you create new built-in Python types, known as
:term:`extension types<Extension type>`. You define an extension type using the :keyword:`cdef` class
-statement. Here's an example:
+statement or decorating the class with the ``@cclass`` decorator. Here's an example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.py
-.. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx
As you can see, a Cython extension type definition looks a lot like a Python
-class definition. Within it, you use the def statement to define methods that
+class definition. Within it, you use the :keyword:`def` statement to define methods that
can be called from Python code. You can even define many of the special
methods such as :meth:`__init__` as you would in Python.
-The main difference is that you can use the :keyword:`cdef` statement to define
-attributes. The attributes may be Python objects (either generic or of a
+The main difference is that you can define attributes using
+
+* the :keyword:`cdef` statement,
+* the :func:`cython.declare()` function or
+* the annotation of an attribute name.
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cclass
+ class Shrubbery:
+ width = declare(cython.int)
+ height: cython.int
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef class Shrubbery:
+
+ cdef int width
+ cdef int height
+
+The attributes may be Python objects (either generic or of a
particular extension type), or they may be of any C data type. So you can use
extension types to wrap arbitrary C data structures and provide a Python-like
interface to them.
@@ -50,7 +86,15 @@ not Python access, which means that they are not accessible from Python code.
To make them accessible from Python code, you need to declare them as
:keyword:`public` or :keyword:`readonly`. For example:
-.. literalinclude:: ../../examples/userguide/extension_types/python_access.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/python_access.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/python_access.pyx
makes the width and height attributes readable and writable from Python code,
and the depth attribute readable but not writable.
@@ -74,15 +118,32 @@ Dynamic Attributes
It is not possible to add attributes to an extension type at runtime by default.
You have two ways of avoiding this limitation, both add an overhead when
-a method is called from Python code. Especially when calling ``cpdef`` methods.
+a method is called from Python code. Especially when calling hybrid methods declared
+with :keyword:`cpdef` in .pyx files or with the ``@ccall`` decorator.
+
+The first approach is to create a Python subclass:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.pyx
-The first approach is to create a Python subclass.:
+Declaring a ``__dict__`` attribute is the second way of enabling dynamic attributes:
-.. literalinclude:: ../../examples/userguide/extension_types/extendable_animal.pyx
+.. tabs::
-Declaring a ``__dict__`` attribute is the second way of enabling dynamic attributes.:
+ .. group-tab:: Pure Python
-.. literalinclude:: ../../examples/userguide/extension_types/dict_animal.pyx
+ .. literalinclude:: ../../examples/userguide/extension_types/dict_animal.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/dict_animal.pyx
Type declarations
===================
@@ -93,10 +154,24 @@ generic Python object. It knows this already in the case of the ``self``
parameter of the methods of that type, but in other cases you will have to use
a type declaration.
-For example, in the following function::
+For example, in the following function:
- cdef widen_shrubbery(sh, extra_width): # BAD
- sh.width = sh.width + extra_width
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cfunc
+ def widen_shrubbery(sh, extra_width): # BAD
+ sh.width = sh.width + extra_width
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef widen_shrubbery(sh, extra_width): # BAD
+ sh.width = sh.width + extra_width
because the ``sh`` parameter hasn't been given a type, the width attribute
will be accessed by a Python attribute lookup. If the attribute has been
@@ -107,18 +182,35 @@ will be very inefficient. If the attribute is private, it will not work at all
The solution is to declare ``sh`` as being of type :class:`Shrubbery`, as
follows:
-.. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/widen_shrubbery.pyx
Now the Cython compiler knows that ``sh`` has a C attribute called
:attr:`width` and will generate code to access it directly and efficiently.
The same consideration applies to local variables, for example:
-.. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery_2.pyx
.. note::
- We here ``cimport`` the class :class:`Shrubbery`, and this is necessary
- to declare the type at compile time. To be able to ``cimport`` an extension type,
+ Here, we *cimport* the class :class:`Shrubbery` (using the :keyword:`cimport` statement
+ or importing from special ``cython.cimports`` package), and this is necessary
+ to declare the type at compile time. To be able to cimport an extension type,
we split the class definition into two parts, one in a definition file and
the other in the corresponding implementation file. You should read
:ref:`sharing_extension_types` to learn to do that.
@@ -128,24 +220,61 @@ Type Testing and Casting
------------------------
Suppose I have a method :meth:`quest` which returns an object of type :class:`Shrubbery`.
-To access it's width I could write::
+To access its width I could write:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
- cdef Shrubbery sh = quest()
- print(sh.width)
+ .. code-block:: python
+
+ sh: Shrubbery = quest()
+ print(sh.width)
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef Shrubbery sh = quest()
+ print(sh.width)
which requires the use of a local variable and performs a type test on assignment.
If you *know* the return value of :meth:`quest` will be of type :class:`Shrubbery`
-you can use a cast to write::
+you can use a cast to write:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
- print( (<Shrubbery>quest()).width )
+ .. code-block:: python
+
+ print( cython.cast(Shrubbery, quest()).width )
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ print( (<Shrubbery>quest()).width )
This may be dangerous if :meth:`quest()` is not actually a :class:`Shrubbery`, as it
will try to access width as a C struct member which may not exist. At the C level,
rather than raising an :class:`AttributeError`, either a nonsensical result will be
returned (interpreting whatever data is at that address as an int) or a segfault
-may result from trying to access invalid memory. Instead, one can write::
+may result from trying to access invalid memory. Instead, one can write:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
- print( (<Shrubbery?>quest()).width )
+ print( cython.cast(Shrubbery, quest(), typecheck=True).width )
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ print( (<Shrubbery?>quest()).width )
which performs a type check (possibly raising a :class:`TypeError`) before making the
cast and allowing the code to proceed.
@@ -155,14 +284,18 @@ For known builtin or extension types, Cython translates these into a
fast and safe type check that ignores changes to
the object's ``__class__`` attribute etc., so that after a successful
:meth:`isinstance` test, code can rely on the expected C structure of the
-extension type and its :keyword:`cdef` attributes and methods.
+extension type and its C-level attributes (stored in the object’s C struct) and
+:keyword:`cdef`/``@cfunc`` methods.
.. _extension_types_and_none:
Extension types and None
=========================
-When you declare a parameter or C variable as being of an extension type,
+Cython handles ``None`` values differently in C-like type declarations and when Python annotations are used.
+
+In :keyword:`cdef` declarations and C-like function argument declarations (``func(list x)``),
+when you declare an argument or C variable as having an extension or Python builtin type,
Cython will allow it to take on the value ``None`` as well as values of its
declared type. This is analogous to the way a C pointer can take on the value
``NULL``, and you need to exercise the same caution because of it. There is no
@@ -172,24 +305,24 @@ of an extension type (as in the widen_shrubbery function above), it's up to
you to make sure the reference you're using is not ``None`` -- in the
interests of efficiency, Cython does not check this.
-You need to be particularly careful when exposing Python functions which take
-extension types as arguments. If we wanted to make :func:`widen_shrubbery` a
-Python function, for example, if we simply wrote::
+With the C-like declaration syntax, you need to be particularly careful when
+exposing Python functions which take extension types as arguments::
def widen_shrubbery(Shrubbery sh, extra_width): # This is
sh.width = sh.width + extra_width # dangerous!
-then users of our module could crash it by passing ``None`` for the ``sh``
+The users of our module could crash it by passing ``None`` for the ``sh``
parameter.
-One way to fix this would be::
+As in Python, whenever it is unclear whether a variable can be ``None``,
+but the code requires a non-None value, an explicit check can help::
def widen_shrubbery(Shrubbery sh, extra_width):
if sh is None:
raise TypeError
sh.width = sh.width + extra_width
-but since this is anticipated to be such a frequent requirement, Cython
+but since this is anticipated to be such a frequent requirement, Cython language
provides a more convenient way. Parameters of a Python function declared as an
extension type can have a ``not None`` clause::
@@ -199,18 +332,41 @@ extension type can have a ``not None`` clause::
Now the function will automatically check that ``sh`` is ``not None`` along
with checking that it has the right type.
+When annotations are used, the behaviour follows the Python typing semantics of
+`PEP-484 <https://www.python.org/dev/peps/pep-0484/>`_ instead.
+The value ``None`` is not allowed when a variable is annotated only with its plain type::
+
+ def widen_shrubbery(sh: Shrubbery, extra_width): # TypeError is raised
+ sh.width = sh.width + extra_width # when sh is None
+
+To also allow ``None``, ``typing.Optional[ ]`` must be used explicitly.
+For function arguments, this is also automatically allowed when they have a
+default argument of ``None``, e.g. ``func(x: list = None)`` does not require ``typing.Optional``::
+
+ import typing
+ def widen_shrubbery(sh: typing.Optional[Shrubbery], extra_width):
+ if sh is None:
+ # We want to raise a custom exception in case of a None value.
+ raise ValueError
+ sh.width = sh.width + extra_width
+
+The upside of using annotations here is that they are safe by default because
+you need to explicitly allow ``None`` values for them.
+
+
.. note::
- ``not None`` clause can only be used in Python functions (defined with
- :keyword:`def`) and not C functions (defined with :keyword:`cdef`). If
- you need to check whether a parameter to a C function is None, you will
+ The ``not None`` clause and ``typing.Optional`` can only be used in Python functions (defined with
+ :keyword:`def` and without ``@cython.cfunc`` decorator) and not C functions
+ (defined with :keyword:`cdef` or decorated using ``@cython.cfunc``). If
+ you need to check whether a parameter to a C function is ``None``, you will
need to do it yourself.
.. note::
Some more things:
- * The self parameter of a method of an extension type is guaranteed never to
+ * The ``self`` parameter of a method of an extension type is guaranteed never to
be ``None``.
* When comparing a value with ``None``, keep in mind that, if ``x`` is a Python
object, ``x is None`` and ``x is not None`` are very efficient because they
@@ -232,23 +388,49 @@ extension types.
Properties
============
-You can declare properties in an extension class using the same syntax as in ordinary Python code::
+You can declare properties in an extension class using the same syntax as in ordinary Python code:
- cdef class Spam:
+.. tabs::
- @property
- def cheese(self):
- # This is called when the property is read.
- ...
+ .. group-tab:: Pure Python
- @cheese.setter
- def cheese(self, value):
- # This is called when the property is written.
- ...
+ .. code-block:: python
+
+ @cython.cclass
+ class Spam:
+ @property
+ def cheese(self):
+ # This is called when the property is read.
+ ...
+
+ @cheese.setter
+ def cheese(self, value):
+ # This is called when the property is written.
+ ...
- @cheese.deleter
- def cheese(self):
- # This is called when the property is deleted.
+ @cheese.deleter
+ def cheese(self):
+ # This is called when the property is deleted.
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef class Spam:
+
+ @property
+ def cheese(self):
+ # This is called when the property is read.
+ ...
+
+ @cheese.setter
+ def cheese(self, value):
+ # This is called when the property is written.
+ ...
+
+ @cheese.deleter
+ def cheese(self):
+ # This is called when the property is deleted.
There is also a special (deprecated) legacy syntax for defining properties in an extension class::
@@ -277,50 +459,83 @@ corresponding operation is attempted.
Here's a complete example. It defines a property which adds to a list each
time it is written to, returns the list when it is read, and empties the list
-when it is deleted.::
+when it is deleted:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/cheesy.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/cheesy.pyx
+
+.. code-block:: text
- # cheesy.pyx
- cdef class CheeseShop:
+ # Test output
+ We don't have: []
+ We don't have: ['camembert']
+ We don't have: ['camembert', 'cheddar']
+ We don't have: []
- cdef object cheeses
- def __cinit__(self):
- self.cheeses = []
+C methods
+=========
- @property
- def cheese(self):
- return "We don't have: %s" % self.cheeses
+Extension types can have C methods as well as Python methods. Like C
+functions, C methods are declared using
- @cheese.setter
- def cheese(self, value):
- self.cheeses.append(value)
+* :keyword:`cdef` instead of :keyword:`def` or ``@cfunc`` decorator for *C methods*, or
+* :keyword:`cpdef` instead of :keyword:`def` or ``@ccall`` decorator for *hybrid methods*.
- @cheese.deleter
- def cheese(self):
- del self.cheeses[:]
+C methods are "virtual", and may be overridden in derived extension types.
+In addition, :keyword:`cpdef`/``@ccall`` methods can even be overridden by Python
+methods when called as C method. This adds a little to their calling overhead
+compared to a :keyword:`cdef`/``@cfunc`` method:
- # Test input
- from cheesy import CheeseShop
+.. tabs::
- shop = CheeseShop()
- print(shop.cheese)
+ .. group-tab:: Pure Python
- shop.cheese = "camembert"
- print(shop.cheese)
+ .. literalinclude:: ../../examples/userguide/extension_types/pets.py
- shop.cheese = "cheddar"
- print(shop.cheese)
+ .. group-tab:: Cython
- del shop.cheese
- print(shop.cheese)
+ .. literalinclude:: ../../examples/userguide/extension_types/pets.pyx
.. code-block:: text
- # Test output
- We don't have: []
- We don't have: ['camembert']
- We don't have: ['camembert', 'cheddar']
- We don't have: []
+ # Output
+ p1:
+ This parrot is resting.
+ p2:
+ This parrot is resting.
+ Lovely plumage!
+
+The above example also illustrates that a C method can call an inherited C
+method using the usual Python technique, i.e.::
+
+ Parrot.describe(self)
+
+:keyword:`cdef`/``@cfunc`` methods can be declared static by using the ``@staticmethod`` decorator.
+This can be especially useful for constructing classes that take non-Python compatible types:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/owned_pointer.pyx
+
+.. note::
+
+ Cython currently does not support decorating :keyword:`cdef`/``@ccall`` methods with
+ the ``@classmethod`` decorator.
+
.. _subclassing:
@@ -328,22 +543,41 @@ Subclassing
=============
If an extension type inherits from other types, the first base class must be
-a built-in type or another extension type::
+a built-in type or another extension type:
- cdef class Parrot:
- ...
+.. tabs::
- cdef class Norwegian(Parrot):
- ...
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cclass
+ class Parrot:
+ ...
+
+ @cython.cclass
+ class Norwegian(Parrot):
+ ...
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef class Parrot:
+ ...
+
+
+ cdef class Norwegian(Parrot):
+ ...
A complete definition of the base type must be available to Cython, so if the
base type is a built-in type, it must have been previously declared as an
extern extension type. If the base type is defined in another Cython module, it
must either be declared as an extern extension type or imported using the
-:keyword:`cimport` statement.
+:keyword:`cimport` statement or importing from the special ``cython.cimports`` package.
-Multiple inheritance is supported, however the second and subsequent base
+Multiple inheritance is supported, however the second and subsequent base
classes must be an ordinary Python class (not an extension type or a built-in
type).
@@ -354,84 +588,54 @@ must be compatible).
There is a way to prevent extension types from
being subtyped in Python. This is done via the ``final`` directive,
-usually set on an extension type using a decorator::
-
- cimport cython
-
- @cython.final
- cdef class Parrot:
- def done(self): pass
+usually set on an extension type or C method using a decorator:
-Trying to create a Python subclass from this type will raise a
-:class:`TypeError` at runtime. Cython will also prevent subtyping a
-final type inside of the same module, i.e. creating an extension type
-that uses a final type as its base type will fail at compile time.
-Note, however, that this restriction does not currently propagate to
-other extension modules, so even final extension types can still be
-subtyped at the C level by foreign code.
+.. tabs::
+ .. group-tab:: Pure Python
-C methods
-=========
+ .. code-block:: python
-Extension types can have C methods as well as Python methods. Like C
-functions, C methods are declared using :keyword:`cdef` or :keyword:`cpdef` instead of
-:keyword:`def`. C methods are "virtual", and may be overridden in derived
-extension types. In addition, :keyword:`cpdef` methods can even be overridden by python
-methods when called as C method. This adds a little to their calling overhead
-compared to a :keyword:`cdef` method::
+ import cython
- # pets.pyx
- cdef class Parrot:
+ @cython.final
+ @cython.cclass
+ class Parrot:
+ def describe(self): pass
- cdef void describe(self):
- print("This parrot is resting.")
+ @cython.cclass
+ class Lizard:
- cdef class Norwegian(Parrot):
+ @cython.final
+ @cython.cfunc
+ def done(self): pass
- cdef void describe(self):
- Parrot.describe(self)
- print("Lovely plumage!")
+ .. group-tab:: Cython
+ .. code-block:: cython
- cdef Parrot p1, p2
- p1 = Parrot()
- p2 = Norwegian()
- print("p1:")
- p1.describe()
- print("p2:")
- p2.describe()
+ cimport cython
-.. code-block:: text
+ @cython.final
+ cdef class Parrot:
+ def describe(self): pass
- # Output
- p1:
- This parrot is resting.
- p2:
- This parrot is resting.
- Lovely plumage!
-The above example also illustrates that a C method can call an inherited C
-method using the usual Python technique, i.e.::
- Parrot.describe(self)
+ cdef class Lizard:
-`cdef` methods can be declared static by using the @staticmethod decorator.
-This can be especially useful for constructing classes that take non-Python
-compatible types.::
- cdef class OwnedPointer:
- cdef void* ptr
+ @cython.final
+ cdef done(self): pass
- def __dealloc__(self):
- if self.ptr is not NULL:
- free(self.ptr)
+Trying to create a Python subclass from a final type or overriding a final method will raise
+a :class:`TypeError` at runtime. Cython will also prevent subtyping a
+final type or overriding a final method inside of the same module, i.e. creating
+an extension type that uses a final type as its base type will fail at compile time.
+Note, however, that this restriction does not currently propagate to
+other extension modules, so Cython is unable to prevent final extension types
+from being subtyped at the C level by foreign code.
- @staticmethod
- cdef create(void* ptr):
- p = OwnedPointer()
- p.ptr = ptr
- return p
.. _forward_declaring_extension_types:
@@ -460,19 +664,17 @@ Fast instantiation
Cython provides two ways to speed up the instantiation of extension types.
The first one is a direct call to the ``__new__()`` special static method,
as known from Python. For an extension type ``Penguin``, you could use
-the following code::
+the following code:
- cdef class Penguin:
- cdef object food
+.. tabs::
- def __cinit__(self, food):
- self.food = food
+ .. group-tab:: Pure Python
- def __init__(self, food):
- print("eating!")
+ .. literalinclude:: ../../examples/userguide/extension_types/penguin.py
- normal_penguin = Penguin('fish')
- fast_penguin = Penguin.__new__(Penguin, 'wheat') # note: not calling __init__() !
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/penguin.pyx
Note that the path through ``__new__()`` will *not* call the type's
``__init__()`` method (again, as known from Python). Thus, in the example
@@ -480,24 +682,23 @@ above, the first instantiation will print ``eating!``, but the second will
not. This is only one of the reasons why the ``__cinit__()`` method is
safer than the normal ``__init__()`` method for initialising extension types
and bringing them into a correct and safe state.
-See section :ref:`_initialisation_methods` about the differences.
+See the :ref:`Initialisation Methods Section <initialisation_methods>` about
+the differences.
The second performance improvement applies to types that are often created
and deleted in a row, so that they can benefit from a freelist. Cython
provides the decorator ``@cython.freelist(N)`` for this, which creates a
-statically sized freelist of ``N`` instances for a given type. Example::
+statically sized freelist of ``N`` instances for a given type. Example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
- cimport cython
+ .. literalinclude:: ../../examples/userguide/extension_types/penguin2.py
- @cython.freelist(8)
- cdef class Penguin:
- cdef object food
- def __cinit__(self, food):
- self.food = food
+ .. group-tab:: Cython
- penguin = Penguin('fish 1')
- penguin = None
- penguin = Penguin('fish 2') # does not need to allocate memory!
+ .. literalinclude:: ../../examples/userguide/extension_types/penguin2.pyx
.. _existing-pointers-instantiation:
@@ -508,63 +709,17 @@ It is quite common to want to instantiate an extension class from an existing
(pointer to a) data structure, often as returned by external C/C++ functions.
As extension classes can only accept Python objects as arguments in their
-constructors, this necessitates the use of factory functions. For example, ::
-
- from libc.stdlib cimport malloc, free
-
- # Example C struct
- ctypedef struct my_c_struct:
- int a
- int b
-
-
- cdef class WrapperClass:
- """A wrapper class for a C/C++ data structure"""
- cdef my_c_struct *_ptr
- cdef bint ptr_owner
-
- def __cinit__(self):
- self.ptr_owner = False
-
- def __dealloc__(self):
- # De-allocate if not null and flag is set
- if self._ptr is not NULL and self.ptr_owner is True:
- free(self._ptr)
- self._ptr = NULL
-
- # Extension class properties
- @property
- def a(self):
- return self._ptr.a if self._ptr is not NULL else None
-
- @property
- def b(self):
- return self._ptr.b if self._ptr is not NULL else None
-
- @staticmethod
- cdef WrapperClass from_ptr(my_c_struct *_ptr, bint owner=False):
- """Factory function to create WrapperClass objects from
- given my_c_struct pointer.
-
- Setting ``owner`` flag to ``True`` causes
- the extension type to ``free`` the structure pointed to by ``_ptr``
- when the wrapper object is deallocated."""
- # Call to __new__ bypasses __init__ constructor
- cdef WrapperClass wrapper = WrapperClass.__new__(WrapperClass)
- wrapper._ptr = _ptr
- wrapper.ptr_owner = owner
- return wrapper
-
- @staticmethod
- cdef WrapperClass new_struct():
- """Factory function to create WrapperClass objects with
- newly allocated my_c_struct"""
- cdef my_c_struct *_ptr = <my_c_struct *>malloc(sizeof(my_c_struct))
- if _ptr is NULL:
- raise MemoryError
- _ptr.a = 0
- _ptr.b = 0
- return WrapperClass.from_ptr(_ptr, owner=True)
+constructors, this necessitates the use of factory functions or factory methods. For example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/wrapper_class.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/wrapper_class.pyx
To then create a ``WrapperClass`` object from an existing ``my_c_struct``
@@ -606,13 +761,30 @@ Making extension types weak-referenceable
By default, extension types do not support having weak references made to
them. You can enable weak referencing by declaring a C attribute of type
-object called :attr:`__weakref__`. For example,::
+object called :attr:`__weakref__`. For example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cclass
+ class ExplodingAnimal:
+ """This animal will self-destruct when it is
+ no longer strongly referenced."""
- cdef class ExplodingAnimal:
- """This animal will self-destruct when it is
- no longer strongly referenced."""
+ __weakref__: object
- cdef object __weakref__
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef class ExplodingAnimal:
+ """This animal will self-destruct when it is
+ no longer strongly referenced."""
+
+ cdef object __weakref__
Controlling deallocation and garbage collection in CPython
@@ -690,12 +862,28 @@ CPython invented a mechanism for this called the *trashcan*. It limits the
recursion depth of deallocations by delaying some deallocations.
By default, Cython extension types do not use the trashcan but it can be
-enabled by setting the ``trashcan`` directive to ``True``. For example::
+enabled by setting the ``trashcan`` directive to ``True``. For example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
- cimport cython
- @cython.trashcan(True)
- cdef class Object:
- cdef dict __dict__
+ import cython
+ @cython.trashcan(True)
+ @cython.cclass
+ class Object:
+ __dict__: dict
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cimport cython
+ @cython.trashcan(True)
+ cdef class Object:
+ cdef dict __dict__
Trashcan usage is inherited by subclasses
(unless explicitly disabled by ``@cython.trashcan(False)``).
@@ -719,15 +907,34 @@ have triggered a call to ``tp_clear`` to clear the object
In that case, any object references have vanished when ``__dealloc__``
is called. Now your cleanup code lost access to the objects it has to clean up.
To fix this, you can disable clearing instances of a specific class by using
-the ``no_gc_clear`` directive::
+the ``no_gc_clear`` directive:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
- @cython.no_gc_clear
- cdef class DBCursor:
- cdef DBConnection conn
- cdef DBAPI_Cursor *raw_cursor
- # ...
- def __dealloc__(self):
- DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor)
+ @cython.no_gc_clear
+ @cython.cclass
+ class DBCursor:
+ conn: DBConnection
+ raw_cursor: cython.pointer(DBAPI_Cursor)
+ # ...
+ def __dealloc__(self):
+ DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor)
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ @cython.no_gc_clear
+ cdef class DBCursor:
+ cdef DBConnection conn
+ cdef DBAPI_Cursor *raw_cursor
+ # ...
+ def __dealloc__(self):
+ DBAPI_close_cursor(self.conn.raw_conn, self.raw_cursor)
This example tries to close a cursor via a database connection when the Python
object is destroyed. The ``DBConnection`` object is kept alive by the reference
@@ -747,12 +954,29 @@ but the compiler won't be able to prove this. This would be the case if
the class can never reference itself, even indirectly.
In that case, you can manually disable cycle collection by using the
``no_gc`` directive, but beware that doing so when in fact the extension type
-can participate in cycles could cause memory leaks ::
+can participate in cycles could cause memory leaks:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
- @cython.no_gc
- cdef class UserInfo:
- cdef str name
- cdef tuple addresses
+ @cython.no_gc
+ @cython.cclass
+ class UserInfo:
+ name: str
+ addresses: tuple
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ @cython.no_gc
+ cdef class UserInfo:
+
+ cdef str name
+ cdef tuple addresses
If you can be sure addresses will contain only references to strings,
the above would be safe, and it may yield a significant speedup, depending on
@@ -785,6 +1009,13 @@ declaration makes an extension type defined in external C code available to a
Cython module. A public extension type declaration makes an extension type
defined in a Cython module available to external C code.
+.. note::
+
+ Cython currently does not support Extension types declared as extern or public
+ in Pure Python mode. This is not considered an issue since public/extern extension
+ types are most commonly declared in `.pxd` files and not in `.py` files.
+
+
.. _external_extension_types:
External extension types
@@ -801,7 +1032,7 @@ objects defined in the Python core or in a non-Cython extension module.
:ref:`sharing-declarations`.
Here is an example which will let you get at the C-level members of the
-built-in complex object.::
+built-in complex object::
from __future__ import print_function
@@ -1072,7 +1303,15 @@ can only be applied to extension types (types marked ``cdef`` or created with th
``cython.cclass`` decorator) and not to regular classes. If
you need to define special properties on a field then use ``cython.dataclasses.field``
-.. literalinclude:: ../../examples/userguide/extension_types/dataclass.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/dataclass.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/dataclass.pyx
You may use C-level types such as structs, pointers, or C++ classes.
However, you may find these types are not compatible with the auto-generated
diff --git a/docs/src/userguide/external_C_code.rst b/docs/src/userguide/external_C_code.rst
index b080ecf0e..2e977243d 100644
--- a/docs/src/userguide/external_C_code.rst
+++ b/docs/src/userguide/external_C_code.rst
@@ -471,7 +471,9 @@ For example, in the following snippet that includes :file:`grail.h`:
}
This C code can then be built together with the Cython-generated C code
-in a single program (or library).
+in a single program (or library). Be aware that this program will not include
+any external dependencies that your module uses. Therefore typically this will
+not generate a truly portable application for most cases.
In Python 3.x, calling the module init function directly should be avoided. Instead,
use the `inittab mechanism <https://docs.python.org/3/c-api/import.html#c._inittab>`_
diff --git a/docs/src/userguide/language_basics.rst b/docs/src/userguide/language_basics.rst
index 593542eae..11561e1ee 100644
--- a/docs/src/userguide/language_basics.rst
+++ b/docs/src/userguide/language_basics.rst
@@ -48,7 +48,7 @@ the use of ‘early binding’ programming techniques.
C variable and type definitions
===============================
-C variables can be declared by
+C variables can be declared by
* using the Cython specific :keyword:`cdef` statement,
* using PEP-484/526 type annotations with C data types or
@@ -128,51 +128,6 @@ the declaration in most cases:
cdef float *g = [1, 2, 3, 4]
cdef float *h = &f
-In addition to the basic types, C :keyword:`struct`, :keyword:`union` and :keyword:`enum`
-are supported:
-
-.. tabs::
-
- .. group-tab:: Pure Python
-
- .. literalinclude:: ../../examples/userguide/language_basics/struct_union_enum.py
-
- .. note:: Currently, Pure Python mode does not support enums. (GitHub issue :issue:`4252`)
-
- .. group-tab:: Cython
-
- .. literalinclude:: ../../examples/userguide/language_basics/struct_union_enum.pyx
-
- See also :ref:`struct-union-enum-styles`
-
- .. note::
-
- Structs can be declared as ``cdef packed struct``, which has
- the same effect as the C directive ``#pragma pack(1)``.
-
- Declaring an enum as ``cpdef`` will create a :pep:`435`-style Python wrapper::
-
- cpdef enum CheeseState:
- hard = 1
- soft = 2
- runny = 3
-
- There is currently no special syntax for defining a constant, but you can use
- an anonymous :keyword:`enum` declaration for this purpose, for example,::
-
- cdef enum:
- tons_of_spam = 3
-
- .. note::
- the words ``struct``, ``union`` and ``enum`` are used only when
- defining a type, not when referring to it. For example, to declare a variable
- pointing to a ``Grail`` struct, you would write::
-
- cdef Grail *gp
-
- and not::
-
- cdef struct Grail *gp # WRONG
.. note::
@@ -197,46 +152,82 @@ are supported:
ctypedef int* IntPtr
+.. _structs:
+
+Structs, Unions, Enums
+----------------------
-You can create a C function by declaring it with :keyword:`cdef` or by decorating a Python function with ``@cfunc``:
+In addition to the basic types, C :keyword:`struct`, :keyword:`union` and :keyword:`enum`
+are supported:
.. tabs::
.. group-tab:: Pure Python
- .. code-block:: python
-
- @cython.cfunc
- def eggs(l: cython.ulong, f: cython.float) -> cython.int:
- ...
+ .. literalinclude:: ../../examples/userguide/language_basics/struct.py
.. group-tab:: Cython
- .. code-block:: cython
+ .. literalinclude:: ../../examples/userguide/language_basics/struct.pyx
- cdef int eggs(unsigned long l, float f):
- ...
+Structs can be declared as ``cdef packed struct``, which has
+the same effect as the C directive ``#pragma pack(1)``::
-You can read more about them in :ref:`python_functions_vs_c_functions`.
+ cdef packed struct StructArray:
+ int spam[4]
+ signed char eggs[5]
-Classes can be declared as :ref:`extension-types`. Those will
-have a behavior very close to python classes, but are faster because they use a ``struct``
-internally to store attributes.
-They are declared with the :keyword:`cdef` keyword or the ``@cclass`` class decorator.
+.. note::
+ This declaration removes the empty
- space between members that C automatically adds to ensure that they're aligned in memory
+ (see `Wikipedia article <https://en.wikipedia.org/wiki/Data_structure_alignment>`_ for more details).
+ The main use is that numpy structured arrays store their data in packed form, so a ``cdef packed struct``
+ can be :ref:`used in a memoryview<using_memoryviews>` to match that.
-Here is a simple example:
+ Pure python mode does not support packed structs.
+
+The following example shows a declaration of unions:
.. tabs::
.. group-tab:: Pure Python
- .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.py
+ .. literalinclude:: ../../examples/userguide/language_basics/union.py
.. group-tab:: Cython
- .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx
+ .. literalinclude:: ../../examples/userguide/language_basics/union.pyx
-You can read more about them in :ref:`extension-types`.
+Enums are created by ``cdef enum`` statement:
+
+.. literalinclude:: ../../examples/userguide/language_basics/enum.pyx
+
+
+.. note:: Currently, Pure Python mode does not support enums. (GitHub issue :issue:`4252`)
+
+Declaring an enum as ``cpdef`` will create a :pep:`435`-style Python wrapper::
+
+ cpdef enum CheeseState:
+ hard = 1
+ soft = 2
+ runny = 3
+
+There is currently no special syntax for defining a constant, but you can use
+an anonymous :keyword:`enum` declaration for this purpose, for example,::
+
+ cdef enum:
+ tons_of_spam = 3
+
+.. note::
+ In the Cython syntax, the words ``struct``, ``union`` and ``enum`` are used only when
+ defining a type, not when referring to it. For example, to declare a variable
+ pointing to a ``Grail`` struct, you would write::
+
+ cdef Grail *gp
+
+ and not::
+
+ cdef struct Grail *gp # WRONG
.. _typing_types:
@@ -326,12 +317,30 @@ and is typically what one wants).
If you want to use these numeric Python types simply omit the
type declaration and let them be objects.
+Extension Types
+---------------
+
It is also possible to declare :ref:`extension-types` (declared with ``cdef class`` or the ``@cclass`` decorator).
-This does allow subclasses. This typing is mostly used to access
-``cdef``/``@cfunc`` methods and attributes of the extension type.
+Those will have a behaviour very close to python classes (e.g. creating subclasses),
+but access to their members is faster from Cython code. Typing a variable
+as extension type is mostly used to access ``cdef``/``@cfunc`` methods and attributes of the extension type.
The C code uses a variable which is a pointer to a structure of the
specific type, something like ``struct MyExtensionTypeObject*``.
+Here is a simple example:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/extension_types/shrubbery.pyx
+
+You can read more about them in :ref:`extension-types`.
+
Grouping multiple C declarations
--------------------------------
@@ -459,7 +468,7 @@ passed in directly using a normal C function call.
C Functions declared using :keyword:`cdef` or the ``@cfunc`` decorator with a
Python object return type, like Python functions, will return a :keyword:`None`
value when execution leaves the function body without an explicit return value. This is in
-contrast to C/C++, which leaves the return value undefined.
+contrast to C/C++, which leaves the return value undefined.
In the case of non-Python object return types, the equivalent of zero is returned, for example, 0 for ``int``, :keyword:`False` for ``bint`` and :keyword:`NULL` for pointer types.
A more complete comparison of the pros and cons of these different method
@@ -634,7 +643,15 @@ parameters and has two required keyword parameters.
Function Pointers
-----------------
-Functions declared in a ``struct`` are automatically converted to function pointers.
+.. note:: Pointers to functions are currently not supported by pure Python mode. (GitHub issue :issue:`4279`)
+
+The following example shows declaring a ``ptr_add`` function pointer and assigning the ``add`` function to it:
+
+.. literalinclude:: ../../examples/userguide/language_basics/function_pointer.pyx
+
+Functions declared in a ``struct`` are automatically converted to function pointers:
+
+.. literalinclude:: ../../examples/userguide/language_basics/function_pointer_struct.pyx
For using error return values with function pointers, see the note at the bottom
of :ref:`error_return_values`.
@@ -652,16 +669,14 @@ through defined error return values. For functions that return a Python object
``NULL`` pointer, so any function returning a Python object has a well-defined
error return value.
-While this is always the case for C functions, functions
+While this is always the case for Python functions, functions
defined as C functions or ``cpdef``/``@ccall`` functions can return arbitrary C types,
-which do not have such a well-defined error return value. Thus, if an
-exception is detected in such a function, a warning message is printed,
-the exception is ignored, and the function returns immediately without
-propagating the exception to its caller.
+which do not have such a well-defined error return value.
+Extra care must be taken to ensure Python exceptions are correctly
+propagated from such functions.
-If you want such a C function to be able to propagate exceptions, you need
-to declare an exception return value for it as a contract with the caller.
-Here is an example
+A ``cdef`` function may be declared with an exception return value for it
+as a contract with the caller. Here is an example:
.. tabs::
@@ -684,7 +699,12 @@ Here is an example
With this declaration, whenever an exception occurs inside ``spam``, it will
immediately return with the value ``-1``. From the caller's side, whenever
a call to spam returns ``-1``, the caller will assume that an exception has
-occurred and can now process or propagate it.
+occurred and can now process or propagate it. Calling ``spam()`` is roughly translated to the following C code:
+
+.. code-block:: C
+
+ ret_val = spam();
+ if (ret_val == -1) goto error_handler;
When you declare an exception value for a function, you should never explicitly
or implicitly return that value. This includes empty :keyword:`return`
@@ -710,7 +730,7 @@ form of exception value declaration
def spam() -> cython.int:
...
- The keyword argument ``check=True`` indicates that the value ``-1`` _may_ signal an error.
+ The keyword argument ``check=True`` indicates that the value ``-1`` **may** signal an error.
.. group-tab:: Cython
@@ -719,11 +739,17 @@ form of exception value declaration
cdef int spam() except? -1:
...
- The ``?`` indicates that the value ``-1`` _may_ signal an error.
+ The ``?`` indicates that the value ``-1`` **may** signal an error.
In this case, Cython generates a call to :c:func:`PyErr_Occurred` if the exception value
is returned, to make sure it really received an exception and not just a normal
-result.
+result. Calling ``spam()`` is roughly translated to the following C code:
+
+
+.. code-block:: C
+
+ ret_val = spam();
+ if (ret_val == -1 && PyErr_Occurred()) goto error_handler;
There is also a third form of exception value declaration
@@ -735,18 +761,25 @@ There is also a third form of exception value declaration
@cython.cfunc
@cython.exceptval(check=True)
- def spam() -> cython.int:
+ def spam() -> cython.void:
...
.. group-tab:: Cython
.. code-block:: cython
- cdef int spam() except *:
+ cdef void spam() except *:
...
This form causes Cython to generate a call to :c:func:`PyErr_Occurred` after
-*every* call to spam, regardless of what value it returns. If you have a
+*every* call to spam, regardless of what value it returns. Calling ``spam()`` is roughly translated to the following C code:
+
+.. code-block:: C
+
+ spam();
+ if (PyErr_Occurred()) goto error_handler;
+
+If you have a
function returning ``void`` that needs to propagate errors, you will have to
use this form, since there isn't any error return value to test.
Otherwise, an explicit error return value allows the C compiler to generate
@@ -760,12 +793,47 @@ An external C++ function that may raise an exception can be declared with::
See :ref:`wrapping-cplusplus` for more details.
+Finally, if you are certain that your function should not raise an exception, (e.g., it
+does not use Python objects at all, or you plan to use it as a callback in C code that
+is unaware of Python exceptions), you can declare it as such using ``noexcept`` or by ``@cython.exceptval(check=False)``:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. code-block:: python
+
+ @cython.cfunc
+ @cython.exceptval(check=False)
+ def spam() -> cython.int:
+ ...
+
+ .. group-tab:: Cython
+
+ .. code-block:: cython
+
+ cdef int spam() noexcept:
+ ...
+
+If a ``noexcept`` function *does* finish with an exception then it will print a warning message but not allow the exception to propagate further.
+On the other hand, calling a ``noexcept`` function has zero overhead related to managing exceptions, unlike the previous declarations.
+
Some things to note:
+* ``cdef`` functions that are also ``extern`` are implicitly declared ``noexcept`` or ``@cython.exceptval(check=False)``.
+ In the uncommon case of external C/C++ functions that **can** raise Python exceptions,
+ e.g., external functions that use the Python C API, you should explicitly declare
+ them with an exception value.
+
+* ``cdef`` functions that are *not* ``extern`` are implicitly declared with a suitable
+ exception specification for the return type (e.g. ``except *`` or ``@cython.exceptval(check=True)`` for a ``void`` return
+ type, ``except? -1`` or ``@cython.exceptval(-1, check=True)`` for an ``int`` return type).
+
* Exception values can only be declared for functions returning a C integer,
enum, float or pointer type, and the value must be a constant expression.
Functions that return ``void``, or a struct/union by value, can only use
the ``except *`` or ``exceptval(check=True)`` form.
+
* The exception value specification is part of the signature of the function.
If you're passing a pointer to a function as a parameter or assigning it
to a variable, the declared type of the parameter or variable must have
@@ -776,6 +844,10 @@ Some things to note:
.. note:: Pointers to functions are currently not supported by pure Python mode. (GitHub issue :issue:`4279`)
+* If the returning type of a ``cdef`` function with ``except *`` or ``@cython.exceptval(check=True)`` is C integer,
+ enum, float or pointer type, Cython calls :c:func:`PyErr_Occurred` only when the
+ dedicated value is returned, instead of checking after every call of the function.
+
* You don't need to (and shouldn't) declare exception values for functions
which return Python objects. Remember that a function with no declared
return type implicitly returns a Python object. (Exceptions on such
@@ -1094,7 +1166,7 @@ direct equivalent in Python.
* There is an ``&`` operator in Cython, with the same semantics as in C.
In pure python mode, use the ``cython.address()`` function instead.
* The null C pointer is called ``NULL``, not ``0``. ``NULL`` is a reserved word in Cython
- and special object in pure python mode.
+ and ``cython.NULL`` is a special object in pure python mode.
* Type casts are written ``<type>value`` or ``cast(type, value)``, for example,
.. tabs::
diff --git a/docs/src/userguide/memoryviews.rst b/docs/src/userguide/memoryviews.rst
index 1a0a0b282..285cc67ea 100644
--- a/docs/src/userguide/memoryviews.rst
+++ b/docs/src/userguide/memoryviews.rst
@@ -42,6 +42,7 @@ This code should give the following output::
Memoryview sum of Cython array is 1351
Memoryview sum of C memoryview is 451
+.. _using_memoryviews:
Using memoryviews
=================
diff --git a/docs/src/userguide/migrating_to_cy30.rst b/docs/src/userguide/migrating_to_cy30.rst
index 357132887..4576ce864 100644
--- a/docs/src/userguide/migrating_to_cy30.rst
+++ b/docs/src/userguide/migrating_to_cy30.rst
@@ -172,3 +172,66 @@ rather than relying on the user to test and cast the type of each operand.
The old behaviour can be restored with the
:ref:`directive <compiler-directives>` ``c_api_binop_methods=True``.
More details are given in :ref:`arithmetic_methods`.
+
+Exception values and ``noexcept``
+=================================
+
+``cdef`` functions that are not ``extern`` now propagate Python
+exceptions by default, where previously they needed to explicitly be
+declared with an :ref:`exception value <error_return_values>` in order
+for them to do so. A new ``noexcept`` modifier can be used to declare
+``cdef`` functions that will not raise exceptions.
+
+In existing code, you should mainly look out for ``cdef`` functions
+that are declared without an exception value::
+
+ cdef int spam(int x):
+ pass
+
+If you left out the exception value by mistake, i.e., the function
+should propagate Python exceptions, then the new behaviour will take
+care of this for you, and correctly propagate any exceptions.
+This was a common mistake in Cython code and the main reason to change the behaviour.
+
+On the other hand, if you didn't declare an exception value because
+you want to avoid exceptions propagating out of this function, the new behaviour
+will result in slightly less efficient code being generated, now involving an exception check.
+To prevent that, you must declare the function explicitly as being
+``noexcept``::
+
+ cdef int spam(int x) noexcept:
+ pass
+
+The behaviour for ``cdef`` functions that are also ``extern`` is
+unchanged as ``extern`` functions are less likely to raise Python
+exceptions.
+
+The behaviour for any ``cdef`` function that is declared with an
+explicit exception value (e.g., ``cdef int spam(int x) except -1``) is
+also unchanged.
+
+Annotation typing
+=================
+
+Cython 3 has made substantial improvements in recognising types in
+annotations and it is well worth reading
+:ref:`the pure Python tutorial<pep484_type_annotations>` to understand
+some of the improvements.
+
+To make it easier to handle cases where your interpretation of type
+annotations differs from Cython's, Cython 3 now supports setting the
+``annotation_typing`` :ref:`directive <compiler-directives>` on a
+per-class or per-function level.
+
+C++ postincrement/postdecrement operator
+========================================
+
+Cython 3 differentiates between pre/post-increment and pre/post-decrement
+operators (Cython 0.29 implemented both as pre(in/de)crement operator).
+This only has an effect when using ``cython.operator.postdecrement`` / ``cython.operator.postincrement``.
+When running into an error it is required to add the corresponding operator::
+
+ cdef cppclass Example:
+ Example operator++(int)
+ Example operator--(int)
+
diff --git a/docs/src/userguide/parallelism.rst b/docs/src/userguide/parallelism.rst
index e9d473e66..7cdae95b3 100644
--- a/docs/src/userguide/parallelism.rst
+++ b/docs/src/userguide/parallelism.rst
@@ -8,6 +8,9 @@
Using Parallelism
**********************************
+.. include::
+ ../two-syntax-variants-used
+
Cython supports native parallelism through the :py:mod:`cython.parallel`
module. To use this kind of parallelism, the GIL must be released
(see :ref:`Releasing the GIL <nogil>`).
@@ -87,7 +90,7 @@ It currently supports OpenMP, but later on more backends might be supported.
runtime:
The schedule and chunk size are taken from the runtime scheduling
variable, which can be set through the ``openmp.omp_set_schedule()``
- function call, or the OMP_SCHEDULE environment variable. Note that
+ function call, or the ``OMP_SCHEDULE`` environment variable. Note that
this essentially disables any static compile time optimisations of
the scheduling code itself and may therefore show a slightly worse
performance than when the same scheduling policy is statically
@@ -116,17 +119,27 @@ It currently supports OpenMP, but later on more backends might be supported.
Example with a reduction:
-.. literalinclude:: ../../examples/userguide/parallelism/simple_sum.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/parallelism/simple_sum.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/simple_sum.pyx
-Example with a :term:`typed memoryview<Typed memoryview>` (e.g. a NumPy array)::
+Example with a :term:`typed memoryview<Typed memoryview>` (e.g. a NumPy array)
- from cython.parallel import prange
+.. tabs::
- def func(double[:] x, double alpha):
- cdef Py_ssize_t i
+ .. group-tab:: Pure Python
- for i in prange(x.shape[0]):
- x[i] = alpha * x[i]
+ .. literalinclude:: ../../examples/userguide/parallelism/memoryview_sum.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/memoryview_sum.pyx
.. function:: parallel(num_threads=None)
@@ -137,29 +150,17 @@ Example with a :term:`typed memoryview<Typed memoryview>` (e.g. a NumPy array)::
is also private to the prange. Variables that are private in the parallel
block are unavailable after the parallel block.
- Example with thread-local buffers::
-
- from cython.parallel import parallel, prange
- from libc.stdlib cimport abort, malloc, free
+ Example with thread-local buffers
- cdef Py_ssize_t idx, i, n = 100
- cdef int * local_buf
- cdef size_t size = 10
+ .. tabs::
- with nogil, parallel():
- local_buf = <int *> malloc(sizeof(int) * size)
- if local_buf is NULL:
- abort()
+ .. group-tab:: Pure Python
- # populate our local buffer in a sequential loop
- for i in xrange(size):
- local_buf[i] = i * 2
+ .. literalinclude:: ../../examples/userguide/parallelism/parallel.py
- # share the work using the thread-local buffer(s)
- for i in prange(n, schedule='guided'):
- func(local_buf)
+ .. group-tab:: Cython
- free(local_buf)
+ .. literalinclude:: ../../examples/userguide/parallelism/parallel.pyx
Later on sections might be supported in parallel blocks, to distribute
code sections of work among threads.
@@ -174,9 +175,17 @@ Compiling
=========
To actually use the OpenMP support, you need to tell the C or C++ compiler to
-enable OpenMP. For gcc this can be done as follows in a setup.py:
+enable OpenMP. For gcc this can be done as follows in a ``setup.py``:
+
+.. tabs::
+
+ .. group-tab:: Pure Python
-.. literalinclude:: ../../examples/userguide/parallelism/setup.py
+ .. literalinclude:: ../../examples/userguide/parallelism/setup_py.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/setup_pyx.py
For Microsoft Visual C++ compiler, use ``'/openmp'`` instead of ``'-fopenmp'``.
@@ -188,13 +197,21 @@ The parallel with and prange blocks support the statements break, continue and
return in nogil mode. Additionally, it is valid to use a ``with gil`` block
inside these blocks, and have exceptions propagate from them.
However, because the blocks use OpenMP, they can not just be left, so the
-exiting procedure is best-effort. For prange() this means that the loop
+exiting procedure is best-effort. For ``prange()`` this means that the loop
body is skipped after the first break, return or exception for any subsequent
iteration in any thread. It is undefined which value shall be returned if
multiple different values may be returned, as the iterations are in no
particular order:
-.. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.py
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/breaking_loop.pyx
In the example above it is undefined whether an exception shall be raised,
whether it will simply break or whether it will return 2.
@@ -203,7 +220,17 @@ Using OpenMP Functions
======================
OpenMP functions can be used by cimporting ``openmp``:
-.. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.pyx
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.py
+ :lines: 3-
+
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/parallelism/cimport_openmp.pyx
+ :lines: 3-
.. rubric:: References
diff --git a/docs/src/userguide/sharing_declarations.rst b/docs/src/userguide/sharing_declarations.rst
index 70e29e2b2..6beceda57 100644
--- a/docs/src/userguide/sharing_declarations.rst
+++ b/docs/src/userguide/sharing_declarations.rst
@@ -6,6 +6,9 @@
Sharing Declarations Between Cython Modules
********************************************
+.. include::
+ ../two-syntax-variants-used
+
This section describes how to make C declarations, functions and extension
types in one Cython module available for use in another Cython module.
These facilities are closely modeled on the Python import mechanism,
@@ -17,13 +20,13 @@ Definition and Implementation files
A Cython module can be split into two parts: a definition file with a ``.pxd``
suffix, containing C declarations that are to be available to other Cython
-modules, and an implementation file with a ``.pyx`` suffix, containing
+modules, and an implementation file with a ``.pyx``/``.py`` suffix, containing
everything else. When a module wants to use something declared in another
module's definition file, it imports it using the :keyword:`cimport`
-statement.
+statement or using special :py:mod:`cython.cimports` package.
A ``.pxd`` file that consists solely of extern declarations does not need
-to correspond to an actual ``.pyx`` file or Python module. This can make it a
+to correspond to an actual ``.pyx``/``.py`` file or Python module. This can make it a
convenient place to put common declarations, for example declarations of
functions from an :ref:`external library <external-C-code>` that one
wants to use in several modules.
@@ -41,8 +44,8 @@ A definition file can contain:
It cannot contain the implementations of any C or Python functions, or any
Python class definitions, or any executable statements. It is needed when one
-wants to access :keyword:`cdef` attributes and methods, or to inherit from
-:keyword:`cdef` classes defined in this module.
+wants to access :keyword:`cdef`/``@cfunc`` attributes and methods, or to inherit from
+:keyword:`cdef`/``@cclass`` classes defined in this module.
.. note::
@@ -70,23 +73,45 @@ The cimport statement
The :keyword:`cimport` statement is used in a definition or
implementation file to gain access to names declared in another definition
file. Its syntax exactly parallels that of the normal Python import
-statement::
+statement. When pure Python syntax is used, the same effect can be achieved by
+importing from the special :py:mod:`cython.cimports` package. In the following
+text, the term ``cimport`` refers to using either the :keyword:`cimport`
+statement or the :py:mod:`cython.cimports` package.
- cimport module [, module...]
+.. tabs::
- from module cimport name [as name] [, name [as name] ...]
+ .. group-tab:: Pure Python
-Here is an example. :file:`dishes.pxd` is a definition file which exports a
-C data type. :file:`restaurant.pyx` is an implementation file which imports and
-uses it.
+ .. code-block:: python
+
+ from cython.cimports.module import name [as name][, name [as name] ...]
+
+ .. group-tab:: Cython
-:file:`dishes.pxd`:
+ .. code-block:: cython
+
+ cimport module [, module...]
+
+ from module cimport name [as name] [, name [as name] ...]
+
+Here is an example. :file:`dishes.pxd` is a definition file which exports a
+C data type. :file:`restaurant.pyx`/:file:`restaurant.py` is an implementation file
+which imports and uses it.
.. literalinclude:: ../../examples/userguide/sharing_declarations/dishes.pxd
+ :caption: dishes.pxd
+
+.. tabs::
+
+ .. group-tab:: Pure Python
-:file:`restaurant.pyx`:
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.py
+         :caption: restaurant.py
-.. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.pyx
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/restaurant.pyx
+         :caption: restaurant.pyx
It is important to understand that the :keyword:`cimport` statement can only
be used to import C data types, C functions and variables, and extension
@@ -116,8 +141,8 @@ option to ``cythonize()``), as well as ``sys.path``.
Using ``package_data`` to install ``.pxd`` files in your ``setup.py`` script
allows other packages to cimport items from your module as a dependency.
-Also, whenever you compile a file :file:`modulename.pyx`, the corresponding
-definition file :file:`modulename.pxd` is first searched for along the
+Also, whenever you compile a file :file:`modulename.pyx`/:file:`modulename.py`,
+the corresponding definition file :file:`modulename.pxd` is first searched for along the
include path (but not ``sys.path``), and if found, it is processed before
processing the ``.pyx`` file.
@@ -132,16 +157,23 @@ for an imaginary module, and :keyword:`cimport` that module. You can then
refer to the C functions by qualifying them with the name of the module.
Here's an example:
-:file:`c_lunch.pxd`:
-
.. literalinclude:: ../../examples/userguide/sharing_declarations/c_lunch.pxd
+ :caption: c_lunch.pxd
+
+.. tabs::
-:file:`lunch.pyx`:
+ .. group-tab:: Pure Python
-.. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.pyx
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.py
+ :caption: lunch.py
-You don't need any :file:`c_lunch.pyx` file, because the only things defined
-in :file:`c_lunch.pxd` are extern C entities. There won't be any actual
+ .. group-tab:: Cython
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/lunch.pyx
+ :caption: lunch.pyx
+
+You don't need any :file:`c_lunch.pyx`/:file:`c_lunch.py` file, because the only
+things defined in :file:`c_lunch.pxd` are extern C entities. There won't be any actual
``c_lunch`` module at run time, but that doesn't matter; the
:file:`c_lunch.pxd` file has done its job of providing an additional namespace
at compile time.
@@ -154,24 +186,32 @@ C functions defined at the top level of a module can be made available via
:keyword:`cimport` by putting headers for them in the ``.pxd`` file, for
example:
-:file:`volume.pxd`:
-
.. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pxd
+ :caption: volume.pxd
-:file:`volume.pyx`:
+.. tabs::
-.. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pyx
+ .. group-tab:: Pure Python
-:file:`spammery.pyx`:
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.py
+ :caption: volume.py
-.. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.py
+ :caption: spammery.py
-.. note::
+ .. note::
+
+      Type definitions of the function ``cube()`` in :file:`volume.py` are not
+      provided since they are taken from the ``.pxd`` definition file. See
+      :ref:`augmenting_pxd` and GitHub issue :issue:`4388`.
+
+ .. group-tab:: Cython
- When a module exports a C function in this way, an object appears in the
- module dictionary under the function's name. However, you can't make use of
- this object from Python, nor can you use it from Cython using a normal import
- statement; you have to use :keyword:`cimport`.
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/volume.pyx
+ :caption: volume.pyx
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/spammery.pyx
+ :caption: spammery.pyx
.. _sharing_extension_types:
@@ -193,34 +233,47 @@ Python methods.
Here is an example of a module which defines and exports an extension type,
and another module which uses it:
-:file:`shrubbing.pxd`:
-
.. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pxd
+ :caption: shrubbing.pxd
+
+.. tabs::
+
+ .. group-tab:: Pure Python
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.py
+ :caption: shrubbing.py
+
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.py
+ :caption: landscaping.py
-:file:`shrubbing.pyx`:
+ One would then need to compile both of these modules, e.g. using
-.. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pyx
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/setup_py.py
+ :caption: setup.py
-:file:`landscaping.pyx`:
+ .. group-tab:: Cython
-.. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.pyx
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/shrubbing.pyx
+ :caption: shrubbing.pyx
-One would then need to compile both of these modules, e.g. using
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/landscaping.pyx
+ :caption: landscaping.pyx
-:file:`setup.py`:
+ One would then need to compile both of these modules, e.g. using
-.. literalinclude:: ../../examples/userguide/sharing_declarations/setup.py
+ .. literalinclude:: ../../examples/userguide/sharing_declarations/setup_pyx.py
+ :caption: setup.py
Some things to note about this example:
-* There is a :keyword:`cdef` class Shrubbery declaration in both
- :file:`Shrubbing.pxd` and :file:`Shrubbing.pyx`. When the Shrubbing module
+* There is a :keyword:`cdef`/``@cclass`` class Shrubbery declaration in both
+ :file:`shrubbing.pxd` and :file:`shrubbing.pyx`. When the shrubbing module
is compiled, these two declarations are combined into one.
-* In Landscaping.pyx, the :keyword:`cimport` Shrubbing declaration allows us
- to refer to the Shrubbery type as :class:`Shrubbing.Shrubbery`. But it
- doesn't bind the name Shrubbing in Landscaping's module namespace at run
- time, so to access :func:`Shrubbing.standard_shrubbery` we also need to
- ``import Shrubbing``.
+* In :file:`landscaping.pyx`/:file:`landscaping.py`, the :keyword:`cimport` shrubbing
+ declaration allows us to refer to the Shrubbery type as :class:`shrubbing.Shrubbery`.
+ But it doesn't bind the name shrubbing in landscaping's module namespace at run
+ time, so to access :func:`shrubbing.standard_shrubbery` we also need to
+ ``import shrubbing``.
* One caveat if you use setuptools instead of distutils, the default
action when running ``python setup.py install`` is to create a zipped
``egg`` file which will not work with ``cimport`` for ``pxd`` files
@@ -234,8 +287,8 @@ Versioning
``.pxd`` files can be labelled with a minimum Cython version as part of
their file name, similar to the version tagging of ``.so`` files in PEP 3149.
-For example a file called :file:`Shrubbing.cython-30.pxd` will only be
-found by ``cimport Shrubbing`` on Cython 3.0 and higher. Cython will use the
+For example a file called :file:`shrubbing.cython-30.pxd` will only be
+found by ``cimport shrubbing`` on Cython 3.0 and higher. Cython will use the
file tagged with the highest compatible version number.
Note that versioned files that are distributed across different directories
diff --git a/docs/src/userguide/source_files_and_compilation.rst b/docs/src/userguide/source_files_and_compilation.rst
index edf51213e..d1c8f696c 100644
--- a/docs/src/userguide/source_files_and_compilation.rst
+++ b/docs/src/userguide/source_files_and_compilation.rst
@@ -12,17 +12,21 @@ file named :file:`primes.pyx`.
Cython code, unlike Python, must be compiled. This happens in two stages:
- * A ``.pyx`` file is compiled by Cython to a ``.c`` file.
+ * A ``.pyx`` (or ``.py``) file is compiled by Cython to a ``.c`` file.
* The ``.c`` file is compiled by a C compiler to a ``.so`` file (or a
``.pyd`` file on Windows)
-Once you have written your ``.pyx`` file, there are a couple of ways of turning it
-into an extension module.
+Once you have written your ``.pyx``/``.py`` file, there are a couple of ways
+to turn it into an extension module.
The following sub-sections describe several ways to build your
extension modules, and how to pass directives to the Cython compiler.
+There are also a number of tools that process ``.pyx`` files apart from Cython, e.g.
+
+- Linting: https://pypi.org/project/cython-lint/
+
.. _compiling_command_line:
@@ -946,7 +950,8 @@ Cython code. Here is the list of currently supported directives:
Uses function argument annotations to determine the type of variables. Default
is True, but can be disabled. Since Python does not enforce types given in
annotations, setting to False gives greater compatibility with Python code.
- Must be set globally.
+ From Cython 3.0, ``annotation_typing`` can be set on a per-function or
+ per-class basis.
``emit_code_comments`` (True / False)
Copy the original source code line by line into C code comments in the generated
diff --git a/docs/src/userguide/special_methods.rst b/docs/src/userguide/special_methods.rst
index af702f3c3..e6635b502 100644
--- a/docs/src/userguide/special_methods.rst
+++ b/docs/src/userguide/special_methods.rst
@@ -3,6 +3,9 @@
Special Methods of Extension Types
===================================
+.. include::
+ ../two-syntax-variants-used
+
This page describes the special methods currently supported by Cython extension
types. A complete list of all the special methods appears in the table at the
bottom. Some of these methods behave differently from their Python
@@ -12,7 +15,8 @@ mention.
.. Note::
Everything said on this page applies only to extension types, defined
- with the :keyword:`cdef` class statement. It doesn't apply to classes defined with the
+   with the :keyword:`cdef` class statement or decorated with the ``@cclass`` decorator.
+ It doesn't apply to classes defined with the
Python :keyword:`class` statement, where the normal Python rules apply.
.. _declaration:
@@ -20,7 +24,7 @@ mention.
Declaration
------------
Special methods of extension types must be declared with :keyword:`def`, not
-:keyword:`cdef`. This does not impact their performance--Python uses different
+:keyword:`cdef`/``@cfunc``. This does not impact their performance--Python uses different
calling conventions to invoke these special methods.
.. _docstrings:
@@ -225,19 +229,15 @@ Depending on the application, one way or the other may be better:
decorator specifically for ``cdef`` classes. (Normal Python classes can use
the original ``functools`` decorator.)
- .. code-block:: cython
+.. tabs::
+
+ .. group-tab:: Pure Python
- @cython.total_ordering
- cdef class ExtGe:
- cdef int x
+ .. literalinclude:: ../../examples/userguide/special_methods/total_ordering.py
- def __ge__(self, other):
- if not isinstance(other, ExtGe):
- return NotImplemented
- return self.x >= (<ExtGe>other).x
+ .. group-tab:: Cython
- def __eq__(self, other):
- return isinstance(other, ExtGe) and self.x == (<ExtGe>other).x
+ .. literalinclude:: ../../examples/userguide/special_methods/total_ordering.pyx
.. _the__next__method:
diff --git a/pyximport/_pyximport2.py b/pyximport/_pyximport2.py
new file mode 100644
index 000000000..00e88a8ac
--- /dev/null
+++ b/pyximport/_pyximport2.py
@@ -0,0 +1,620 @@
+"""
+Import hooks; when installed with the install() function, these hooks
+allow importing .pyx files as if they were Python modules.
+
+If you want the hook installed every time you run Python
+you can add it to your Python version by adding these lines to
+sitecustomize.py (which you can create from scratch in site-packages
+if it doesn't exist there or somewhere else on your python path)::
+
+ import pyximport
+ pyximport.install()
+
+For instance on the Mac with a non-system Python 2.3, you could create
+sitecustomize.py with only those two lines at
+/usr/local/lib/python2.3/site-packages/sitecustomize.py .
+
+A custom distutils.core.Extension instance and setup() args
+(Distribution) for the build can be defined by a <modulename>.pyxbld
+file like:
+
+# examplemod.pyxbld
+def make_ext(modname, pyxfilename):
+ from distutils.extension import Extension
+ return Extension(name = modname,
+ sources=[pyxfilename, 'hello.c'],
+ include_dirs=['/myinclude'] )
+def make_setup_args():
+ return dict(script_args=["--compiler=mingw32"])
+
+Extra dependencies can be defined by a <modulename>.pyxdep .
+See README.
+
+Since Cython 0.11, the :mod:`pyximport` module also has experimental
+compilation support for normal Python modules. This allows you to
+automatically run Cython on every .pyx and .py module that Python
+imports, including parts of the standard library and installed
+packages. Cython will still fail to compile a lot of Python modules,
+in which case the import mechanism will fall back to loading the
+Python source modules instead. The .py import mechanism is installed
+like this::
+
+ pyximport.install(pyimport = True)
+
+Running this module as a top-level script will run a test and then print
+the documentation.
+
+This code is based on the Py2.3+ import protocol as described in PEP 302.
+"""
+
+import glob
+import imp
+import os
+import sys
+from zipimport import zipimporter, ZipImportError
+
+mod_name = "pyximport"
+
+PYX_EXT = ".pyx"
+PYXDEP_EXT = ".pyxdep"
+PYXBLD_EXT = ".pyxbld"
+
+DEBUG_IMPORT = False
+
+
+def _print(message, args):
+ if args:
+ message = message % args
+ print(message)
+
+
+def _debug(message, *args):
+ if DEBUG_IMPORT:
+ _print(message, args)
+
+
+def _info(message, *args):
+ _print(message, args)
+
+
+# Performance problem: for every PYX file that is imported, we will
+# invoke the whole distutils infrastructure even if the module is
+# already built. It might be more efficient to only do it when the
+# mod time of the .pyx is newer than the mod time of the .so but
+# the question is how to get distutils to tell me the name of the .so
+# before it builds it. Maybe it is easy...but maybe the performance
+# issue isn't real.
+def _load_pyrex(name, filename):
+ "Load a pyrex file given a name and filename."
+
+
+def get_distutils_extension(modname, pyxfilename, language_level=None):
+# try:
+# import hashlib
+# except ImportError:
+# import md5 as hashlib
+# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest()
+# modname = modname + extra
+ extension_mod,setup_args = handle_special_build(modname, pyxfilename)
+ if not extension_mod:
+ if not isinstance(pyxfilename, str):
+ # distutils is stupid in Py2 and requires exactly 'str'
+ # => encode accidentally coerced unicode strings back to str
+ pyxfilename = pyxfilename.encode(sys.getfilesystemencoding())
+ from distutils.extension import Extension
+ extension_mod = Extension(name = modname, sources=[pyxfilename])
+ if language_level is not None:
+ extension_mod.cython_directives = {'language_level': language_level}
+ return extension_mod,setup_args
+
+
+def handle_special_build(modname, pyxfilename):
+ special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
+ ext = None
+ setup_args={}
+ if os.path.exists(special_build):
+ # globls = {}
+ # locs = {}
+ # execfile(special_build, globls, locs)
+ # ext = locs["make_ext"](modname, pyxfilename)
+ with open(special_build) as fid:
+ mod = imp.load_source("XXXX", special_build, fid)
+ make_ext = getattr(mod,'make_ext',None)
+ if make_ext:
+ ext = make_ext(modname, pyxfilename)
+ assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
+ make_setup_args = getattr(mod, 'make_setup_args',None)
+ if make_setup_args:
+ setup_args = make_setup_args()
+ assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict"
+ % special_build)
+        assert ext or setup_args, ("neither make_ext nor make_setup_args %s"
+ % special_build)
+ ext.sources = [os.path.join(os.path.dirname(special_build), source)
+ for source in ext.sources]
+ return ext, setup_args
+
+
+def handle_dependencies(pyxfilename):
+ testing = '_test_files' in globals()
+ dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT
+
+ # by default let distutils decide whether to rebuild on its own
+ # (it has a better idea of what the output file will be)
+
+ # but we know more about dependencies so force a rebuild if
+ # some of the dependencies are newer than the pyxfile.
+ if os.path.exists(dependfile):
+ with open(dependfile) as fid:
+ depends = fid.readlines()
+ depends = [depend.strip() for depend in depends]
+
+ # gather dependencies in the "files" variable
+ # the dependency file is itself a dependency
+ files = [dependfile]
+ for depend in depends:
+ fullpath = os.path.join(os.path.dirname(dependfile),
+ depend)
+ files.extend(glob.glob(fullpath))
+
+ # only for unit testing to see we did the right thing
+ if testing:
+ _test_files[:] = [] #$pycheck_no
+
+ # if any file that the pyxfile depends upon is newer than
+ # the pyx file, 'touch' the pyx file so that distutils will
+ # be tricked into rebuilding it.
+ for file in files:
+ from distutils.dep_util import newer
+ if newer(file, pyxfilename):
+ _debug("Rebuilding %s because of %s", pyxfilename, file)
+ filetime = os.path.getmtime(file)
+ os.utime(pyxfilename, (filetime, filetime))
+ if testing:
+ _test_files.append(file)
+
+
+def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
+ assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
+ handle_dependencies(pyxfilename)
+
+ extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
+ build_in_temp = pyxargs.build_in_temp
+ sargs = pyxargs.setup_args.copy()
+ sargs.update(setup_args)
+ build_in_temp = sargs.pop('build_in_temp',build_in_temp)
+
+ from . import pyxbuild
+ olddir = os.getcwd()
+ common = ''
+ if pyxbuild_dir:
+        # Windows concatenates the pyxbuild_dir to the pyxfilename when
+ # compiling, and then complains that the filename is too long
+ common = os.path.commonprefix([pyxbuild_dir, pyxfilename])
+ if len(common) > 30:
+ pyxfilename = os.path.relpath(pyxfilename)
+ pyxbuild_dir = os.path.relpath(pyxbuild_dir)
+ os.chdir(common)
+ try:
+ so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
+ build_in_temp=build_in_temp,
+ pyxbuild_dir=pyxbuild_dir,
+ setup_args=sargs,
+ inplace=inplace,
+ reload_support=pyxargs.reload_support)
+ finally:
+ os.chdir(olddir)
+ so_path = os.path.join(common, so_path)
+ assert os.path.exists(so_path), "Cannot find: %s" % so_path
+
+ junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
+ junkstuff = glob.glob(junkpath)
+ for path in junkstuff:
+ if path != so_path:
+ try:
+ os.remove(path)
+ except IOError:
+ _info("Couldn't remove %s", path)
+
+ return so_path
+
+
+def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False,
+ build_inplace=False, language_level=None, so_path=None):
+ try:
+ if so_path is None:
+ if is_package:
+ module_name = name + '.__init__'
+ else:
+ module_name = name
+ so_path = build_module(module_name, pyxfilename, pyxbuild_dir,
+ inplace=build_inplace, language_level=language_level)
+ mod = imp.load_dynamic(name, so_path)
+ if is_package and not hasattr(mod, '__path__'):
+ mod.__path__ = [os.path.dirname(so_path)]
+ assert mod.__file__ == so_path, (mod.__file__, so_path)
+ except Exception as failure_exc:
+ _debug("Failed to load extension module: %r" % failure_exc)
+ if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'):
+ # try to fall back to normal import
+ mod = imp.load_source(name, pyxfilename)
+ assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename)
+ else:
+ tb = sys.exc_info()[2]
+ import traceback
+ exc = ImportError("Building module %s failed: %s" % (
+ name, traceback.format_exception_only(*sys.exc_info()[:2])))
+ if sys.version_info[0] >= 3:
+ raise exc.with_traceback(tb)
+ else:
+ exec("raise exc, None, tb", {'exc': exc, 'tb': tb})
+ return mod
+
+
+# import hooks
+
+class PyxImporter(object):
+ """A meta-path importer for .pyx files.
+ """
+ def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False,
+ language_level=None):
+ self.extension = extension
+ self.pyxbuild_dir = pyxbuild_dir
+ self.inplace = inplace
+ self.language_level = language_level
+
+ def find_module(self, fullname, package_path=None):
+ if fullname in sys.modules and not pyxargs.reload_support:
+ return None # only here when reload()
+
+ # package_path might be a _NamespacePath. Convert that into a list...
+ if package_path is not None and not isinstance(package_path, list):
+ package_path = list(package_path)
+ try:
+ fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path)
+ if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open!
+ if pathname and ty == imp.PKG_DIRECTORY:
+ pkg_file = os.path.join(pathname, '__init__'+self.extension)
+ if os.path.isfile(pkg_file):
+ return PyxLoader(fullname, pathname,
+ init_path=pkg_file,
+ pyxbuild_dir=self.pyxbuild_dir,
+ inplace=self.inplace,
+ language_level=self.language_level)
+ if pathname and pathname.endswith(self.extension):
+ return PyxLoader(fullname, pathname,
+ pyxbuild_dir=self.pyxbuild_dir,
+ inplace=self.inplace,
+ language_level=self.language_level)
+ if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next!
+ return None
+
+ # find .pyx fast, when .so/.pyd exist --inplace
+ pyxpath = os.path.splitext(pathname)[0]+self.extension
+ if os.path.isfile(pyxpath):
+ return PyxLoader(fullname, pyxpath,
+ pyxbuild_dir=self.pyxbuild_dir,
+ inplace=self.inplace,
+ language_level=self.language_level)
+
+ # .so/.pyd's on PATH should not be remote from .pyx's
+ # think no need to implement PyxArgs.importer_search_remote here?
+
+ except ImportError:
+ pass
+
+ # searching sys.path ...
+
+ #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path
+
+ mod_parts = fullname.split('.')
+ module_name = mod_parts[-1]
+ pyx_module_name = module_name + self.extension
+
+ # this may work, but it returns the file content, not its path
+ #import pkgutil
+ #pyx_source = pkgutil.get_data(package, pyx_module_name)
+
+ paths = package_path or sys.path
+ for path in paths:
+ pyx_data = None
+ if not path:
+ path = os.getcwd()
+ elif os.path.isfile(path):
+ try:
+ zi = zipimporter(path)
+ pyx_data = zi.get_data(pyx_module_name)
+ except (ZipImportError, IOError, OSError):
+ continue # Module not found.
+ # unzip the imported file into the build dir
+ # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file
+ path = self.pyxbuild_dir
+ elif not os.path.isabs(path):
+ path = os.path.abspath(path)
+
+ pyx_module_path = os.path.join(path, pyx_module_name)
+ if pyx_data is not None:
+ if not os.path.exists(path):
+ try:
+ os.makedirs(path)
+ except OSError:
+ # concurrency issue?
+ if not os.path.exists(path):
+ raise
+ with open(pyx_module_path, "wb") as f:
+ f.write(pyx_data)
+ elif not os.path.isfile(pyx_module_path):
+ continue # Module not found.
+
+ return PyxLoader(fullname, pyx_module_path,
+ pyxbuild_dir=self.pyxbuild_dir,
+ inplace=self.inplace,
+ language_level=self.language_level)
+
+ # not found, normal package, not a .pyx file, none of our business
+ _debug("%s not found" % fullname)
+ return None
+
+
+class PyImporter(PyxImporter):
+ """A meta-path importer for normal .py files.
+ """
+ def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None):
+ if language_level is None:
+ language_level = sys.version_info[0]
+ self.super = super(PyImporter, self)
+ self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace,
+ language_level=language_level)
+ self.uncompilable_modules = {}
+ self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild',
+ 'distutils']
+ self.blocked_packages = ['Cython.', 'distutils.']
+
+ def find_module(self, fullname, package_path=None):
+ if fullname in sys.modules:
+ return None
+ if any([fullname.startswith(pkg) for pkg in self.blocked_packages]):
+ return None
+ if fullname in self.blocked_modules:
+ # prevent infinite recursion
+ return None
+ if _lib_loader.knows(fullname):
+ return _lib_loader
+ _debug("trying import of module '%s'", fullname)
+ if fullname in self.uncompilable_modules:
+ path, last_modified = self.uncompilable_modules[fullname]
+ try:
+ new_last_modified = os.stat(path).st_mtime
+ if new_last_modified > last_modified:
+ # import would fail again
+ return None
+ except OSError:
+ # module is no longer where we found it, retry the import
+ pass
+
+ self.blocked_modules.append(fullname)
+ try:
+ importer = self.super.find_module(fullname, package_path)
+ if importer is not None:
+ if importer.init_path:
+ path = importer.init_path
+ real_name = fullname + '.__init__'
+ else:
+ path = importer.path
+ real_name = fullname
+ _debug("importer found path %s for module %s", path, real_name)
+ try:
+ so_path = build_module(
+ real_name, path,
+ pyxbuild_dir=self.pyxbuild_dir,
+ language_level=self.language_level,
+ inplace=self.inplace)
+ _lib_loader.add_lib(fullname, path, so_path,
+ is_package=bool(importer.init_path))
+ return _lib_loader
+ except Exception:
+ if DEBUG_IMPORT:
+ import traceback
+ traceback.print_exc()
+ # build failed, not a compilable Python module
+ try:
+ last_modified = os.stat(path).st_mtime
+ except OSError:
+ last_modified = 0
+ self.uncompilable_modules[fullname] = (path, last_modified)
+ importer = None
+ finally:
+ self.blocked_modules.pop()
+ return importer
+
+
+class LibLoader(object):
+ def __init__(self):
+ self._libs = {}
+
+ def load_module(self, fullname):
+ try:
+ source_path, so_path, is_package = self._libs[fullname]
+ except KeyError:
+ raise ValueError("invalid module %s" % fullname)
+ _debug("Loading shared library module '%s' from %s", fullname, so_path)
+ return load_module(fullname, source_path, so_path=so_path, is_package=is_package)
+
+ def add_lib(self, fullname, path, so_path, is_package):
+ self._libs[fullname] = (path, so_path, is_package)
+
+ def knows(self, fullname):
+ return fullname in self._libs
+
+_lib_loader = LibLoader()
+
+
+class PyxLoader(object):
+ def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None,
+ inplace=False, language_level=None):
+ _debug("PyxLoader created for loading %s from %s (init path: %s)",
+ fullname, path, init_path)
+ self.fullname = fullname
+ self.path, self.init_path = path, init_path
+ self.pyxbuild_dir = pyxbuild_dir
+ self.inplace = inplace
+ self.language_level = language_level
+
+ def load_module(self, fullname):
+ assert self.fullname == fullname, (
+ "invalid module, expected %s, got %s" % (
+ self.fullname, fullname))
+ if self.init_path:
+ # package
+ #print "PACKAGE", fullname
+ module = load_module(fullname, self.init_path,
+ self.pyxbuild_dir, is_package=True,
+ build_inplace=self.inplace,
+ language_level=self.language_level)
+ module.__path__ = [self.path]
+ else:
+ #print "MODULE", fullname
+ module = load_module(fullname, self.path,
+ self.pyxbuild_dir,
+ build_inplace=self.inplace,
+ language_level=self.language_level)
+ return module
+
+
+#install args
+class PyxArgs(object):
+ build_dir=True
+ build_in_temp=True
+ setup_args={} #None
+
+##pyxargs=None
+
+
+def _have_importers():
+ has_py_importer = False
+ has_pyx_importer = False
+ for importer in sys.meta_path:
+ if isinstance(importer, PyxImporter):
+ if isinstance(importer, PyImporter):
+ has_py_importer = True
+ else:
+ has_pyx_importer = True
+
+ return has_py_importer, has_pyx_importer
+
+
+def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
+ setup_args=None, reload_support=False,
+ load_py_module_on_import_failure=False, inplace=False,
+ language_level=None):
+ """ Main entry point for pyxinstall.
+
+ Call this to install the ``.pyx`` import hook in
+ your meta-path for a single Python process. If you want it to be
+ installed whenever you use Python, add it to your ``sitecustomize``
+ (as described above).
+
+ :param pyximport: If set to False, does not try to import ``.pyx`` files.
+
+ :param pyimport: You can pass ``pyimport=True`` to also
+ install the ``.py`` import hook
+ in your meta-path. Note, however, that it is rather experimental,
+ will not work at all for some ``.py`` files and packages, and will
+ heavily slow down your imports due to search and compilation.
+ Use at your own risk.
+
+ :param build_dir: By default, compiled modules will end up in a ``.pyxbld``
+ directory in the user's home directory. Passing a different path
+ as ``build_dir`` will override this.
+
+ :param build_in_temp: If ``False``, will produce the C files locally. Working
+ with complex dependencies and debugging becomes more easy. This
+ can principally interfere with existing files of the same name.
+
+ :param setup_args: Dict of arguments for Distribution.
+ See ``distutils.core.setup()``.
+
+ :param reload_support: Enables support for dynamic
+ ``reload(my_module)``, e.g. after a change in the Cython code.
+ Additional files ``<so_path>.reloadNN`` may arise on that account, when
+ the previously loaded module file cannot be overwritten.
+
+ :param load_py_module_on_import_failure: If the compilation of a ``.py``
+ file succeeds, but the subsequent import fails for some reason,
+ retry the import with the normal ``.py`` module instead of the
+ compiled module. Note that this may lead to unpredictable results
+ for modules that change the system state during their import, as
+ the second import will rerun these modifications in whatever state
+ the system was left after the import of the compiled module
+ failed.
+
+ :param inplace: Install the compiled module
+ (``.so`` for Linux and Mac / ``.pyd`` for Windows)
+ next to the source file.
+
+ :param language_level: The source language level to use: 2 or 3.
+ The default is to use the language level of the current Python
+ runtime for .py files and Py2 for ``.pyx`` files.
+ """
+ if setup_args is None:
+ setup_args = {}
+ if not build_dir:
+ build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')
+
+ global pyxargs
+ pyxargs = PyxArgs() #$pycheck_no
+ pyxargs.build_dir = build_dir
+ pyxargs.build_in_temp = build_in_temp
+ pyxargs.setup_args = (setup_args or {}).copy()
+ pyxargs.reload_support = reload_support
+ pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure
+
+ has_py_importer, has_pyx_importer = _have_importers()
+ py_importer, pyx_importer = None, None
+
+ if pyimport and not has_py_importer:
+ py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace,
+ language_level=language_level)
+ # make sure we import Cython before we install the import hook
+ import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize
+ sys.meta_path.insert(0, py_importer)
+
+ if pyximport and not has_pyx_importer:
+ pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace,
+ language_level=language_level)
+ sys.meta_path.append(pyx_importer)
+
+ return py_importer, pyx_importer
+
+
+def uninstall(py_importer, pyx_importer):
+ """
+ Uninstall an import hook.
+ """
+ try:
+ sys.meta_path.remove(py_importer)
+ except ValueError:
+ pass
+
+ try:
+ sys.meta_path.remove(pyx_importer)
+ except ValueError:
+ pass
+
+
+# MAIN
+
+def show_docs():
+ import __main__
+ __main__.__name__ = mod_name
+ for name in dir(__main__):
+ item = getattr(__main__, name)
+ try:
+ setattr(item, "__module__", mod_name)
+ except (AttributeError, TypeError):
+ pass
+ help(__main__)
+
+
+if __name__ == '__main__':
+ show_docs()
diff --git a/pyximport/_pyximport3.py b/pyximport/_pyximport3.py
new file mode 100644
index 000000000..4fa811f8a
--- /dev/null
+++ b/pyximport/_pyximport3.py
@@ -0,0 +1,478 @@
+"""
+Import hooks; when installed with the install() function, these hooks
+allow importing .pyx files as if they were Python modules.
+
+If you want the hook installed every time you run Python
+you can add it to your Python version by adding these lines to
+sitecustomize.py (which you can create from scratch in site-packages
+if it doesn't exist there or somewhere else on your python path)::
+
+ import pyximport
+ pyximport.install()
+
+For instance on the Mac with a non-system Python 2.3, you could create
+sitecustomize.py with only those two lines at
+/usr/local/lib/python2.3/site-packages/sitecustomize.py .
+
+A custom distutils.core.Extension instance and setup() args
+(Distribution) for the build can be defined by a <modulename>.pyxbld
+file like:
+
+# examplemod.pyxbld
+def make_ext(modname, pyxfilename):
+ from distutils.extension import Extension
+ return Extension(name = modname,
+ sources=[pyxfilename, 'hello.c'],
+ include_dirs=['/myinclude'] )
+def make_setup_args():
+ return dict(script_args=["--compiler=mingw32"])
+
+Extra dependencies can be defined by a <modulename>.pyxdep .
+See README.
+
+Since Cython 0.11, the :mod:`pyximport` module also has experimental
+compilation support for normal Python modules. This allows you to
+automatically run Cython on every .pyx and .py module that Python
+imports, including parts of the standard library and installed
+packages. Cython will still fail to compile a lot of Python modules,
+in which case the import mechanism will fall back to loading the
+Python source modules instead. The .py import mechanism is installed
+like this::
+
+ pyximport.install(pyimport = True)
+
+Running this module as a top-level script will run a test and then print
+the documentation.
+"""
+
+import glob
+import importlib
+import os
+import sys
+from importlib.abc import MetaPathFinder
+from importlib.machinery import ExtensionFileLoader, SourceFileLoader
+from importlib.util import spec_from_file_location
+
# Name under which show_docs() presents this module's documentation.
mod_name = "pyximport"

# File extensions handled by the import hooks and their companion files.
PY_EXT = ".py"
PYX_EXT = ".pyx"
PYXDEP_EXT = ".pyxdep"    # optional dependency list next to a .pyx file
PYXBLD_EXT = ".pyxbld"    # optional custom build script next to a .pyx file

# Set to True to get verbose import/build tracing via _debug().
DEBUG_IMPORT = False
+
+
+def _print(message, args):
+ if args:
+ message = message % args
+ print(message)
+
+
def _debug(message, *args):
    """Forward *message*/*args* to _print() only when DEBUG_IMPORT is enabled."""
    if not DEBUG_IMPORT:
        return
    _print(message, args)
+
+
def _info(message, *args):
    """Unconditionally print *message*, %-formatted with *args* if given."""
    _print(message, args)
+
+
def load_source(file_path):
    """Execute the Python file at *file_path* and return it as a module object.

    The module is created through the importlib machinery under a throwaway
    name ("XXXX") and is NOT registered in ``sys.modules``.
    """
    import importlib.util
    from importlib.machinery import SourceFileLoader
    loader = SourceFileLoader("XXXX", file_path)
    spec = importlib.util.spec_from_file_location("XXXX", file_path, loader=loader)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    return mod
+
+
def get_distutils_extension(modname, pyxfilename, language_level=None):
    """Return ``(Extension, setup_args)`` describing how to build *pyxfilename*.

    A ``<modulename>.pyxbld`` file next to the source, if present, may supply
    a custom Extension and/or setup() arguments (see handle_special_build);
    otherwise a default Extension compiling just the one source file is made.

    :param modname: fully qualified name for the extension module
    :param pyxfilename: path of the .pyx (or .py) source file
    :param language_level: optional Cython language level (2 or 3), stored in
        the Extension's ``cython_directives``
    """
    extension_mod, setup_args = handle_special_build(modname, pyxfilename)
    if not extension_mod:
        # NOTE: the old Py2 workaround that .encode()d non-str filenames was
        # removed: in this Python-3-only module it could only raise
        # (bytes/Path objects have no suitable .encode()).
        from distutils.extension import Extension
        extension_mod = Extension(name=modname, sources=[pyxfilename])
        if language_level is not None:
            extension_mod.cython_directives = {'language_level': language_level}
    return extension_mod, setup_args
+
+
def handle_special_build(modname, pyxfilename):
    """Load an optional ``<modulename>.pyxbld`` file describing a custom build.

    Returns ``(ext, setup_args)``: *ext* is the Extension produced by the
    .pyxbld file's ``make_ext()`` (or None if absent), *setup_args* is the
    dict returned by its ``make_setup_args()`` (or {}).  Source paths in the
    returned Extension are resolved relative to the .pyxbld file's directory.

    :raises AssertionError: if the .pyxbld file defines neither hook, or a
        hook returns the wrong type
    """
    special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
    ext = None
    setup_args = {}
    if os.path.exists(special_build):
        mod = load_source(special_build)
        make_ext = getattr(mod, 'make_ext', None)
        if make_ext:
            ext = make_ext(modname, pyxfilename)
            assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
        make_setup_args = getattr(mod, 'make_setup_args', None)
        if make_setup_args:
            setup_args = make_setup_args()
            assert isinstance(setup_args, dict), ("make_setup_args in %s did not return a dict"
                                                  % special_build)
        # BUG FIX: the original asserted "set or setup_args", which always
        # passes because the builtin 'set' is truthy; check 'ext' instead so
        # a .pyxbld defining neither hook is reported.
        assert ext or setup_args, ("neither make_ext nor make_setup_args %s"
                                   % special_build)
        # BUG FIX: only rewrite sources when make_ext produced an Extension;
        # the original crashed with AttributeError on ext=None when only
        # make_setup_args() was defined.
        if ext:
            ext.sources = [os.path.join(os.path.dirname(special_build), source)
                           for source in ext.sources]
    return ext, setup_args
+
+
def handle_dependencies(pyxfilename):
    """Force a rebuild of *pyxfilename* if any of its .pyxdep dependencies changed.

    An optional ``<module>.pyxdep`` file lists glob patterns (one per line,
    relative to the .pyxdep file) of files the .pyx depends on.  If any
    matching file is newer than the .pyx, the .pyx's mtime is bumped to match
    so that distutils sees it as outdated and rebuilds it.
    """
    # '_test_files' is only injected into globals() by the unit tests; when
    # present we also record which dependencies triggered the rebuild.
    testing = '_test_files' in globals()
    dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT

    # by default let distutils decide whether to rebuild on its own
    # (it has a better idea of what the output file will be)

    # but we know more about dependencies so force a rebuild if
    # some of the dependencies are newer than the pyxfile.
    if os.path.exists(dependfile):
        with open(dependfile) as fid:
            depends = fid.readlines()
        depends = [depend.strip() for depend in depends]

        # gather dependencies in the "files" variable
        # the dependency file is itself a dependency
        files = [dependfile]
        for depend in depends:
            fullpath = os.path.join(os.path.dirname(dependfile),
                                    depend)
            files.extend(glob.glob(fullpath))

        # only for unit testing to see we did the right thing
        if testing:
            _test_files[:] = [] #$pycheck_no

        # if any file that the pyxfile depends upon is newer than
        # the pyx file, 'touch' the pyx file so that distutils will
        # be tricked into rebuilding it.
        for file in files:
            # NOTE(review): distutils.dep_util is deprecated (PEP 632);
            # newer(a, b) is True when 'a' was modified more recently than 'b'.
            from distutils.dep_util import newer
            if newer(file, pyxfilename):
                _debug("Rebuilding %s because of %s", pyxfilename, file)
                filetime = os.path.getmtime(file)
                os.utime(pyxfilename, (filetime, filetime))
                if testing:
                    _test_files.append(file)
+
+
def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
    """Compile *pyxfilename* into an extension module and return the .so/.pyd path.

    Reads global build options from the module-global 'pyxargs' (set by
    install()), honours .pyxdep/.pyxbld companion files, and removes stale
    build artifacts of the same module found next to the result.

    :raises AssertionError: if the source file or the built binary is missing
    """
    assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
    handle_dependencies(pyxfilename)

    extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
    build_in_temp = pyxargs.build_in_temp
    sargs = pyxargs.setup_args.copy()
    sargs.update(setup_args)
    # a .pyxbld's make_setup_args() may override build_in_temp per module
    build_in_temp = sargs.pop('build_in_temp',build_in_temp)

    from . import pyxbuild
    olddir = os.getcwd()
    common = ''
    if pyxbuild_dir:
        # Windows concatenates the pyxbuild_dir to the pyxfilename when
        # compiling, and then complains that the filename is too long
        common = os.path.commonprefix([pyxbuild_dir, pyxfilename])
    if len(common) > 30:
        # shorten both paths by chdir'ing into their common prefix and
        # working with relative paths from there
        pyxfilename = os.path.relpath(pyxfilename)
        pyxbuild_dir = os.path.relpath(pyxbuild_dir)
        os.chdir(common)
    try:
        so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
                                      build_in_temp=build_in_temp,
                                      pyxbuild_dir=pyxbuild_dir,
                                      setup_args=sargs,
                                      inplace=inplace,
                                      reload_support=pyxargs.reload_support)
    finally:
        os.chdir(olddir)
    so_path = os.path.join(common, so_path)
    assert os.path.exists(so_path), "Cannot find: %s" % so_path

    junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
    junkstuff = glob.glob(junkpath)
    for path in junkstuff:
        if path != so_path:
            try:
                os.remove(path)
            except IOError:
                _info("Couldn't remove %s", path)

    return so_path
+
+
+# import hooks
+
class PyxImportMetaFinder(MetaPathFinder):
    """Meta-path finder that locates .pyx source files for importing."""

    def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, language_level=None):
        self.pyxbuild_dir = pyxbuild_dir
        self.inplace = inplace
        self.language_level = language_level
        self.extension = extension

    def find_spec(self, fullname, path, target=None):
        """Return a ModuleSpec for *fullname* if a matching source exists, else None."""
        search_paths = path or [os.getcwd()]  # top-level import
        leaf_name = fullname.rsplit(".", 1)[-1]
        for entry in search_paths:
            pkg_dir = os.path.join(entry, leaf_name)
            if os.path.isdir(pkg_dir):
                # a package: look for an __init__ with our extension
                candidate = os.path.join(pkg_dir, "__init__" + self.extension)
                submodule_locations = [pkg_dir]
            else:
                candidate = os.path.join(entry, leaf_name + self.extension)
                submodule_locations = None
            if not os.path.exists(candidate):
                continue
            loader = PyxImportLoader(candidate, self.pyxbuild_dir, self.inplace,
                                     self.language_level)
            return spec_from_file_location(
                fullname, candidate, loader=loader,
                submodule_search_locations=submodule_locations)

        return None  # we don't know how to import this
+
+
class PyImportMetaFinder(MetaPathFinder):
    """Meta-path finder that compiles plain .py modules with Cython on import.

    Experimental: any .py module found on the path is handed to
    PyxImportLoader, which may fall back to normal Python loading when
    compilation fails (see install()'s load_py_module_on_import_failure).
    """

    def __init__(self, extension=PY_EXT, pyxbuild_dir=None, inplace=False, language_level=None):
        self.pyxbuild_dir = pyxbuild_dir
        self.inplace = inplace
        self.language_level = language_level
        self.extension = extension
        # fullname -> (path, mtime) of modules whose build failed previously
        self.uncompilable_modules = {}
        # names that must never be compiled, to avoid recursing into Cython
        # itself (and its dependencies) while compiling a module
        self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild',
                                'distutils', 'cython']
        self.blocked_packages = ['Cython.', 'distutils.']

    def find_spec(self, fullname, path, target=None):
        """Return a ModuleSpec for *fullname*, or None when we must not handle it."""
        if fullname in sys.modules:
            return None
        if any([fullname.startswith(pkg) for pkg in self.blocked_packages]):
            return None
        if fullname in self.blocked_modules:
            # prevent infinite recursion
            return None

        # temporarily block this module while searching, so imports triggered
        # below cannot re-enter this finder for the same name
        self.blocked_modules.append(fullname)
        name = fullname
        if not path:
            path = [os.getcwd()] # top level import --
        try:
            for entry in path:
                if os.path.isdir(os.path.join(entry, name)):
                    # this module has child modules
                    filename = os.path.join(entry, name, "__init__" + self.extension)
                    submodule_locations = [os.path.join(entry, name)]
                else:
                    filename = os.path.join(entry, name + self.extension)
                    submodule_locations = None
                if not os.path.exists(filename):
                    continue

                return spec_from_file_location(
                    fullname, filename,
                    loader=PyxImportLoader(filename, self.pyxbuild_dir, self.inplace, self.language_level),
                    submodule_search_locations=submodule_locations)
        finally:
            # always unblock, even when returning a spec from inside the loop
            self.blocked_modules.pop()

        return None # we don't know how to import this
+
+
class PyxImportLoader(ExtensionFileLoader):
    """Loader that builds a .pyx/.py source into an extension module and loads it.

    create_module() triggers the Cython/distutils build; on failure for a .py
    source it can fall back to loading the plain Python module (controlled by
    pyxargs.load_py_module_on_import_failure).
    """

    def __init__(self, filename, pyxbuild_dir, inplace, language_level):
        module_name = os.path.splitext(os.path.basename(filename))[0]
        super().__init__(module_name, filename)
        self._pyxbuild_dir = pyxbuild_dir
        self._inplace = inplace
        self._language_level = language_level

    def create_module(self, spec):
        """Build the extension for *spec* and create the module object.

        :raises ImportError: if the build fails and no .py fallback applies
        """
        try:
            so_path = build_module(spec.name, pyxfilename=spec.origin, pyxbuild_dir=self._pyxbuild_dir,
                                   inplace=self._inplace, language_level=self._language_level)
            # re-point the loader and the spec at the freshly built binary
            self.path = so_path
            spec.origin = so_path
            return super().create_module(spec)
        except Exception as failure_exc:
            _debug("Failed to load extension module: %r" % failure_exc)
            if pyxargs.load_py_module_on_import_failure and spec.origin.endswith(PY_EXT):
                # fall back to importing the plain Python source instead
                spec = importlib.util.spec_from_file_location(spec.name, spec.origin,
                                                              loader=SourceFileLoader(spec.name, spec.origin))
                mod = importlib.util.module_from_spec(spec)
                assert mod.__file__ in (spec.origin, spec.origin + 'c', spec.origin + 'o'), (mod.__file__, spec.origin)
                return mod
            else:
                # re-raise as ImportError but keep the original traceback
                tb = sys.exc_info()[2]
                import traceback
                exc = ImportError("Building module %s failed: %s" % (
                    spec.name, traceback.format_exception_only(*sys.exc_info()[:2])))
                raise exc.with_traceback(tb)

    def exec_module(self, module):
        """Execute the built extension module; wrap failures in ImportError."""
        try:
            return super().exec_module(module)
        except Exception as failure_exc:
            import traceback
            _debug("Failed to load extension module: %r" % failure_exc)
            raise ImportError("Executing module %s failed %s" % (
                module.__file__, traceback.format_exception_only(*sys.exc_info()[:2])))
+
+
#install args
class PyxArgs(object):
    # Holder for the options passed to install(); install() stores a fresh,
    # fully configured instance in the module-global 'pyxargs'.  The class
    # level values below are only placeholders and are overwritten there.
    build_dir=True
    build_in_temp=True
    setup_args={} #None
+
+
def _have_importers():
    """Return (has_py_importer, has_pyx_importer) for hooks already installed.

    BUG FIX: the original nested isinstance() checks assumed that
    PyImportMetaFinder subclasses PyxImportMetaFinder (as PyImporter did with
    PyxImporter in the old implementation).  In this module the two finders
    are siblings (both derive directly from MetaPathFinder), so an installed
    .py importer was never detected and install() could stack duplicates.
    Check each class independently instead.
    """
    has_py_importer = False
    has_pyx_importer = False
    for importer in sys.meta_path:
        if isinstance(importer, PyImportMetaFinder):
            has_py_importer = True
        elif isinstance(importer, PyxImportMetaFinder):
            has_pyx_importer = True

    return has_py_importer, has_pyx_importer
+
+
def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
            setup_args=None, reload_support=False,
            load_py_module_on_import_failure=False, inplace=False,
            language_level=None):
    """ Main entry point for pyxinstall.

    Call this to install the ``.pyx`` import hook in
    your meta-path for a single Python process.  If you want it to be
    installed whenever you use Python, add it to your ``sitecustomize``
    (as described above).

    :param pyximport: If set to False, does not try to import ``.pyx`` files.

    :param pyimport: You can pass ``pyimport=True`` to also
        install the ``.py`` import hook
        in your meta-path.  Note, however, that it is rather experimental,
        will not work at all for some ``.py`` files and packages, and will
        heavily slow down your imports due to search and compilation.
        Use at your own risk.

    :param build_dir: By default, compiled modules will end up in a ``.pyxbld``
        directory in the user's home directory.  Passing a different path
        as ``build_dir`` will override this.

    :param build_in_temp: If ``False``, will produce the C files locally.  Working
        with complex dependencies and debugging becomes easier.  This
        can principally interfere with existing files of the same name.

    :param setup_args: Dict of arguments for Distribution.
        See ``distutils.core.setup()``.

    :param reload_support: Enables support for dynamic
        ``reload(my_module)``, e.g. after a change in the Cython code.
        Additional files ``<so_path>.reloadNN`` may arise on that account, when
        the previously loaded module file cannot be overwritten.

    :param load_py_module_on_import_failure: If the compilation of a ``.py``
        file succeeds, but the subsequent import fails for some reason,
        retry the import with the normal ``.py`` module instead of the
        compiled module.  Note that this may lead to unpredictable results
        for modules that change the system state during their import, as
        the second import will rerun these modifications in whatever state
        the system was left after the import of the compiled module
        failed.

    :param inplace: Install the compiled module
        (``.so`` for Linux and Mac / ``.pyd`` for Windows)
        next to the source file.

    :param language_level: The source language level to use: 2 or 3.
        The default is to use the language level of the current Python
        runtime for .py files and Py2 for ``.pyx`` files.
    """
    if setup_args is None:
        setup_args = {}
    if not build_dir:
        build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')

    # publish the effective options as a module-global PyxArgs instance,
    # read later by build_module() and the loaders
    global pyxargs
    pyxargs = PyxArgs() #$pycheck_no
    pyxargs.build_dir = build_dir
    pyxargs.build_in_temp = build_in_temp
    pyxargs.setup_args = (setup_args or {}).copy()  # private copy; the caller may mutate theirs
    pyxargs.reload_support = reload_support
    pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure

    has_py_importer, has_pyx_importer = _have_importers()
    py_importer, pyx_importer = None, None

    if pyimport and not has_py_importer:
        py_importer = PyImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace,
                                         language_level=language_level)
        # make sure we import Cython before we install the import hook
        import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize
        # must come first so it wins over the standard .py machinery
        sys.meta_path.insert(0, py_importer)

    if pyximport and not has_pyx_importer:
        pyx_importer = PyxImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace,
                                           language_level=language_level)
        sys.meta_path.append(pyx_importer)

    return py_importer, pyx_importer
+
+
def uninstall(py_importer, pyx_importer):
    """Remove the given import hooks from ``sys.meta_path``.

    Importers that are absent (or ``None``) are silently ignored.
    """
    for importer in (py_importer, pyx_importer):
        try:
            sys.meta_path.remove(importer)
        except ValueError:
            pass
+
+
+# MAIN
+
def show_docs():
    """Show this module's documentation by presenting __main__ under the pyximport name."""
    import __main__
    __main__.__name__ = mod_name
    for attr_name in dir(__main__):
        attr = getattr(__main__, attr_name)
        try:
            attr.__module__ = mod_name
        except (AttributeError, TypeError):
            pass
    help(__main__)
+
+
# Running this file as a script prints the module documentation.
if __name__ == '__main__':
    show_docs()
diff --git a/pyximport/pyximport.py b/pyximport/pyximport.py
index b2077826a..9d575815a 100644
--- a/pyximport/pyximport.py
+++ b/pyximport/pyximport.py
@@ -1,606 +1,11 @@
-"""
-Import hooks; when installed with the install() function, these hooks
-allow importing .pyx files as if they were Python modules.
-
-If you want the hook installed every time you run Python
-you can add it to your Python version by adding these lines to
-sitecustomize.py (which you can create from scratch in site-packages
-if it doesn't exist there or somewhere else on your python path)::
-
- import pyximport
- pyximport.install()
-
-For instance on the Mac with a non-system Python 2.3, you could create
-sitecustomize.py with only those two lines at
-/usr/local/lib/python2.3/site-packages/sitecustomize.py .
-
-A custom distutils.core.Extension instance and setup() args
-(Distribution) for for the build can be defined by a <modulename>.pyxbld
-file like:
-
-# examplemod.pyxbld
-def make_ext(modname, pyxfilename):
- from distutils.extension import Extension
- return Extension(name = modname,
- sources=[pyxfilename, 'hello.c'],
- include_dirs=['/myinclude'] )
-def make_setup_args():
- return dict(script_args=["--compiler=mingw32"])
-
-Extra dependencies can be defined by a <modulename>.pyxdep .
-See README.
-
-Since Cython 0.11, the :mod:`pyximport` module also has experimental
-compilation support for normal Python modules. This allows you to
-automatically run Cython on every .pyx and .py module that Python
-imports, including parts of the standard library and installed
-packages. Cython will still fail to compile a lot of Python modules,
-in which case the import mechanism will fall back to loading the
-Python source modules instead. The .py import mechanism is installed
-like this::
-
- pyximport.install(pyimport = True)
-
-Running this module as a top-level script will run a test and then print
-the documentation.
-
-This code is based on the Py2.3+ import protocol as described in PEP 302.
-"""
-
-import glob
-import imp
-import os
+from __future__ import absolute_import
import sys
-from zipimport import zipimporter, ZipImportError
-
-mod_name = "pyximport"
-
-PYX_EXT = ".pyx"
-PYXDEP_EXT = ".pyxdep"
-PYXBLD_EXT = ".pyxbld"
-
-DEBUG_IMPORT = False
-
-
-def _print(message, args):
- if args:
- message = message % args
- print(message)
-
-
-def _debug(message, *args):
- if DEBUG_IMPORT:
- _print(message, args)
-
-
-def _info(message, *args):
- _print(message, args)
-
-
-# Performance problem: for every PYX file that is imported, we will
-# invoke the whole distutils infrastructure even if the module is
-# already built. It might be more efficient to only do it when the
-# mod time of the .pyx is newer than the mod time of the .so but
-# the question is how to get distutils to tell me the name of the .so
-# before it builds it. Maybe it is easy...but maybe the performance
-# issue isn't real.
-def _load_pyrex(name, filename):
- "Load a pyrex file given a name and filename."
-
-
-def get_distutils_extension(modname, pyxfilename, language_level=None):
-# try:
-# import hashlib
-# except ImportError:
-# import md5 as hashlib
-# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest()
-# modname = modname + extra
- extension_mod,setup_args = handle_special_build(modname, pyxfilename)
- if not extension_mod:
- if not isinstance(pyxfilename, str):
- # distutils is stupid in Py2 and requires exactly 'str'
- # => encode accidentally coerced unicode strings back to str
- pyxfilename = pyxfilename.encode(sys.getfilesystemencoding())
- from distutils.extension import Extension
- extension_mod = Extension(name = modname, sources=[pyxfilename])
- if language_level is not None:
- extension_mod.cython_directives = {'language_level': language_level}
- return extension_mod,setup_args
-
-
-def handle_special_build(modname, pyxfilename):
- special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
- ext = None
- setup_args={}
- if os.path.exists(special_build):
- # globls = {}
- # locs = {}
- # execfile(special_build, globls, locs)
- # ext = locs["make_ext"](modname, pyxfilename)
- with open(special_build) as fid:
- mod = imp.load_source("XXXX", special_build, fid)
- make_ext = getattr(mod,'make_ext',None)
- if make_ext:
- ext = make_ext(modname, pyxfilename)
- assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
- make_setup_args = getattr(mod, 'make_setup_args',None)
- if make_setup_args:
- setup_args = make_setup_args()
- assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict"
- % special_build)
- assert set or setup_args, ("neither make_ext nor make_setup_args %s"
- % special_build)
- ext.sources = [os.path.join(os.path.dirname(special_build), source)
- for source in ext.sources]
- return ext, setup_args
-
-
-def handle_dependencies(pyxfilename):
- testing = '_test_files' in globals()
- dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT
-
- # by default let distutils decide whether to rebuild on its own
- # (it has a better idea of what the output file will be)
-
- # but we know more about dependencies so force a rebuild if
- # some of the dependencies are newer than the pyxfile.
- if os.path.exists(dependfile):
- with open(dependfile) as fid:
- depends = fid.readlines()
- depends = [depend.strip() for depend in depends]
-
- # gather dependencies in the "files" variable
- # the dependency file is itself a dependency
- files = [dependfile]
- for depend in depends:
- fullpath = os.path.join(os.path.dirname(dependfile),
- depend)
- files.extend(glob.glob(fullpath))
-
- # only for unit testing to see we did the right thing
- if testing:
- _test_files[:] = [] #$pycheck_no
-
- # if any file that the pyxfile depends upon is newer than
- # the pyx file, 'touch' the pyx file so that distutils will
- # be tricked into rebuilding it.
- for file in files:
- from distutils.dep_util import newer
- if newer(file, pyxfilename):
- _debug("Rebuilding %s because of %s", pyxfilename, file)
- filetime = os.path.getmtime(file)
- os.utime(pyxfilename, (filetime, filetime))
- if testing:
- _test_files.append(file)
-
-
-def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
- assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
- handle_dependencies(pyxfilename)
-
- extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
- build_in_temp = pyxargs.build_in_temp
- sargs = pyxargs.setup_args.copy()
- sargs.update(setup_args)
- build_in_temp = sargs.pop('build_in_temp',build_in_temp)
-
- from . import pyxbuild
- so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
- build_in_temp=build_in_temp,
- pyxbuild_dir=pyxbuild_dir,
- setup_args=sargs,
- inplace=inplace,
- reload_support=pyxargs.reload_support)
- assert os.path.exists(so_path), "Cannot find: %s" % so_path
-
- junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
- junkstuff = glob.glob(junkpath)
- for path in junkstuff:
- if path != so_path:
- try:
- os.remove(path)
- except IOError:
- _info("Couldn't remove %s", path)
-
- return so_path
-
-
-def load_module(name, pyxfilename, pyxbuild_dir=None, is_package=False,
- build_inplace=False, language_level=None, so_path=None):
- try:
- if so_path is None:
- if is_package:
- module_name = name + '.__init__'
- else:
- module_name = name
- so_path = build_module(module_name, pyxfilename, pyxbuild_dir,
- inplace=build_inplace, language_level=language_level)
- mod = imp.load_dynamic(name, so_path)
- if is_package and not hasattr(mod, '__path__'):
- mod.__path__ = [os.path.dirname(so_path)]
- assert mod.__file__ == so_path, (mod.__file__, so_path)
- except Exception as failure_exc:
- _debug("Failed to load extension module: %r" % failure_exc)
- if pyxargs.load_py_module_on_import_failure and pyxfilename.endswith('.py'):
- # try to fall back to normal import
- mod = imp.load_source(name, pyxfilename)
- assert mod.__file__ in (pyxfilename, pyxfilename+'c', pyxfilename+'o'), (mod.__file__, pyxfilename)
- else:
- tb = sys.exc_info()[2]
- import traceback
- exc = ImportError("Building module %s failed: %s" % (
- name, traceback.format_exception_only(*sys.exc_info()[:2])))
- if sys.version_info[0] >= 3:
- raise exc.with_traceback(tb)
- else:
- exec("raise exc, None, tb", {'exc': exc, 'tb': tb})
- return mod
-
-
-# import hooks
-
-class PyxImporter(object):
- """A meta-path importer for .pyx files.
- """
- def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False,
- language_level=None):
- self.extension = extension
- self.pyxbuild_dir = pyxbuild_dir
- self.inplace = inplace
- self.language_level = language_level
-
- def find_module(self, fullname, package_path=None):
- if fullname in sys.modules and not pyxargs.reload_support:
- return None # only here when reload()
-
- # package_path might be a _NamespacePath. Convert that into a list...
- if package_path is not None and not isinstance(package_path, list):
- package_path = list(package_path)
- try:
- fp, pathname, (ext,mode,ty) = imp.find_module(fullname,package_path)
- if fp: fp.close() # Python should offer a Default-Loader to avoid this double find/open!
- if pathname and ty == imp.PKG_DIRECTORY:
- pkg_file = os.path.join(pathname, '__init__'+self.extension)
- if os.path.isfile(pkg_file):
- return PyxLoader(fullname, pathname,
- init_path=pkg_file,
- pyxbuild_dir=self.pyxbuild_dir,
- inplace=self.inplace,
- language_level=self.language_level)
- if pathname and pathname.endswith(self.extension):
- return PyxLoader(fullname, pathname,
- pyxbuild_dir=self.pyxbuild_dir,
- inplace=self.inplace,
- language_level=self.language_level)
- if ty != imp.C_EXTENSION: # only when an extension, check if we have a .pyx next!
- return None
-
- # find .pyx fast, when .so/.pyd exist --inplace
- pyxpath = os.path.splitext(pathname)[0]+self.extension
- if os.path.isfile(pyxpath):
- return PyxLoader(fullname, pyxpath,
- pyxbuild_dir=self.pyxbuild_dir,
- inplace=self.inplace,
- language_level=self.language_level)
-
- # .so/.pyd's on PATH should not be remote from .pyx's
- # think no need to implement PyxArgs.importer_search_remote here?
-
- except ImportError:
- pass
-
- # searching sys.path ...
-
- #if DEBUG_IMPORT: print "SEARCHING", fullname, package_path
-
- mod_parts = fullname.split('.')
- module_name = mod_parts[-1]
- pyx_module_name = module_name + self.extension
-
- # this may work, but it returns the file content, not its path
- #import pkgutil
- #pyx_source = pkgutil.get_data(package, pyx_module_name)
-
- paths = package_path or sys.path
- for path in paths:
- pyx_data = None
- if not path:
- path = os.getcwd()
- elif os.path.isfile(path):
- try:
- zi = zipimporter(path)
- pyx_data = zi.get_data(pyx_module_name)
- except (ZipImportError, IOError, OSError):
- continue # Module not found.
- # unzip the imported file into the build dir
- # FIXME: can interfere with later imports if build dir is in sys.path and comes before zip file
- path = self.pyxbuild_dir
- elif not os.path.isabs(path):
- path = os.path.abspath(path)
-
- pyx_module_path = os.path.join(path, pyx_module_name)
- if pyx_data is not None:
- if not os.path.exists(path):
- try:
- os.makedirs(path)
- except OSError:
- # concurrency issue?
- if not os.path.exists(path):
- raise
- with open(pyx_module_path, "wb") as f:
- f.write(pyx_data)
- elif not os.path.isfile(pyx_module_path):
- continue # Module not found.
-
- return PyxLoader(fullname, pyx_module_path,
- pyxbuild_dir=self.pyxbuild_dir,
- inplace=self.inplace,
- language_level=self.language_level)
-
- # not found, normal package, not a .pyx file, none of our business
- _debug("%s not found" % fullname)
- return None
-
-
-class PyImporter(PyxImporter):
- """A meta-path importer for normal .py files.
- """
- def __init__(self, pyxbuild_dir=None, inplace=False, language_level=None):
- if language_level is None:
- language_level = sys.version_info[0]
- self.super = super(PyImporter, self)
- self.super.__init__(extension='.py', pyxbuild_dir=pyxbuild_dir, inplace=inplace,
- language_level=language_level)
- self.uncompilable_modules = {}
- self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild',
- 'distutils']
- self.blocked_packages = ['Cython.', 'distutils.']
-
- def find_module(self, fullname, package_path=None):
- if fullname in sys.modules:
- return None
- if any([fullname.startswith(pkg) for pkg in self.blocked_packages]):
- return None
- if fullname in self.blocked_modules:
- # prevent infinite recursion
- return None
- if _lib_loader.knows(fullname):
- return _lib_loader
- _debug("trying import of module '%s'", fullname)
- if fullname in self.uncompilable_modules:
- path, last_modified = self.uncompilable_modules[fullname]
- try:
- new_last_modified = os.stat(path).st_mtime
- if new_last_modified > last_modified:
- # import would fail again
- return None
- except OSError:
- # module is no longer where we found it, retry the import
- pass
-
- self.blocked_modules.append(fullname)
- try:
- importer = self.super.find_module(fullname, package_path)
- if importer is not None:
- if importer.init_path:
- path = importer.init_path
- real_name = fullname + '.__init__'
- else:
- path = importer.path
- real_name = fullname
- _debug("importer found path %s for module %s", path, real_name)
- try:
- so_path = build_module(
- real_name, path,
- pyxbuild_dir=self.pyxbuild_dir,
- language_level=self.language_level,
- inplace=self.inplace)
- _lib_loader.add_lib(fullname, path, so_path,
- is_package=bool(importer.init_path))
- return _lib_loader
- except Exception:
- if DEBUG_IMPORT:
- import traceback
- traceback.print_exc()
- # build failed, not a compilable Python module
- try:
- last_modified = os.stat(path).st_mtime
- except OSError:
- last_modified = 0
- self.uncompilable_modules[fullname] = (path, last_modified)
- importer = None
- finally:
- self.blocked_modules.pop()
- return importer
-
-
-class LibLoader(object):
- def __init__(self):
- self._libs = {}
-
- def load_module(self, fullname):
- try:
- source_path, so_path, is_package = self._libs[fullname]
- except KeyError:
- raise ValueError("invalid module %s" % fullname)
- _debug("Loading shared library module '%s' from %s", fullname, so_path)
- return load_module(fullname, source_path, so_path=so_path, is_package=is_package)
-
- def add_lib(self, fullname, path, so_path, is_package):
- self._libs[fullname] = (path, so_path, is_package)
-
- def knows(self, fullname):
- return fullname in self._libs
-
-_lib_loader = LibLoader()
-
-
-class PyxLoader(object):
- def __init__(self, fullname, path, init_path=None, pyxbuild_dir=None,
- inplace=False, language_level=None):
- _debug("PyxLoader created for loading %s from %s (init path: %s)",
- fullname, path, init_path)
- self.fullname = fullname
- self.path, self.init_path = path, init_path
- self.pyxbuild_dir = pyxbuild_dir
- self.inplace = inplace
- self.language_level = language_level
-
- def load_module(self, fullname):
- assert self.fullname == fullname, (
- "invalid module, expected %s, got %s" % (
- self.fullname, fullname))
- if self.init_path:
- # package
- #print "PACKAGE", fullname
- module = load_module(fullname, self.init_path,
- self.pyxbuild_dir, is_package=True,
- build_inplace=self.inplace,
- language_level=self.language_level)
- module.__path__ = [self.path]
- else:
- #print "MODULE", fullname
- module = load_module(fullname, self.path,
- self.pyxbuild_dir,
- build_inplace=self.inplace,
- language_level=self.language_level)
- return module
-
-
-#install args
-class PyxArgs(object):
- build_dir=True
- build_in_temp=True
- setup_args={} #None
-
-##pyxargs=None
-
-
-def _have_importers():
- has_py_importer = False
- has_pyx_importer = False
- for importer in sys.meta_path:
- if isinstance(importer, PyxImporter):
- if isinstance(importer, PyImporter):
- has_py_importer = True
- else:
- has_pyx_importer = True
-
- return has_py_importer, has_pyx_importer
-
-
-def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
- setup_args=None, reload_support=False,
- load_py_module_on_import_failure=False, inplace=False,
- language_level=None):
- """ Main entry point for pyxinstall.
-
- Call this to install the ``.pyx`` import hook in
- your meta-path for a single Python process. If you want it to be
- installed whenever you use Python, add it to your ``sitecustomize``
- (as described above).
-
- :param pyximport: If set to False, does not try to import ``.pyx`` files.
-
- :param pyimport: You can pass ``pyimport=True`` to also
- install the ``.py`` import hook
- in your meta-path. Note, however, that it is rather experimental,
- will not work at all for some ``.py`` files and packages, and will
- heavily slow down your imports due to search and compilation.
- Use at your own risk.
-
- :param build_dir: By default, compiled modules will end up in a ``.pyxbld``
- directory in the user's home directory. Passing a different path
- as ``build_dir`` will override this.
-
- :param build_in_temp: If ``False``, will produce the C files locally. Working
- with complex dependencies and debugging becomes more easy. This
- can principally interfere with existing files of the same name.
-
- :param setup_args: Dict of arguments for Distribution.
- See ``distutils.core.setup()``.
-
- :param reload_support: Enables support for dynamic
- ``reload(my_module)``, e.g. after a change in the Cython code.
- Additional files ``<so_path>.reloadNN`` may arise on that account, when
- the previously loaded module file cannot be overwritten.
-
- :param load_py_module_on_import_failure: If the compilation of a ``.py``
- file succeeds, but the subsequent import fails for some reason,
- retry the import with the normal ``.py`` module instead of the
- compiled module. Note that this may lead to unpredictable results
- for modules that change the system state during their import, as
- the second import will rerun these modifications in whatever state
- the system was left after the import of the compiled module
- failed.
-
- :param inplace: Install the compiled module
- (``.so`` for Linux and Mac / ``.pyd`` for Windows)
- next to the source file.
-
- :param language_level: The source language level to use: 2 or 3.
- The default is to use the language level of the current Python
- runtime for .py files and Py2 for ``.pyx`` files.
- """
- if setup_args is None:
- setup_args = {}
- if not build_dir:
- build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')
-
- global pyxargs
- pyxargs = PyxArgs() #$pycheck_no
- pyxargs.build_dir = build_dir
- pyxargs.build_in_temp = build_in_temp
- pyxargs.setup_args = (setup_args or {}).copy()
- pyxargs.reload_support = reload_support
- pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure
-
- has_py_importer, has_pyx_importer = _have_importers()
- py_importer, pyx_importer = None, None
-
- if pyimport and not has_py_importer:
- py_importer = PyImporter(pyxbuild_dir=build_dir, inplace=inplace,
- language_level=language_level)
- # make sure we import Cython before we install the import hook
- import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize
- sys.meta_path.insert(0, py_importer)
-
- if pyximport and not has_pyx_importer:
- pyx_importer = PyxImporter(pyxbuild_dir=build_dir, inplace=inplace,
- language_level=language_level)
- sys.meta_path.append(pyx_importer)
-
- return py_importer, pyx_importer
-
-
-def uninstall(py_importer, pyx_importer):
- """
- Uninstall an import hook.
- """
- try:
- sys.meta_path.remove(py_importer)
- except ValueError:
- pass
-
- try:
- sys.meta_path.remove(pyx_importer)
- except ValueError:
- pass
-
-
-# MAIN
-
-def show_docs():
- import __main__
- __main__.__name__ = mod_name
- for name in dir(__main__):
- item = getattr(__main__, name)
- try:
- setattr(item, "__module__", mod_name)
- except (AttributeError, TypeError):
- pass
- help(__main__)
+if sys.version_info < (3, 5):
+ # _pyximport3 module requires at least Python 3.5
+ from pyximport._pyximport2 import install, uninstall, show_docs
+else:
+ from pyximport._pyximport3 import install, uninstall, show_docs
if __name__ == '__main__':
show_docs()
diff --git a/runtests.py b/runtests.py
index 72608882a..7b0b74c30 100755
--- a/runtests.py
+++ b/runtests.py
@@ -294,57 +294,60 @@ def update_openmp_extension(ext):
return EXCLUDE_EXT
-def update_cpp11_extension(ext):
- """
- update cpp11 extensions that will run on versions of gcc >4.8
- """
- gcc_version = get_gcc_version(ext.language)
- already_has_std = any(ca for ca in ext.extra_compile_args if "-std" in ca)
- if gcc_version:
- compiler_version = gcc_version.group(1)
- if float(compiler_version) > 4.8 and not already_has_std:
- ext.extra_compile_args.append("-std=c++11")
- return ext
+def update_cpp_extension(cpp_std, min_gcc_version=None, min_clang_version=None, min_macos_version=None):
+ def _update_cpp_extension(ext):
+ """
+ Update cpp[cpp_std] extensions that will run on minimum versions of gcc / clang / macos.
+ """
+ # If the extension provides a -std=... option, assume that whatever C compiler we use
+ # will probably be ok with it.
+ already_has_std = any(
+ ca for ca in ext.extra_compile_args
+ if "-std" in ca and "-stdlib" not in ca
+ )
+ use_gcc = use_clang = already_has_std
- clang_version = get_clang_version(ext.language)
- if clang_version:
- if not already_has_std:
- ext.extra_compile_args.append("-std=c++11")
- if sys.platform == "darwin":
- ext.extra_compile_args.append("-stdlib=libc++")
- ext.extra_compile_args.append("-mmacosx-version-min=10.7")
- return ext
+ # check for a usable gcc version
+ gcc_version = get_gcc_version(ext.language)
+ if gcc_version:
+ if cpp_std >= 17 and sys.version_info[0] < 3:
+ # The Python 2.7 headers contain the 'register' modifier
+ # which gcc warns about in C++17 mode.
+ ext.extra_compile_args.append('-Wno-register')
+ if not already_has_std:
+ compiler_version = gcc_version.group(1)
+ if not min_gcc_version or float(compiler_version) >= float(min_gcc_version):
+ use_gcc = True
+ ext.extra_compile_args.append("-std=c++%s" % cpp_std)
+
+ if use_gcc:
+ return ext
- return EXCLUDE_EXT
+ # check for a usable clang version
+ clang_version = get_clang_version(ext.language)
+ if clang_version:
+ if cpp_std >= 17 and sys.version_info[0] < 3:
+ # The Python 2.7 headers contain the 'register' modifier
+ # which clang warns about in C++17 mode.
+ ext.extra_compile_args.append('-Wno-register')
+ if not already_has_std:
+ compiler_version = clang_version.group(1)
+ if not min_clang_version or float(compiler_version) >= float(min_clang_version):
+ use_clang = True
+ ext.extra_compile_args.append("-std=c++%s" % cpp_std)
+ if sys.platform == "darwin":
+ ext.extra_compile_args.append("-stdlib=libc++")
+ if min_macos_version is not None:
+ ext.extra_compile_args.append("-mmacosx-version-min=" + min_macos_version)
+
+ if use_clang:
+ return ext
-def update_cpp17_extension(ext):
- """
- update cpp17 extensions that will run on versions of gcc >=5.0
- """
- gcc_version = get_gcc_version(ext.language)
- if gcc_version:
- compiler_version = gcc_version.group(1)
- if sys.version_info[0] < 3:
- # The Python 2.7 headers contain the 'register' modifier
- # which gcc warns about in C++17 mode.
- ext.extra_compile_args.append('-Wno-register')
- if float(compiler_version) >= 5.0:
- ext.extra_compile_args.append("-std=c++17")
- return ext
+ # no usable C compiler found => exclude the extension
+ return EXCLUDE_EXT
- clang_version = get_clang_version(ext.language)
- if clang_version:
- ext.extra_compile_args.append("-std=c++17")
- if sys.version_info[0] < 3:
- # The Python 2.7 headers contain the 'register' modifier
- # which clang warns about in C++17 mode.
- ext.extra_compile_args.append('-Wno-register')
- if sys.platform == "darwin":
- ext.extra_compile_args.append("-stdlib=libc++")
- ext.extra_compile_args.append("-mmacosx-version-min=10.13")
- return ext
+ return _update_cpp_extension
- return EXCLUDE_EXT
def require_gcc(version):
def check(ext):
@@ -438,8 +441,9 @@ EXT_EXTRAS = {
'tag:numpy' : update_numpy_extension,
'tag:openmp': update_openmp_extension,
'tag:gdb': update_gdb_extension,
- 'tag:cpp11': update_cpp11_extension,
- 'tag:cpp17': update_cpp17_extension,
+ 'tag:cpp11': update_cpp_extension(11, min_gcc_version="4.9", min_macos_version="10.7"),
+ 'tag:cpp17': update_cpp_extension(17, min_gcc_version="5.0", min_macos_version="10.13"),
+ 'tag:cpp20': update_cpp_extension(20, min_gcc_version="11.0", min_clang_version="13.0", min_macos_version="10.13"),
'tag:trace' : update_linetrace_extension,
'tag:bytesformat': exclude_extension_in_pyver((3, 3), (3, 4)), # no %-bytes formatting
'tag:no-macos': exclude_extension_on_platform('darwin'),
@@ -467,6 +471,7 @@ VER_DEP_MODULES = {
'compile.extsetslice',
'compile.extdelslice',
'run.special_methods_T561_py2',
+ 'run.builtin_type_inheritance_T608_py2only',
]),
(3,3) : (operator.lt, lambda x: x in ['build.package_compilation',
'build.cythonize_pep420_namespace',
@@ -477,6 +482,7 @@ VER_DEP_MODULES = {
(3,4): (operator.lt, lambda x: x in ['run.py34_signature',
'run.test_unicode', # taken from Py3.7, difficult to backport
'run.pep442_tp_finalize',
+ 'run.pep442_tp_finalize_cimport',
]),
(3,4,999): (operator.gt, lambda x: x in ['run.initial_file_path',
]),
@@ -491,6 +497,7 @@ VER_DEP_MODULES = {
]),
(3,7): (operator.lt, lambda x: x in ['run.pycontextvar',
'run.pep557_dataclasses', # dataclasses module
+ 'run.test_dataclasses',
]),
}
@@ -1426,6 +1433,8 @@ class CythonCompileTestCase(unittest.TestCase):
def _match_output(self, expected_output, actual_output, write):
try:
for expected, actual in zip(expected_output, actual_output):
+ if expected != actual and '\\' in actual and os.sep == '\\' and '/' in expected and '\\' not in expected:
+ expected = expected.replace('/', '\\')
self.assertEqual(expected, actual)
if len(actual_output) < len(expected_output):
expected = expected_output[len(actual_output)]
@@ -1926,6 +1935,8 @@ class EndToEndTest(unittest.TestCase):
os.chdir(self.old_dir)
def _try_decode(self, content):
+ if not isinstance(content, bytes):
+ return content
try:
return content.decode()
except UnicodeDecodeError:
@@ -1965,6 +1976,10 @@ class EndToEndTest(unittest.TestCase):
for c, o, e in zip(cmd, out, err):
sys.stderr.write("[%d] %s\n%s\n%s\n\n" % (
self.shard_num, c, self._try_decode(o), self._try_decode(e)))
+ sys.stderr.write("Final directory layout of '%s':\n%s\n\n" % (
+ self.name,
+ '\n'.join(os.path.join(dirpath, filename) for dirpath, dirs, files in os.walk(".") for filename in files),
+ ))
self.assertEqual(0, res, "non-zero exit status, last output was:\n%r\n-- stdout:%s\n-- stderr:%s\n" % (
' '.join(command), self._try_decode(out[-1]), self._try_decode(err[-1])))
self.success = True
@@ -2531,12 +2546,17 @@ def configure_cython(options):
CompilationOptions, \
default_options as pyrex_default_options
from Cython.Compiler.Options import _directive_defaults as directive_defaults
+
from Cython.Compiler import Errors
Errors.LEVEL = 0 # show all warnings
+
from Cython.Compiler import Options
Options.generate_cleanup_code = 3 # complete cleanup code
+
from Cython.Compiler import DebugFlags
DebugFlags.debug_temp_code_comments = 1
+ DebugFlags.debug_no_exception_intercept = 1 # provide better crash output in CI runs
+
pyrex_default_options['formal_grammar'] = options.use_formal_grammar
if options.profile:
directive_defaults['profile'] = True
@@ -2701,7 +2721,8 @@ def runtests(options, cmd_args, coverage=None):
('graal_bugs.txt', IS_GRAAL),
('limited_api_bugs.txt', options.limited_api),
('windows_bugs.txt', sys.platform == 'win32'),
- ('cygwin_bugs.txt', sys.platform == 'cygwin')
+ ('cygwin_bugs.txt', sys.platform == 'cygwin'),
+ ('windows_bugs_39.txt', sys.platform == 'win32' and sys.version_info[:2] == (3, 9))
]
exclude_selectors += [
diff --git a/setup.py b/setup.py
index 8d5089c18..26beca2f4 100755
--- a/setup.py
+++ b/setup.py
@@ -94,16 +94,16 @@ def compile_cython_modules(profile=False, coverage=False, compile_minimal=False,
"Cython.Plex.Machines",
"Cython.Plex.Transitions",
"Cython.Plex.DFA",
+ "Cython.Compiler.Code",
"Cython.Compiler.FusedNode",
+ "Cython.Compiler.Parsing",
"Cython.Tempita._tempita",
"Cython.StringIOTree",
"Cython.Utils",
])
if compile_more and not compile_minimal:
compiled_modules.extend([
- "Cython.Compiler.Code",
"Cython.Compiler.Lexicon",
- "Cython.Compiler.Parsing",
"Cython.Compiler.Pythran",
"Cython.Build.Dependencies",
"Cython.Compiler.ParseTreeTransforms",
@@ -183,37 +183,27 @@ def compile_cython_modules(profile=False, coverage=False, compile_minimal=False,
setup_args['ext_modules'] = extensions
-cython_profile = '--cython-profile' in sys.argv
-if cython_profile:
- sys.argv.remove('--cython-profile')
+def check_option(name):
+ cli_arg = "--" + name
+ if cli_arg in sys.argv:
+ sys.argv.remove(cli_arg)
+ return True
-cython_coverage = '--cython-coverage' in sys.argv
-if cython_coverage:
- sys.argv.remove('--cython-coverage')
+ env_var = name.replace("-", "_").upper()
+ if os.environ.get(env_var) == "true":
+ return True
-try:
- sys.argv.remove("--cython-compile-all")
- cython_compile_more = True
-except ValueError:
- cython_compile_more = False
+ return False
-try:
- sys.argv.remove("--cython-compile-minimal")
- cython_compile_minimal = True
-except ValueError:
- cython_compile_minimal = False
-try:
- sys.argv.remove("--cython-with-refnanny")
- cython_with_refnanny = True
-except ValueError:
- cython_with_refnanny = False
+cython_profile = check_option('cython-profile')
+cython_coverage = check_option('cython-coverage')
+cython_with_refnanny = check_option('cython-with-refnanny')
-try:
- sys.argv.remove("--no-cython-compile")
- compile_cython_itself = False
-except ValueError:
- compile_cython_itself = True
+compile_cython_itself = not check_option('no-cython-compile')
+if compile_cython_itself:
+ cython_compile_more = check_option('cython-compile-all')
+ cython_compile_minimal = check_option('cython-compile-minimal')
setup_args.update(setuptools_extra_args)
@@ -283,7 +273,7 @@ def run_build():
.. _Pyrex: https://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/
"""),
- license='Apache',
+ license='Apache-2.0',
classifiers=[
dev_status(version),
"Intended Audience :: Developers",
@@ -308,6 +298,13 @@ def run_build():
"Topic :: Software Development :: Compilers",
"Topic :: Software Development :: Libraries :: Python Modules"
],
+ project_urls={
+ "Documentation": "https://cython.readthedocs.io/",
+ "Donate": "https://cython.readthedocs.io/en/latest/src/donating.html",
+ "Source Code": "https://github.com/cython/cython",
+ "Bug Tracker": "https://github.com/cython/cython/issues",
+ "User Group": "https://groups.google.com/g/cython-users",
+ },
scripts=scripts,
packages=packages,
diff --git a/test-requirements-27.txt b/test-requirements-27.txt
index a3ad0439e..b518c2570 100644
--- a/test-requirements-27.txt
+++ b/test-requirements-27.txt
@@ -26,6 +26,7 @@ jupyter-console==5.2.0
jupyter-core==4.6.3
line-profiler==3.1.0
MarkupSafe==1.1.1
+maturin==0.7.6; os_name == "nt" # actually 0.9.4, but it's not available; pywinpty dependency
mistune==0.8.4
nbconvert==5.6.1
nbformat==4.4.0
@@ -44,6 +45,7 @@ Pygments==2.5.2
pyparsing==2.4.7
pyrsistent==0.15.7
python-dateutil==2.8.1
+pywinpty==0.5.7 # terminado dependency (pywinpty>=0.5)
pyzmq==16.0.4
qtconsole==4.7.7
QtPy==1.9.0
@@ -60,3 +62,4 @@ wcwidth==0.2.5
webencodings==0.5.1
widgetsnbextension==3.5.1
zipp==1.2.0
+mock==3.0.5
diff --git a/test-requirements-34.txt b/test-requirements-34.txt
index 8697eff4b..8a48d1ae6 100644
--- a/test-requirements-34.txt
+++ b/test-requirements-34.txt
@@ -1,3 +1,3 @@
-numpy < 1.19.0
+numpy<1.16.0
coverage
pycodestyle
diff --git a/test-requirements-cpython.txt b/test-requirements-cpython.txt
index 1cfae040b..28db037b2 100644
--- a/test-requirements-cpython.txt
+++ b/test-requirements-cpython.txt
@@ -1,3 +1,4 @@
jupyter
+pytest # needed by IPython/Jupyter integration tests
line_profiler
setuptools<60
diff --git a/test-requirements-pypy27.txt b/test-requirements-pypy27.txt
new file mode 100644
index 000000000..6d4f83bca
--- /dev/null
+++ b/test-requirements-pypy27.txt
@@ -0,0 +1,3 @@
+-r test-requirements.txt
+enum34==1.1.10
+mock==3.0.5
diff --git a/tests/buffers/bufaccess.pyx b/tests/buffers/bufaccess.pyx
index 6b0b4ac30..3144f613d 100644
--- a/tests/buffers/bufaccess.pyx
+++ b/tests/buffers/bufaccess.pyx
@@ -10,7 +10,7 @@
from __future__ import unicode_literals
from cpython.object cimport PyObject
-from cpython.ref cimport Py_INCREF, Py_DECREF
+from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR
cimport cython
import sys
@@ -1005,6 +1005,46 @@ def assign_to_object(object[object] buf, int idx, obj):
buf[idx] = obj
@testcase
+def check_object_nulled_1d(MockBuffer[object, ndim=1] buf, int idx, obj):
+ """
+ See comments on printbuf_object above.
+
+ >>> a = object()
+ >>> rc1 = get_refcount(a)
+ >>> A = ObjectMockBuffer(None, [a, a])
+ >>> check_object_nulled_1d(A, 0, a)
+ >>> check_object_nulled_1d(A, 1, a)
+ >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,))
+ >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride
+ >>> get_refcount(a) == rc1
+ True
+ """
+ cdef PyObject **data = <PyObject **>buf.buffer
+ Py_CLEAR(data[idx])
+ res = buf[idx] # takes None
+ buf[idx] = obj
+ return res
+
+@testcase
+def check_object_nulled_2d(MockBuffer[object, ndim=2] buf, int idx1, int idx2, obj):
+ """
+ See comments on printbuf_object above.
+
+ >>> a = object()
+ >>> rc1 = get_refcount(a)
+ >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2))
+ >>> check_object_nulled_2d(A, 0, 0, a)
+ >>> check_object_nulled_2d(A, 1, 1, a)
+ >>> get_refcount(a) == rc1
+ True
+ """
+ cdef PyObject **data = <PyObject **>buf.buffer
+ Py_CLEAR(data[idx1 + 2*idx2])
+ res = buf[idx1, idx2] # takes None
+ buf[idx1, idx2] = obj
+ return res
+
+@testcase
def assign_temporary_to_object(object[object] buf):
"""
See comments on printbuf_object above.
diff --git a/tests/bugs.txt b/tests/bugs.txt
index e853b4526..27458889c 100644
--- a/tests/bugs.txt
+++ b/tests/bugs.txt
@@ -6,7 +6,6 @@ class_attribute_init_values_T18
unsignedbehaviour_T184
missing_baseclass_in_predecl_T262
cfunc_call_tuple_args_T408
-genexpr_iterable_lookup_T600
generator_expressions_in_class
for_from_pyvar_loop_T601
temp_sideeffects_T654 # not really a bug, Cython warns about it
diff --git a/tests/build/cythonize_options.srctree b/tests/build/cythonize_options.srctree
index fcef9645b..0dc7f724f 100644
--- a/tests/build/cythonize_options.srctree
+++ b/tests/build/cythonize_options.srctree
@@ -49,5 +49,5 @@ def mod_int_c(int a, int b):
assert mod_int_c(-1, 10) < 0
# unraisable exceptions should produce a warning
-cdef int no_exc_propagate():
+cdef int no_exc_propagate() noexcept:
raise TypeError()
diff --git a/tests/build/depfile_package_cython.srctree b/tests/build/depfile_package_cython.srctree
new file mode 100644
index 000000000..ccb1dc230
--- /dev/null
+++ b/tests/build/depfile_package_cython.srctree
@@ -0,0 +1,61 @@
+"""
+PYTHON -c 'import os; os.makedirs("builddir/pkg/sub")'
+CYTHON -M pkg/test.pyx -o builddir/pkg/test.c
+CYTHON --depfile pkg/sub/test.pyx -o builddir/pkg/sub/test.c
+PYTHON check.py
+"""
+
+######## check.py ########
+
+import os.path
+
+def pkgpath(*args):
+ return os.path.join('pkg', *args)
+
+with open(os.path.join("builddir", "pkg", "test.c.dep"), "r") as f:
+ contents = f.read().replace("\\\n", " ").replace("\n", " ")
+
+assert sorted(contents.split()) == sorted([os.path.join('builddir', 'pkg', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('test.pxd'), pkgpath('test.pyx')]), contents
+
+
+with open(os.path.join("builddir", "pkg", "sub", "test.c.dep"), "r") as f:
+ contents = f.read().replace("\\\n", " ").replace("\n", " ")
+
+contents = [os.path.relpath(entry, '.')
+ if os.path.isabs(entry) else entry for entry in contents.split()]
+assert sorted(contents) == sorted([os.path.join('builddir', 'pkg', 'sub', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('sub', 'test.pyx'), pkgpath('test.pxd')]), contents # last is really one level up
+
+######## pkg/__init__.py ########
+
+
+######## pkg/test.pyx ########
+
+TEST = "pkg.test"
+
+include "sub/incl.pxi"
+
+cdef object get_str():
+ return TEST
+
+
+######## pkg/test.pxd ########
+
+cdef object get_str()
+
+
+######## pkg/sub/__init__.py ########
+
+
+######## pkg/sub/test.pyx ########
+# cython: language_level=3
+
+from ..test cimport get_str
+
+include 'incl.pxi'
+
+TEST = 'pkg.sub.test'
+
+
+######## pkg/sub/incl.pxi ########
+
+pass
diff --git a/tests/build/depfile_package.srctree b/tests/build/depfile_package_cythonize.srctree
index d96294b27..d68e82ece 100644
--- a/tests/build/depfile_package.srctree
+++ b/tests/build/depfile_package_cythonize.srctree
@@ -7,10 +7,13 @@ PYTHON package_test.py
import os.path
+def pkgpath(*args):
+ return os.path.join('pkg', *args)
+
with open(os.path.join("pkg", "test.c.dep"), "r") as f:
contents = f.read().replace("\\\n", " ").replace("\n", " ")
-assert sorted(contents.split()) == sorted(['test.c:', os.path.join('sub', 'incl.pxi'), 'test.pxd', 'test.pyx']), contents
+assert sorted(contents.split()) == sorted([pkgpath('test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('test.pxd'), pkgpath('test.pyx')]), contents
with open(os.path.join("pkg", "sub", "test.c.dep"), "r") as f:
@@ -18,7 +21,7 @@ with open(os.path.join("pkg", "sub", "test.c.dep"), "r") as f:
contents = [os.path.relpath(entry, '.')
if os.path.isabs(entry) else entry for entry in contents.split()]
-assert sorted(contents) == sorted(['test.c:', 'incl.pxi', 'test.pyx', os.path.join('pkg', 'test.pxd')]), contents # last is really one level up
+assert sorted(contents) == sorted([pkgpath('sub', 'test.c:'), pkgpath('sub', 'incl.pxi'), pkgpath('sub', 'test.pyx'), pkgpath('test.pxd')]), contents # last is really one level up
######## pkg/__init__.py ########
diff --git a/tests/compile/branch_hints.pyx b/tests/compile/branch_hints.pyx
index 575ee6cba..e6bd0b5c3 100644
--- a/tests/compile/branch_hints.pyx
+++ b/tests/compile/branch_hints.pyx
@@ -82,7 +82,7 @@ def if_elif_raise_else_raise(x):
"//IfClauseNode[@branch_hint = 'likely']",
"//IfClauseNode[not(@branch_hint)]",
)
-cpdef int nogil_if_raise(int x) nogil except -1:
+cpdef int nogil_if_raise(int x) except -1 nogil:
if x:
raise TypeError()
elif not x:
diff --git a/tests/compile/buildenv.pyx b/tests/compile/buildenv.pyx
index 01f29883a..0f06ccc71 100644
--- a/tests/compile/buildenv.pyx
+++ b/tests/compile/buildenv.pyx
@@ -36,6 +36,8 @@ cdef extern from *:
cdef int CYTHON_COMPILING_IN_CPYTHON
cdef int CYTHON_COMPILING_IN_LIMITED_API
cdef int CYTHON_COMPILING_IN_PYPY
+ cdef int CYTHON_COMPILING_IN_GRAAL
+ cdef int CYTHON_COMPILING_IN_NOGIL
cdef int CYTHON_USE_PYLONG_INTERNALS
cdef int CYTHON_USE_PYLIST_INTERNALS
cdef int CYTHON_USE_UNICODE_INTERNALS
@@ -79,6 +81,8 @@ PY_VERSION_HEX 0x{PY_VERSION_HEX:X}
CYTHON_COMPILING_IN_CPYTHON {CYTHON_COMPILING_IN_CPYTHON}
CYTHON_COMPILING_IN_LIMITED_API {CYTHON_COMPILING_IN_LIMITED_API}
CYTHON_COMPILING_IN_PYPY {CYTHON_COMPILING_IN_PYPY}
+CYTHON_COMPILING_IN_GRAAL {CYTHON_COMPILING_IN_GRAAL}
+CYTHON_COMPILING_IN_NOGIL {CYTHON_COMPILING_IN_NOGIL}
CYTHON_USE_PYLONG_INTERNALS {CYTHON_USE_PYLONG_INTERNALS}
CYTHON_USE_PYLIST_INTERNALS {CYTHON_USE_PYLIST_INTERNALS}
diff --git a/tests/compile/c_directives.pyx b/tests/compile/c_directives.pyx
index 0ede90ba8..ee19e652f 100644
--- a/tests/compile/c_directives.pyx
+++ b/tests/compile/c_directives.pyx
@@ -2,6 +2,8 @@
# cython: boundscheck = False
# cython: ignoreme = OK
# cython: warn.undeclared = False
+# cython: test_assert_c_code_has = Generated by Cython
+# cython: test_fail_if_c_code_has = Generated by Python
# This testcase is most useful if you inspect the generated C file
diff --git a/tests/compile/cpp_nogil.pyx b/tests/compile/cpp_nogil.pyx
index 1007054dc..658dc37cb 100644
--- a/tests/compile/cpp_nogil.pyx
+++ b/tests/compile/cpp_nogil.pyx
@@ -19,5 +19,5 @@ with nogil:
# We can override nogil methods as with gil methods.
cdef cppclass WithGilSubclass(NoGilTest1):
- void doSomething() with gil:
+ void doSomething() noexcept with gil:
print "have the gil"
diff --git a/tests/compile/declarations.srctree b/tests/compile/declarations.srctree
index babf2e4e3..bfbbcd4b3 100644
--- a/tests/compile/declarations.srctree
+++ b/tests/compile/declarations.srctree
@@ -40,7 +40,7 @@ cdef extern int a(int[][3], int[][3][5])
cdef void f():
cdef void *p=NULL
global ifnp, cpa
- ifnp = <int (*)()>p
+ ifnp = <int (*)() noexcept>p
cdef char *g():
pass
diff --git a/tests/compile/excvalcheck.h b/tests/compile/excvalcheck.h
index 4c92acd2b..ba7a760e1 100644
--- a/tests/compile/excvalcheck.h
+++ b/tests/compile/excvalcheck.h
@@ -1,12 +1,6 @@
-#ifdef __cplusplus
-extern "C" {
-#endif
extern DL_EXPORT(int) spam(void);
extern DL_EXPORT(void) grail(void);
extern DL_EXPORT(char *)tomato(void);
-#ifdef __cplusplus
-}
-#endif
int spam(void) {return 0;}
void grail(void) {return;}
diff --git a/tests/compile/fromimport.pyx b/tests/compile/fromimport.pyx
index 46f7b5442..e84b26a97 100644
--- a/tests/compile/fromimport.pyx
+++ b/tests/compile/fromimport.pyx
@@ -6,10 +6,34 @@ def f():
from spam import eggs as ova
from . import spam
from ... import spam
+ from .. . import spam
+ from . .. import spam
+ from . . . import spam
from .. import spam, foo
+ from . . import spam, foo
from ... import spam, foobar
+ from .. . import spam, foobar
+ from . .. import spam, foobar
+ from . . . import spam, foobar
from .spam import foo
+ from . spam import foo
from ...spam import foo, bar
+ from .. . spam import foo, bar
+ from . .. spam import foo, bar
+ from . . . spam import foo, bar
from ...spam.foo import bar
+ from ... spam.foo import bar
+ from .. . spam.foo import bar
+ from . .. spam.foo import bar
+ from . . . spam.foo import bar
from ...spam.foo import foo, bar
+ from ... spam.foo import foo, bar
+ from .. . spam.foo import foo, bar
+ from . .. spam.foo import foo, bar
+ from . . . spam.foo import foo, bar
from ...spam.foo import (foo, bar)
+ from ... spam.foo import (foo, bar)
+ from .. . spam.foo import (foo, bar)
+ from .. . spam.foo import (foo, bar)
+ from . .. spam.foo import (foo, bar)
+ from . . . spam.foo import (foo, bar)
diff --git a/tests/compile/fromimport_star.pyx b/tests/compile/fromimport_star.pyx
index 6c19476b7..80542dddb 100644
--- a/tests/compile/fromimport_star.pyx
+++ b/tests/compile/fromimport_star.pyx
@@ -2,5 +2,12 @@
from spam import *
from ...spam.foo import *
+from ... spam.foo import *
+from .. . spam.foo import *
+from . . . spam.foo import *
+from . .. spam.foo import *
from . import *
from ... import *
+from .. . import *
+from . .. import *
+from . . . import *
diff --git a/tests/compile/fused_buffers.pyx b/tests/compile/fused_buffers.pyx
new file mode 100644
index 000000000..73b0315ed
--- /dev/null
+++ b/tests/compile/fused_buffers.pyx
@@ -0,0 +1,16 @@
+# mode: compile
+
+# cython: test_assert_c_code_has = __Pyx_ImportNumPyArrayTypeIfAvailable
+# cython: test_assert_c_code_has = ndarray
+
+# counterpart test to fused_no_numpy - buffer types are compared against Numpy
+# dtypes as a quick test. fused_no_numpy tests that the mechanism isn't
+# accidentally generated, while this just confirms that the same mechanism is
+# still in use
+
+ctypedef fused IntOrFloat:
+ int
+ float
+
+def f(IntOrFloat[:] x):
+ return x
diff --git a/tests/compile/fused_no_numpy.pyx b/tests/compile/fused_no_numpy.pyx
new file mode 100644
index 000000000..efb49c322
--- /dev/null
+++ b/tests/compile/fused_no_numpy.pyx
@@ -0,0 +1,13 @@
+# mode: compile
+
+# cython: test_fail_if_c_code_has = __Pyx_ImportNumPyArrayTypeIfAvailable
+
+ctypedef fused IntOrFloat:
+ int
+ float
+
+# This function does not use buffers so has no reason to import numpy to
+# look up dtypes. fused_buffers.pyx is the corresponding test for the case
+# where numpy is imported
+def f(IntOrFloat x):
+ return x
diff --git a/tests/compile/fused_redeclare_T3111.pyx b/tests/compile/fused_redeclare_T3111.pyx
index 04862ae88..bc3d54a99 100644
--- a/tests/compile/fused_redeclare_T3111.pyx
+++ b/tests/compile/fused_redeclare_T3111.pyx
@@ -27,10 +27,10 @@ _WARNINGS = """
36:10: 'cpdef_cname_method' redeclared
# from MemoryView.pyx
-958:29: Ambiguous exception value, same as default return value: 0
-958:29: Ambiguous exception value, same as default return value: 0
-983:46: Ambiguous exception value, same as default return value: 0
-983:46: Ambiguous exception value, same as default return value: 0
-1073:29: Ambiguous exception value, same as default return value: 0
-1073:29: Ambiguous exception value, same as default return value: 0
+977:29: Ambiguous exception value, same as default return value: 0
+977:29: Ambiguous exception value, same as default return value: 0
+1018:46: Ambiguous exception value, same as default return value: 0
+1018:46: Ambiguous exception value, same as default return value: 0
+1108:29: Ambiguous exception value, same as default return value: 0
+1108:29: Ambiguous exception value, same as default return value: 0
"""
diff --git a/tests/compile/module_name_arg.srctree b/tests/compile/module_name_arg.srctree
new file mode 100644
index 000000000..81e75b008
--- /dev/null
+++ b/tests/compile/module_name_arg.srctree
@@ -0,0 +1,52 @@
+# Test that we can set module name with --module-name arg to cython
+CYTHON a.pyx
+CYTHON --module-name w b.pyx
+CYTHON --module-name my_module.submod.x c.pyx
+PYTHON setup.py build_ext --inplace
+PYTHON checks.py
+
+######## checks.py ########
+
+from importlib import import_module
+
+try:
+ exc = ModuleNotFoundError
+except NameError:
+ exc = ImportError
+
+for module_name, should_import in (
+ ('a', True),
+ ('b', False),
+ ('w', True),
+ ('my_module.submod.x', True),
+ ('c', False),
+ ):
+ try:
+ import_module(module_name)
+ except exc:
+ if should_import:
+ assert False, "Cannot import module " + module_name
+ else:
+ if not should_import:
+ assert False, ("Can import module " + module_name +
+ " but import should not be possible")
+
+
+######## setup.py ########
+
+from distutils.core import setup
+from distutils.extension import Extension
+
+setup(
+ ext_modules = [
+ Extension("a", ["a.c"]),
+ Extension("w", ["b.c"]),
+ Extension("my_module.submod.x", ["c.c"]),
+ ],
+)
+
+######## a.pyx ########
+######## b.pyx ########
+######## c.pyx ########
+######## my_module/__init__.py ########
+######## my_module/submod/__init__.py ########
diff --git a/tests/compile/nogil.h b/tests/compile/nogil.h
index 42878109b..764a3fc8a 100644
--- a/tests/compile/nogil.h
+++ b/tests/compile/nogil.h
@@ -1,25 +1,13 @@
-#ifdef __cplusplus
-extern "C" {
-#endif
extern DL_EXPORT(void) e1(void);
extern DL_EXPORT(int*) e2(void);
-#ifdef __cplusplus
-}
-#endif
void e1(void) {return;}
int* e2(void) {return 0;}
-#ifdef __cplusplus
-extern "C" {
-#endif
extern DL_EXPORT(PyObject *) g(PyObject*);
extern DL_EXPORT(void) g2(PyObject*);
-#ifdef __cplusplus
-}
-#endif
PyObject *g(PyObject* o) {if (o) {}; return 0;}
void g2(PyObject* o) {if (o) {}; return;}
diff --git a/tests/compile/publicapi_pxd_mix.pxd b/tests/compile/publicapi_pxd_mix.pxd
index 09452f116..414274d45 100644
--- a/tests/compile/publicapi_pxd_mix.pxd
+++ b/tests/compile/publicapi_pxd_mix.pxd
@@ -61,7 +61,7 @@ cdef public api void bar3()
cdef inline void* spam (object o) except NULL: return NULL
cdef void* spam0(object o) except NULL
cdef public void* spam1(object o) except NULL
-cdef api void* spam2(object o) nogil except NULL
+cdef api void* spam2(object o) except NULL nogil
cdef public api void* spam3(object o) except NULL with gil
# --
diff --git a/tests/compile/publicapi_pxd_mix.pyx b/tests/compile/publicapi_pxd_mix.pyx
index 588f6b79c..dd748053f 100644
--- a/tests/compile/publicapi_pxd_mix.pyx
+++ b/tests/compile/publicapi_pxd_mix.pyx
@@ -15,7 +15,7 @@ cdef public api void bar3(): pass
cdef void* spam0(object o) except NULL: return NULL
cdef public void* spam1(object o) except NULL: return NULL
-cdef api void* spam2(object o) nogil except NULL: return NULL
+cdef api void* spam2(object o) except NULL nogil: return NULL
cdef public api void* spam3(object o) except NULL with gil: return NULL
cdef int i0 = 0 # XXX This should not be required!
diff --git a/tests/compile/pxd_mangling_names.srctree b/tests/compile/pxd_mangling_names.srctree
new file mode 100644
index 000000000..3797fc0f9
--- /dev/null
+++ b/tests/compile/pxd_mangling_names.srctree
@@ -0,0 +1,46 @@
+# mode: compile
+# ticket: 2940
+
+PYTHON setup.py build_ext --inplace
+PYTHON -c "import a; a.test()"
+
+######## setup.py ########
+
+from Cython.Build import cythonize
+from Cython.Distutils.extension import Extension
+from distutils.core import setup
+
+setup(
+ ext_modules=cythonize([Extension("a", ["a.py", "b.c"])]),
+)
+
+######## a.pxd ########
+
+cdef public int foo()
+
+cdef extern from "b.h":
+ cpdef int bar()
+
+######## a.py ########
+
+def foo():
+ return 42
+
+def test():
+ assert bar() == 42
+
+######## b.h ########
+
+#ifndef B_H
+#define B_H
+
+int bar();
+
+#endif
+
+######## b.c ########
+
+#include "a.h"
+
+int bar() { return foo(); }
+
diff --git a/tests/errors/builtin_type_inheritance.pyx b/tests/errors/builtin_type_inheritance.pyx
index 1c6ad31e1..a85f7a133 100644
--- a/tests/errors/builtin_type_inheritance.pyx
+++ b/tests/errors/builtin_type_inheritance.pyx
@@ -8,11 +8,9 @@ cdef class MyTuple(tuple):
cdef class MyBytes(bytes):
pass
-cdef class MyStr(str): # only in Py2, but can't know that during compilation
- pass
+# str is also included in this in Py2, but checked at runtime instead
_ERRORS = """
5:19: inheritance from PyVarObject types like 'tuple' is not currently supported
8:19: inheritance from PyVarObject types like 'bytes' is not currently supported
-11:17: inheritance from PyVarObject types like 'str' is not currently supported
"""
diff --git a/tests/errors/cfuncptr.pyx b/tests/errors/cfuncptr.pyx
new file mode 100644
index 000000000..f07ef2167
--- /dev/null
+++ b/tests/errors/cfuncptr.pyx
@@ -0,0 +1,36 @@
+# mode: error
+
+cdef int exceptmaybeminus2(int bad) except ?-2:
+ if bad:
+ raise RuntimeError
+ else:
+ return 0
+
+def fail_exceptmaybeminus2(bad):
+ cdef int (*fptr_a)(int) except -2
+ cdef int (*fptr_b)(int) except -1
+ cdef int (*fptr_c)(int) except ?-1
+ fptr_a = exceptmaybeminus2
+ fptr_b = exceptmaybeminus2
+ fptr_c = exceptmaybeminus2
+
+cdef extern from *:
+ # define this as extern since Cython converts internal "except*" to "except -1"
+ cdef int exceptstar(int bad) except *
+
+def fail_exceptstar(bad):
+ cdef int (*fptr_a)(int) noexcept
+ cdef int (*fptr_b)(int) except -1
+ cdef int (*fptr_c)(int) except ?-1
+ fptr_a = exceptstar
+ fptr_b = exceptstar
+ fptr_c = exceptstar
+
+_ERRORS = """
+13:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -2'
+14:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except -1'
+15:13: Cannot assign type 'int (int) except? -2' to 'int (*)(int) except? -1'
+25:13: Cannot assign type 'int (int) except *' to 'int (*)(int) noexcept'
+26:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except -1'
+27:13: Cannot assign type 'int (int) except *' to 'int (*)(int) except? -1'
+"""
diff --git a/tests/errors/cpp_increment.pyx b/tests/errors/cpp_increment.pyx
new file mode 100644
index 000000000..45e978d95
--- /dev/null
+++ b/tests/errors/cpp_increment.pyx
@@ -0,0 +1,33 @@
+# mode: error
+
+cimport cython
+
+cdef extern from *:
+ cdef cppclass Foo:
+ Foo operator++()
+ Foo operator--()
+
+ cdef cppclass Bar:
+ Bar operator++(int)
+ Bar operator--(int)
+
+cdef void foo():
+ cdef Foo f
+ cdef Bar b
+ cython.operator.postincrement(f)
+ cython.operator.postincrement(b)
+ cython.operator.postdecrement(f)
+ cython.operator.postdecrement(b)
+
+ cython.operator.preincrement(f)
+ cython.operator.preincrement(b)
+ cython.operator.predecrement(f)
+ cython.operator.predecrement(b)
+
+
+_ERRORS = u"""
+17:19: No 'operator++(int)' declared for postfix '++' (operand type is 'Foo')
+19:19: No 'operator--(int)' declared for postfix '--' (operand type is 'Foo')
+23:19: No match for 'operator++' (operand type is 'Bar')
+25:19: No match for 'operator--' (operand type is 'Bar')
+"""
diff --git a/tests/errors/cpp_object_template.pyx b/tests/errors/cpp_object_template.pyx
index 029293e39..e1a15c905 100644
--- a/tests/errors/cpp_object_template.pyx
+++ b/tests/errors/cpp_object_template.pyx
@@ -18,7 +18,7 @@ def memview():
vmv.push_back(array.array("i", [1,2,3]))
_ERRORS = u"""
-10:16: Python object type 'Python object' cannot be used as a template argument
-12:16: Python object type 'A' cannot be used as a template argument
-17:19: Reference-counted type 'int[:]' cannot be used as a template argument
+10:15: Python object type 'Python object' cannot be used as a template argument
+12:15: Python object type 'A' cannot be used as a template argument
+17:15: Reference-counted type 'int[:]' cannot be used as a template argument
"""
diff --git a/tests/errors/dataclass_e1.pyx b/tests/errors/dataclass_e1.pyx
index 39337ba6d..95d67ad7d 100644
--- a/tests/errors/dataclass_e1.pyx
+++ b/tests/errors/dataclass_e1.pyx
@@ -1,5 +1,5 @@
# mode: error
-
+# tag: warnings
cimport cython
@cython.dataclasses.dataclass(1, shouldnt_be_here=True, init=5, unsafe_hash=True)
diff --git a/tests/errors/dataclass_e5.pyx b/tests/errors/dataclass_e5.pyx
new file mode 100644
index 000000000..e86adf47e
--- /dev/null
+++ b/tests/errors/dataclass_e5.pyx
@@ -0,0 +1,21 @@
+# mode: error
+# tag: warnings
+
+cimport cython
+
+@cython.dataclasses.dataclass
+cdef class C:
+ a: int
+ b: long
+ c: Py_ssize_t
+ d: float
+ e: double
+
+
+_WARNINGS = """
+9:7: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
+10:7: Found C type 'Py_ssize_t' in a Python annotation. Did you mean to use a Python type?
+10:7: Unknown type declaration in annotation, ignoring
+12:7: Found C type 'double' in a Python annotation. Did you mean to use a Python type?
+12:7: Unknown type declaration in annotation, ignoring
+"""
diff --git a/tests/errors/dataclass_e6.pyx b/tests/errors/dataclass_e6.pyx
new file mode 100644
index 000000000..64dc1ae05
--- /dev/null
+++ b/tests/errors/dataclass_e6.pyx
@@ -0,0 +1,23 @@
+# mode: error
+
+from cython.dataclasses cimport dataclass
+
+@dataclass
+cdef class BaseDataclass:
+ a: str = "value"
+
+@dataclass
+cdef class MainDataclass(BaseDataclass):
+ a: str = "new value"
+
+cdef class Intermediate(BaseDataclass):
+ pass
+
+@dataclass
+cdef class AnotherDataclass(Intermediate):
+ a: str = "ooops"
+
+_ERRORS = """
+11:4: Cannot redeclare inherited fields in Cython dataclasses
+18:4: Cannot redeclare inherited fields in Cython dataclasses
+"""
diff --git a/tests/errors/dataclass_w1.pyx b/tests/errors/dataclass_w1.pyx
new file mode 100644
index 000000000..c0d9790e2
--- /dev/null
+++ b/tests/errors/dataclass_w1.pyx
@@ -0,0 +1,13 @@
+# mode: compile
+# tag: warnings
+
+from dataclass_w1_othermod cimport SomeBase
+from cython.dataclasses cimport dataclass
+
+@dataclass
+cdef class DC(SomeBase):
+ a: str = ""
+
+_WARNINGS = """
+8:5: Cannot reliably handle Cython dataclasses with base types in external modules since it is not possible to tell what fields they have
+"""
diff --git a/tests/errors/dataclass_w1_othermod.pxd b/tests/errors/dataclass_w1_othermod.pxd
new file mode 100644
index 000000000..02dddf492
--- /dev/null
+++ b/tests/errors/dataclass_w1_othermod.pxd
@@ -0,0 +1,3 @@
+# Extern class for test "dataclass_w1"
+cdef class SomeBase:
+ pass
diff --git a/tests/errors/e_decorators.pyx b/tests/errors/e_decorators.pyx
deleted file mode 100644
index 33ef2355d..000000000
--- a/tests/errors/e_decorators.pyx
+++ /dev/null
@@ -1,12 +0,0 @@
-# mode: error
-
-class A:
- pass
-
-@A().a
-def f():
- pass
-
-_ERRORS = u"""
-6:4: Expected a newline after decorator
-"""
diff --git a/tests/errors/e_excvalfunctype.pyx b/tests/errors/e_excvalfunctype.pyx
index a1d978322..25cae47c6 100644
--- a/tests/errors/e_excvalfunctype.pyx
+++ b/tests/errors/e_excvalfunctype.pyx
@@ -1,7 +1,7 @@
# mode: error
ctypedef int (*spamfunc)(int, char *) except 42
-ctypedef int (*grailfunc)(int, char *)
+ctypedef int (*grailfunc)(int, char *) noexcept
cdef grailfunc grail
cdef spamfunc spam
diff --git a/tests/errors/e_invalid_special_cython_modules.py b/tests/errors/e_invalid_special_cython_modules.py
new file mode 100644
index 000000000..950df5c1c
--- /dev/null
+++ b/tests/errors/e_invalid_special_cython_modules.py
@@ -0,0 +1,42 @@
+# mode: error
+# tag: pure, import, cimport
+
+# nok
+
+import cython.imports.libc as libc_import
+import cython.cimports.labc as labc_cimport
+
+from cython.imports import libc
+from cython.cimport.libc import math
+from cython.imports.libc import math
+from cython.cimports.labc import math
+
+import cython.paralel
+import cython.parrallel
+
+import cython.dataclass
+
+# ok
+from cython.cimports.libc import math
+from cython.cimports.libc.math import ceil
+
+
+def libc_math_ceil(x):
+ """
+ >>> libc_math_ceil(1.5)
+ [2, 2]
+ """
+ return [int(n) for n in [ceil(x), math.ceil(x)]]
+
+
+_ERRORS = """
+6:7: 'cython.imports.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ?
+7:7: 'labc.pxd' not found
+9:0: 'cython.imports' is not a valid cython.* module. Did you mean 'cython.cimports' ?
+10:0: 'cython.cimport.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ?
+11:0: 'cython.imports.libc' is not a valid cython.* module. Did you mean 'cython.cimports' ?
+12:0: 'labc/math.pxd' not found
+14:7: 'cython.paralel' is not a valid cython.* module. Did you mean 'cython.parallel' ?
+15:7: 'cython.parrallel' is not a valid cython.* module. Did you mean 'cython.parallel' ?
+17:7: 'cython.dataclass' is not a valid cython.* module. Did you mean 'cython.dataclasses' ?
+"""
diff --git a/tests/errors/e_nogilfunctype.pyx b/tests/errors/e_nogilfunctype.pyx
index ccac37b7e..ac06af27e 100644
--- a/tests/errors/e_nogilfunctype.pyx
+++ b/tests/errors/e_nogilfunctype.pyx
@@ -10,7 +10,7 @@ fp = f
fp = <fp_t>f
_ERRORS = u"""
-9:5: Cannot assign type 'void (void)' to 'void (*)(void) nogil'
+9:5: Cannot assign type 'void (void) noexcept' to 'void (*)(void) noexcept nogil'
"""
_WARNINGS = """
diff --git a/tests/errors/e_pure_cimports.pyx b/tests/errors/e_pure_cimports.pyx
index 231a95959..ef81182ad 100644
--- a/tests/errors/e_pure_cimports.pyx
+++ b/tests/errors/e_pure_cimports.pyx
@@ -1,7 +1,7 @@
# mode: error
# tag: pure, import, cimport
-import cython.cimportsy # FIXME: not currently an error?
+import cython.cimportsy
import cython.cimports
import cython.cimports.libc
@@ -20,6 +20,7 @@ from cython.cimports cimport libc
_ERRORS = """
+4:7: 'cython.cimportsy' is not a valid cython.* module. Did you mean 'cython.cimports' ?
6:7: Cannot cimport the 'cython.cimports' package directly, only submodules.
7:7: Python cimports must use 'from cython.cimports... import ...' or 'import ... as ...', not just 'import ...'
8:7: Cannot cimport the 'cython.cimports' package directly, only submodules.
diff --git a/tests/errors/e_relative_cimport.pyx b/tests/errors/e_relative_cimport.pyx
index 36a134411..709cbd71d 100644
--- a/tests/errors/e_relative_cimport.pyx
+++ b/tests/errors/e_relative_cimport.pyx
@@ -9,7 +9,7 @@ from . cimport e_relative_cimport
_ERRORS="""
4:0: relative cimport beyond main package is not allowed
-5:0: relative cimport beyond main package is not allowed
+5:0: relative cimport from non-package directory is not allowed
6:0: relative cimport beyond main package is not allowed
-7:0: relative cimport beyond main package is not allowed
+7:0: relative cimport from non-package directory is not allowed
"""
diff --git a/tests/errors/e_typing_errors.pyx b/tests/errors/e_typing_errors.pyx
new file mode 100644
index 000000000..832f68d90
--- /dev/null
+++ b/tests/errors/e_typing_errors.pyx
@@ -0,0 +1,59 @@
+# mode: error
+
+import cython
+
+try:
+ from typing import Optional, ClassVar
+except ImportError:
+ pass
+
+
+# not OK
+
+def optional_cython_types(Optional[cython.int] i, Optional[cython.double] d, Optional[cython.float] f,
+ Optional[cython.complex] c, Optional[cython.long] l, Optional[cython.longlong] ll):
+ pass
+
+
+MyStruct = cython.struct(a=cython.int, b=cython.double)
+
+def optional_cstruct(Optional[MyStruct] x):
+ pass
+
+
+def optional_pytypes(Optional[int] i, Optional[float] f, Optional[complex] c, Optional[long] l):
+ pass
+
+
+cdef ClassVar[list] x
+
+
+# OK
+
+def optional_memoryview(double[:] d, Optional[double[:]] o):
+ pass
+
+
+cdef class Cls(object):
+ cdef ClassVar[list] x
+
+
+
+_ERRORS = """
+13:42: typing.Optional[...] cannot be applied to non-Python type int
+13:66: typing.Optional[...] cannot be applied to non-Python type double
+13:93: typing.Optional[...] cannot be applied to non-Python type float
+14:42: typing.Optional[...] cannot be applied to non-Python type double complex
+14:70: typing.Optional[...] cannot be applied to non-Python type long
+14:95: typing.Optional[...] cannot be applied to non-Python type long long
+24:30: typing.Optional[...] cannot be applied to non-Python type int
+24:47: typing.Optional[...] cannot be applied to non-Python type float
+24:87: typing.Optional[...] cannot be applied to non-Python type long
+
+20:30: typing.Optional[...] cannot be applied to non-Python type MyStruct
+
+28:20: Modifier 'typing.ClassVar' is not allowed here.
+
+# FIXME: this should be ok :-?
+33:52: typing.Optional[...] cannot be applied to non-Python type double[:]
+"""
diff --git a/tests/errors/e_typing_optional.py b/tests/errors/e_typing_optional.py
index e75638e00..6facfeea4 100644
--- a/tests/errors/e_typing_optional.py
+++ b/tests/errors/e_typing_optional.py
@@ -8,11 +8,10 @@ except ImportError:
pass
-def optional_pytypes(i: Optional[int], f: Optional[float]):
- pass
-
+# not OK
-def optional_cython_types(i: Optional[cython.int], d: Optional[cython.double], f: Optional[cython.float]):
+def optional_cython_types(i: Optional[cython.int], d: Optional[cython.double], f: Optional[cython.float],
+ c: Optional[cython.complex], l: Optional[cython.long], ll: Optional[cython.longlong]):
pass
@@ -22,13 +21,23 @@ def optional_cstruct(x: Optional[MyStruct]):
pass
+# OK
+
+def optional_pytypes(i: Optional[int], f: Optional[float], c: Optional[complex], l: Optional[long]):
+ pass
+
+
+def optional_memoryview(d: double[:], o: Optional[double[:]]):
+ pass
+
+
_ERRORS = """
-15:29: Only Python type arguments can use typing.Optional[...]
-15:54: Only Python type arguments can use typing.Optional[...]
-15:82: Only Python type arguments can use typing.Optional[...]
-21:24: Only Python type arguments can use typing.Optional[...]
-
-# FIXME: these should be allowed!
-11:24: Only Python type arguments can use typing.Optional[...]
-11:42: Only Python type arguments can use typing.Optional[...]
+13:44: typing.Optional[...] cannot be applied to non-Python type int
+13:69: typing.Optional[...] cannot be applied to non-Python type double
+13:97: typing.Optional[...] cannot be applied to non-Python type float
+14:44: typing.Optional[...] cannot be applied to non-Python type double complex
+14:73: typing.Optional[...] cannot be applied to non-Python type long
+14:100: typing.Optional[...] cannot be applied to non-Python type long long
+
+20:33: typing.Optional[...] cannot be applied to non-Python type MyStruct
"""
diff --git a/tests/errors/incomplete_varadic.pyx b/tests/errors/incomplete_varadic.pyx
new file mode 100644
index 000000000..1695a874d
--- /dev/null
+++ b/tests/errors/incomplete_varadic.pyx
@@ -0,0 +1,8 @@
+# mode: error
+
+cdef error_time(bool its_fine, .):
+ pass
+
+_ERRORS = u"""
+3: 31: Expected an identifier, found '.'
+"""
diff --git a/tests/errors/nogil.pyx b/tests/errors/nogil.pyx
index aa3011d00..dfdebeebd 100644
--- a/tests/errors/nogil.pyx
+++ b/tests/errors/nogil.pyx
@@ -90,7 +90,7 @@ def bare_pyvar_name(object x):
with nogil:
x
-cdef int fstrings(int x, object obj) nogil except -1:
+cdef int fstrings(int x, object obj) except -1 nogil:
f"" # allowed
f"a" # allowed
f"a"f"b" # allowed
diff --git a/tests/errors/nogilfunctype.pyx b/tests/errors/nogilfunctype.pyx
index 91127bee4..c0ca2bb15 100644
--- a/tests/errors/nogilfunctype.pyx
+++ b/tests/errors/nogilfunctype.pyx
@@ -12,5 +12,5 @@ gp = g
fp = f
_ERRORS = u"""
-12:5: Cannot assign type 'void (void)' to 'void (*)(void) nogil'
+12:5: Cannot assign type 'void (void) noexcept' to 'void (*)(void) noexcept nogil'
"""
diff --git a/tests/errors/w_uninitialized.pyx b/tests/errors/w_uninitialized.pyx
index c2046ce19..066f9ed5b 100644
--- a/tests/errors/w_uninitialized.pyx
+++ b/tests/errors/w_uninitialized.pyx
@@ -127,10 +127,10 @@ _ERRORS = """
66:10: local variable 'foo' referenced before assignment
71:14: local variable 'exc' referenced before assignment
71:19: local variable 'msg' referenced before assignment
-78:4: local variable 'decorator' referenced before assignment
+78:5: local variable 'decorator' referenced before assignment
85:16: local variable 'default' referenced before assignment
91:14: local variable 'bar' referenced before assignment
-97:4: local variable 'decorator' referenced before assignment
+97:5: local variable 'decorator' referenced before assignment
104:24: local variable 'Meta' referenced before assignment
110:15: local variable 'args' referenced before assignment
110:23: local variable 'kwargs' referenced before assignment
diff --git a/tests/macos_cpp_bugs.txt b/tests/macos_cpp_bugs.txt
index e5be6475a..e4c4cc608 100644
--- a/tests/macos_cpp_bugs.txt
+++ b/tests/macos_cpp_bugs.txt
@@ -12,3 +12,4 @@ cpp_stl_algo_comparison_ops
cpp_stl_algo_permutation_ops
cpp_stl_algo_sorted_ranges_set_ops
cpp_stl_algo_sorted_ranges_other_ops
+cpp_stl_bit_cpp20
diff --git a/tests/memoryview/cythonarray.pyx b/tests/memoryview/cythonarray.pyx
index 0dc823581..15d61d086 100644
--- a/tests/memoryview/cythonarray.pyx
+++ b/tests/memoryview/cythonarray.pyx
@@ -130,7 +130,7 @@ cdef int *getp(int dim1=10, int dim2=10, dim3=1) except NULL:
return p
-cdef void callback_free_data(void *p):
+cdef void callback_free_data(void *p) noexcept:
print 'callback free data called'
free(p)
@@ -286,3 +286,39 @@ def test_char_array_in_python_api(*shape):
arr = array(shape=shape, itemsize=sizeof(char), format='c', mode='c')
arr[:] = b'x'
return arr
+
+def test_is_Sequence():
+ """
+ >>> test_is_Sequence()
+ 1
+ 1
+ True
+ """
+ import sys
+ if sys.version_info < (3, 3):
+ from collections import Sequence
+ else:
+ from collections.abc import Sequence
+
+ arr = array(shape=(5,), itemsize=sizeof(char), format='c', mode='c')
+ for i in range(arr.shape[0]):
+ arr[i] = f'{i}'.encode('ascii')
+ print(arr.count(b'1')) # test for presence of added collection method
+ print(arr.index(b'1')) # test for presence of added collection method
+
+ if sys.version_info >= (3, 10):
+ # test structural pattern match in Python
+ # (because Cython hasn't implemented it yet, and because the details
+ # of what Python considers a sequence are important)
+ globs = {'arr': arr}
+ exec("""
+match arr:
+ case [*_]:
+ res = True
+ case _:
+ res = False
+""", globs)
+ assert globs['res']
+
+ return isinstance(arr, Sequence)
+
diff --git a/tests/memoryview/memoryview.pyx b/tests/memoryview/memoryview.pyx
index bb8b73780..2c5de40b5 100644
--- a/tests/memoryview/memoryview.pyx
+++ b/tests/memoryview/memoryview.pyx
@@ -443,7 +443,9 @@ def type_infer(double[:, :] arg):
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode")
def memview_iter(double[:, :] arg):
"""
- memview_iter(DoubleMockBuffer("C", range(6), (2,3)))
+ >>> memview_iter(DoubleMockBuffer("C", range(6), (2,3)))
+ acquired C
+ released C
True
"""
cdef double total = 0
@@ -1205,3 +1207,36 @@ def test_conversion_failures():
assert get_refcount(dmb) == dmb_before, "before %s after %s" % (dmb_before, get_refcount(dmb))
else:
assert False, "Conversion should fail!"
+
+def test_is_Sequence(double[:] a):
+ """
+ >>> test_is_Sequence(DoubleMockBuffer(None, range(6), shape=(6,)))
+ 1
+ 1
+ True
+ """
+ if sys.version_info < (3, 3):
+ from collections import Sequence
+ else:
+ from collections.abc import Sequence
+
+ for i in range(a.shape[0]):
+ a[i] = i
+ print(a.count(1.0)) # test for presence of added collection method
+ print(a.index(1.0)) # test for presence of added collection method
+
+ if sys.version_info >= (3, 10):
+ # test structural pattern match in Python
+ # (because Cython hasn't implemented it yet, and because the details
+ # of what Python considers a sequence are important)
+ globs = {'arr': a}
+ exec("""
+match arr:
+ case [*_]:
+ res = True
+ case _:
+ res = False
+""", globs)
+ assert globs['res']
+
+ return isinstance(<object>a, Sequence)
diff --git a/tests/memoryview/memoryview_acq_count.srctree b/tests/memoryview/memoryview_acq_count.srctree
index e7e6dfc69..3bc2f1cc9 100644
--- a/tests/memoryview/memoryview_acq_count.srctree
+++ b/tests/memoryview/memoryview_acq_count.srctree
@@ -35,7 +35,7 @@ cdef Py_ssize_t i
for i in prange(1000000, nogil=True, num_threads=16):
use_slice(m[::2])
-cdef int use_slice(int[:] m) nogil except -1:
+cdef int use_slice(int[:] m) except -1 nogil:
cdef int[:] m2 = m[1:]
m = m2[:-1]
del m, m2
diff --git a/tests/memoryview/memslice.pyx b/tests/memoryview/memslice.pyx
index 4e06c4f41..0de47d9b6 100644
--- a/tests/memoryview/memslice.pyx
+++ b/tests/memoryview/memslice.pyx
@@ -7,7 +7,7 @@
from __future__ import unicode_literals
from cpython.object cimport PyObject
-from cpython.ref cimport Py_INCREF, Py_DECREF
+from cpython.ref cimport Py_INCREF, Py_DECREF, Py_CLEAR
cimport cython
from cython cimport view
@@ -23,6 +23,12 @@ if sys.version_info[0] < 3:
else:
import builtins
+try:
+ from Cython.Tests.this_module_does_not_exist import *
+except ImportError:
+ # Fails, but the existence of "import *" interacted badly with some utility code
+ pass
+
def testcase(func):
@wraps(func)
@@ -1134,6 +1140,49 @@ def assign_temporary_to_object(object[:] buf):
"""
buf[1] = {3-2: 2+(2*4)-2}
+@testcase
+def check_object_nulled_1d(object[:] buf, int idx, obj):
+ """
+ See comments on printbuf_object above.
+
+ >>> a = object()
+ >>> rc1 = get_refcount(a)
+ >>> A = ObjectMockBuffer(None, [a, a])
+ >>> check_object_nulled_1d(A, 0, a)
+ >>> check_object_nulled_1d(A, 1, a)
+ >>> A = ObjectMockBuffer(None, [a, a, a, a], strides=(2,))
+ >>> check_object_nulled_1d(A, 0, a) # only 0 due to stride
+ >>> get_refcount(a) == rc1
+ True
+ """
+ cdef ObjectMockBuffer omb = buf.base
+ cdef PyObject **data = <PyObject**>(omb.buffer)
+ Py_CLEAR(data[idx])
+ res = buf[idx] # takes None
+ buf[idx] = obj
+ return res
+
+@testcase
+def check_object_nulled_2d(object[:, ::1] buf, int idx1, int idx2, obj):
+ """
+ See comments on printbuf_object above.
+
+ >>> a = object()
+ >>> rc1 = get_refcount(a)
+ >>> A = ObjectMockBuffer(None, [a, a, a, a], shape=(2, 2))
+ >>> check_object_nulled_2d(A, 0, 0, a)
+ >>> check_object_nulled_2d(A, 1, 1, a)
+ >>> get_refcount(a) == rc1
+ True
+ """
+ cdef ObjectMockBuffer omb = buf.base
+ cdef PyObject **data = <PyObject**>(omb.buffer)
+ Py_CLEAR(data[idx1 + 2*idx2])
+ res = buf[idx1, idx2] # takes None
+ buf[idx1, idx2] = obj
+ return res
+
+
#
# Test __cythonbufferdefaults__
#
@@ -1679,7 +1728,7 @@ def test_oob():
print a[:, 20]
-cdef int nogil_oob(int[:, :] a) nogil except 0:
+cdef int nogil_oob(int[:, :] a) except 0 nogil:
a[100, 9:]
return 1
@@ -1723,7 +1772,7 @@ def test_nogil_oob2():
a[100, 9:]
@cython.boundscheck(False)
-cdef int cdef_nogil(int[:, :] a) nogil except 0:
+cdef int cdef_nogil(int[:, :] a) except 0 nogil:
cdef int i, j
cdef int[:, :] b = a[::-1, 3:10:2]
for i in range(b.shape[0]):
@@ -2517,6 +2566,7 @@ def test_const_buffer(const int[:] a):
print(a[0])
print(c[-1])
+
@testcase
def test_loop(int[:] a, throw_exception):
"""
@@ -2539,6 +2589,7 @@ def test_loop(int[:] a, throw_exception):
raise ValueError()
print(sum)
+
@testcase
def test_loop_reassign(int[:] a):
"""
@@ -2551,13 +2602,99 @@ def test_loop_reassign(int[:] a):
3
4
5
- released A
15
+ released A
"""
cdef int sum = 0
for ai in a:
sum += ai
print(ai)
a = None # this should not mess up the loop though!
- # release happens here, when the loop temp is released
print(sum)
+ # release happens in the wrapper function
+
+
+@testcase
+def test_arg_in_closure(int [:] a):
+ """
+ >>> A = IntMockBuffer("A", range(6), shape=(6,))
+ >>> inner = test_arg_in_closure(A)
+ acquired A
+ >>> inner()
+ (0, 1)
+
+ The assignment below is just to avoid printing what was collected
+ >>> del inner; ignore_me = gc.collect()
+ released A
+ """
+ def inner():
+ return (a[0], a[1])
+ return inner
+
+
+cdef arg_in_closure_cdef(int [:] a):
+ def inner():
+ return (a[0], a[1])
+ return inner
+
+def test_arg_in_closure_cdef(a):
+ """
+ >>> A = IntMockBuffer("A", range(6), shape=(6,))
+ >>> inner = test_arg_in_closure_cdef(A)
+ acquired A
+ >>> inner()
+ (0, 1)
+
+ The assignment below is just to avoid printing what was collected
+ >>> del inner; ignore_me = gc.collect()
+ released A
+ """
+ return arg_in_closure_cdef(a)
+
+
+@testcase
+def test_local_in_closure(a):
+ """
+ >>> A = IntMockBuffer("A", range(6), shape=(6,))
+ >>> inner = test_local_in_closure(A)
+ acquired A
+ >>> inner()
+ (0, 1)
+
+ The assignment below is just to avoid printing what was collected
+ >>> del inner; ignore_me = gc.collect()
+ released A
+ """
+ cdef int[:] a_view = a
+ def inner():
+ return (a_view[0], a_view[1])
+ return inner
+
+@testcase
+def test_local_in_generator_expression(a, initialize, execute_now):
+ """
+ >>> A1 = IntMockBuffer("A1", range(6), shape=(6,))
+ >>> A2 = IntMockBuffer("A2", range(6), shape=(6,))
+ >>> test_local_in_generator_expression(A1, initialize=False, execute_now=False) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ ...
+ UnboundLocalError...
+
+ >>> test_local_in_generator_expression(A1, initialize=True, execute_now=True)
+ acquired A1
+ released A1
+ True
+
+ >>> genexp = test_local_in_generator_expression(A2, initialize=True, execute_now=False)
+ acquired A2
+ >>> sum(genexp)
+ released A2
+ 2
+ """
+ cdef int[:] a_view
+ if initialize:
+ a_view = a
+ if execute_now:
+ return any(ai > 3 for ai in a_view)
+ else:
+ return (ai > 3 for ai in a_view)
diff --git a/tests/memoryview/numpy_memoryview.pyx b/tests/memoryview/numpy_memoryview.pyx
index 350e94489..2af6bfea4 100644
--- a/tests/memoryview/numpy_memoryview.pyx
+++ b/tests/memoryview/numpy_memoryview.pyx
@@ -248,7 +248,7 @@ cdef extern from "bufaccess.h":
ctypedef unsigned int td_h_ushort # Defined as unsigned short
ctypedef td_h_short td_h_cy_short
-cdef void dealloc_callback(void *data):
+cdef void dealloc_callback(void *data) noexcept:
print "deallocating..."
def build_numarray(array array):
diff --git a/tests/pypy2_bugs.txt b/tests/pypy2_bugs.txt
index 200f0dcf3..1ac25918f 100644
--- a/tests/pypy2_bugs.txt
+++ b/tests/pypy2_bugs.txt
@@ -16,8 +16,9 @@ run.partial_circular_import
# https://foss.heptapod.net/pypy/pypy/issues/3185
run.language_level
run.pure_pxd
+compile.pxd_mangling_names
-# Silly error with doctest matching slightly different string outputs rather than
+# Silly error with doctest matching slightly different string outputs rather than
# an actual bug but one I can't easily resolve
run.with_gil
diff --git a/tests/pypy_bugs.txt b/tests/pypy_bugs.txt
index 1004a93e4..5a27265ee 100644
--- a/tests/pypy_bugs.txt
+++ b/tests/pypy_bugs.txt
@@ -61,3 +61,6 @@ run.exttype_dealloc
# bugs in cpyext
run.special_methods_T561
run.special_methods_T561_py2
+
+# unicode is a PyVarObject on PyPy3
+run.builtin_type_inheritance_T608
diff --git a/tests/run/annotate_html.pyx b/tests/run/annotate_html.pyx
index 3db7bf190..e98891b4f 100644
--- a/tests/run/annotate_html.pyx
+++ b/tests/run/annotate_html.pyx
@@ -1,3 +1,6 @@
+# cython: test_assert_c_code_has = Generated by Cython
+# cython: test_assert_c_code_has = goto __pyx_L0;\n
+
"""
>>> from codecs import open
>>> import os.path as os_path
diff --git a/tests/run/annotation_typing.pyx b/tests/run/annotation_typing.pyx
index 03900061a..4b6b2da16 100644
--- a/tests/run/annotation_typing.pyx
+++ b/tests/run/annotation_typing.pyx
@@ -11,14 +11,14 @@ except ImportError:
pass
-def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'float'} = 4) -> list:
+def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'type': 'long int'} = 4) -> list:
"""
>>> old_dict_syntax([1])
- ('list object', 'int', 'long', 'float')
- [1, 2, 3, 4.0]
+ ('list object', 'Python object', 'long', 'long')
+ [1, 2, 3, 4]
>>> old_dict_syntax([1], 3)
- ('list object', 'int', 'long', 'float')
- [1, 3, 3, 4.0]
+ ('list object', 'Python object', 'long', 'long')
+ [1, 3, 3, 4]
>>> old_dict_syntax(123)
Traceback (most recent call last):
TypeError: Argument 'a' has incorrect type (expected list, got int)
@@ -33,7 +33,7 @@ def old_dict_syntax(a: list, b: "int" = 2, c: {'ctype': 'long int'} = 3, d: {'ty
return a
-def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, o: Optional[tuple] = ()) -> list:
+def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()) -> list:
"""
>>> pytypes_def([1])
('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
@@ -60,7 +60,7 @@ def pytypes_def(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None,
return a
-cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None, o: Optional[tuple] = ()):
+cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None, o: Optional[tuple] = ()):
"""
>>> pytypes_cpdef([1])
('list object', 'Python object', 'Python object', 'double', 'list object', 'tuple object')
@@ -87,7 +87,7 @@ cpdef pytypes_cpdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = No
return a
-cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4, n: list = None):
+cdef c_pytypes_cdef(a: list, b: int = 2, c: long = 3, d: float = 4.0, n: list = None):
print(typeof(a), typeof(b), typeof(c), typeof(d), typeof(n))
a.append(b)
a.append(c)
@@ -111,6 +111,15 @@ def pytypes_cdef(a, b=2, c=3, d=4):
return c_pytypes_cdef(a, b, c, d)
+def pyint(a: int):
+ """
+ >>> large_int = eval('0x'+'F'*64) # definitely bigger than C int64
+ >>> pyint(large_int) == large_int
+ True
+ """
+ return a
+
+
def ctypes_def(a: list, b: cython.int = 2, c: cython.long = 3, d: cython.float = 4) -> list:
"""
>>> ctypes_def([1])
@@ -278,24 +287,28 @@ class LateClass(object):
pass
-def py_float_default(price : float=None, ndigits=4):
+def py_float_default(price : Optional[float]=None, ndigits=4):
"""
Python default arguments should prevent C type inference.
>>> py_float_default()
(None, 4)
- >>> py_float_default(2)
- (2, 4)
+ >>> py_float_default(None)
+ (None, 4)
+ >>> py_float_default(2) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...float...
>>> py_float_default(2.0)
(2.0, 4)
- >>> py_float_default(2, 3)
- (2, 3)
+ >>> py_float_default(2, 3) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...float...
"""
return price, ndigits
cdef class ClassAttribute:
- cls_attr : float = 1.
+ cls_attr : cython.float = 1.
@cython.cfunc
@@ -325,6 +338,36 @@ class HasPtr:
return f"HasPtr({self.a[0]}, {self.b})"
+@cython.annotation_typing(False)
+def turn_off_typing(x: float, d: dict):
+ """
+ >>> turn_off_typing('not a float', []) # ignore the typing
+ ('Python object', 'Python object', 'not a float', [])
+ """
+ return typeof(x), typeof(d), x, d
+
+
+@cython.annotation_typing(False)
+cdef class ClassTurnOffTyping:
+ x: float
+ d: dict
+
+ def get_var_types(self, arg: float):
+ """
+ >>> ClassTurnOffTyping().get_var_types(1.0)
+ ('Python object', 'Python object', 'Python object')
+ """
+ return typeof(self.x), typeof(self.d), typeof(arg)
+
+ @cython.annotation_typing(True)
+ def and_turn_it_back_on_again(self, arg: float):
+ """
+ >>> ClassTurnOffTyping().and_turn_it_back_on_again(1.0)
+ ('Python object', 'Python object', 'double')
+ """
+ return typeof(self.x), typeof(self.d), typeof(arg)
+
+
_WARNINGS = """
14:32: Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.
14:47: Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly.
@@ -332,16 +375,20 @@ _WARNINGS = """
14:77: Dicts should no longer be used as type annotations. Use 'cython.int' etc. directly.
14:85: Python type declaration in signature annotation does not refer to a Python type
14:85: Strings should no longer be used for type declarations. Use 'cython.int' etc. directly.
-36:64: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
-63:68: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
-90:68: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
-274:44: Unknown type declaration in annotation, ignoring
-281:29: Ambiguous types in annotation, ignoring
-298:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute?
+36:40: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
+36:66: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
+63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
+63:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
+90:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
+90:70: PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.
+283:44: Unknown type declaration in annotation, ignoring
+311:15: Annotation ignored since class-level attributes must be Python objects. Were you trying to set up an instance attribute?
+# DUPLICATE:
+63:44: Found Python 2.x type 'long' in a Python annotation. Did you mean to use 'cython.long'?
# BUG:
63:6: 'pytypes_cpdef' redeclared
-146:0: 'struct_io' redeclared
-181:0: 'struct_convert' redeclared
-200:0: 'exception_default' redeclared
-231:0: 'exception_default_uint' redeclared
+155:0: 'struct_io' redeclared
+190:0: 'struct_convert' redeclared
+209:0: 'exception_default' redeclared
+240:0: 'exception_default_uint' redeclared
"""
diff --git a/tests/run/append.pyx b/tests/run/append.pyx
index 1976780d5..dcc3fe7c9 100644
--- a/tests/run/append.pyx
+++ b/tests/run/append.pyx
@@ -1,3 +1,5 @@
+cimport cython
+
class A:
def append(self, x):
print u"appending", x
@@ -94,3 +96,35 @@ def method_name():
'append'
"""
return [].append.__name__
+
+@cython.test_assert_path_exists(
+ '//PythonCapiCallNode')
+def append_optimized(probably_list):
+ """
+ >>> l = []
+ >>> append_optimized(l)
+ >>> l
+ [1]
+ """
+ probably_list.append(1)
+
+cdef class AppendBug:
+ # https://github.com/cython/cython/issues/4828
+ # if the attribute "append" is found it shouldn't be replaced with
+ # __Pyx_PyObject_Append
+ cdef object append
+ def __init__(self, append):
+ self.append = append
+
+@cython.test_fail_if_path_exists(
+ '//PythonCapiCallNode')
+def specific_attribute(AppendBug a):
+ """
+ >>> def append_to_default_arg(a, arg=[]):
+ ... arg.append(a)
+ ... return arg
+ >>> specific_attribute(AppendBug(append_to_default_arg))
+ >>> append_to_default_arg(None)
+ [1, None]
+ """
+ a.append(1)
diff --git a/tests/run/binop_reverse_methods_GH2056.pyx b/tests/run/binop_reverse_methods_GH2056.pyx
index 4938f0d15..43bfcde86 100644
--- a/tests/run/binop_reverse_methods_GH2056.pyx
+++ b/tests/run/binop_reverse_methods_GH2056.pyx
@@ -30,6 +30,12 @@ class Base(object):
'Base.__rpow__(Base(), 2, None)'
>>> pow(Base(), 2, 100)
'Base.__pow__(Base(), 2, 100)'
+ >>> Base() // 1
+ True
+ >>> set() // Base()
+ True
+
+ # version dependent tests for @ and / are external
"""
implemented: cython.bint
@@ -67,6 +73,44 @@ class Base(object):
def __repr__(self):
return "%s()" % (self.__class__.__name__)
+ # The following methods were missed from the initial implementation
+ # that typed 'self'. These tests are a quick test to confirm that
+ # but not the full binop behaviour
+ def __matmul__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __rmatmul__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __truediv__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __rtruediv__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __floordiv__(self, other):
+ return cython.typeof(self) == 'Base'
+
+ def __rfloordiv__(self, other):
+ return cython.typeof(self) == 'Base'
+
+
+if sys.version_info >= (3, 5):
+ __doc__ += """
+ >>> Base() @ 1
+ True
+ >>> set() @ Base()
+ True
+ """
+
+if sys.version_info >= (3, 0):
+ __doc__ += """
+ >>> Base() / 1
+ True
+ >>> set() / Base()
+ True
+ """
+
@cython.c_api_binop_methods(False)
@cython.cclass
diff --git a/tests/run/builtin_abs.pyx b/tests/run/builtin_abs.pyx
index 59f3a93c4..e0b31b7e1 100644
--- a/tests/run/builtin_abs.pyx
+++ b/tests/run/builtin_abs.pyx
@@ -63,7 +63,7 @@ def int_abs(int a):
@cython.overflowcheck(True)
@cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']",
"//ReturnStatNode//NameNode[@entry.cname = 'abs']")
-cdef int c_int_abs(int a) nogil except *:
+cdef int c_int_abs(int a) except * nogil:
return abs(a)
def test_c_int_abs(int a):
@@ -125,7 +125,7 @@ def long_abs(long a):
@cython.overflowcheck(True)
@cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']",
"//ReturnStatNode//NameNode[@entry.cname = 'labs']")
-cdef long c_long_abs(long a) nogil except *:
+cdef long c_long_abs(long a) except * nogil:
return abs(a)
def test_c_long_abs(long a):
@@ -189,7 +189,7 @@ def long_long_abs(long long a):
@cython.overflowcheck(True)
@cython.test_assert_path_exists("//ReturnStatNode//NameNode[@entry.name = 'abs']",
"//ReturnStatNode//NameNode[@entry.cname = '__Pyx_abs_longlong']")
-cdef long long c_long_long_abs(long long a) nogil except *:
+cdef long long c_long_long_abs(long long a) except * nogil:
return abs(a)
def test_c_long_long_abs(long long a):
diff --git a/tests/run/builtin_type_inheritance_T608.pyx b/tests/run/builtin_type_inheritance_T608.pyx
index 1214b6841..d03558a25 100644
--- a/tests/run/builtin_type_inheritance_T608.pyx
+++ b/tests/run/builtin_type_inheritance_T608.pyx
@@ -1,42 +1,6 @@
# ticket: t608
-cdef class MyInt(int):
- """
- >>> MyInt(2) == 2
- True
- >>> MyInt(2).attr is None
- True
- """
- cdef readonly object attr
-
-cdef class MyInt2(int):
- """
- >>> MyInt2(2) == 2
- True
- >>> MyInt2(2).attr is None
- True
- >>> MyInt2(2).test(3)
- 5
- """
- cdef readonly object attr
-
- def test(self, arg):
- return self._test(arg)
-
- cdef _test(self, arg):
- return self + arg
-
-cdef class MyInt3(MyInt2):
- """
- >>> MyInt3(2) == 2
- True
- >>> MyInt3(2).attr is None
- True
- >>> MyInt3(2).test(3)
- 6
- """
- cdef _test(self, arg):
- return self + arg + 1
+# see "builtin_type_inheritance_T608_py2only.pyx" for inheritance from int
cdef class MyFloat(float):
"""
diff --git a/tests/run/builtin_type_inheritance_T608_py2only.pyx b/tests/run/builtin_type_inheritance_T608_py2only.pyx
new file mode 100644
index 000000000..b10a2610a
--- /dev/null
+++ b/tests/run/builtin_type_inheritance_T608_py2only.pyx
@@ -0,0 +1,42 @@
+# ticket: t608
+
+# This only works reliably in Python2. In Python3 ints are variable-sized.
+# You get away with it for small ints but it's a bad idea
+
+cdef class MyInt(int):
+ """
+ >>> MyInt(2) == 2
+ True
+ >>> MyInt(2).attr is None
+ True
+ """
+ cdef readonly object attr
+
+cdef class MyInt2(int):
+ """
+ >>> MyInt2(2) == 2
+ True
+ >>> MyInt2(2).attr is None
+ True
+ >>> MyInt2(2).test(3)
+ 5
+ """
+ cdef readonly object attr
+
+ def test(self, arg):
+ return self._test(arg)
+
+ cdef _test(self, arg):
+ return self + arg
+
+cdef class MyInt3(MyInt2):
+ """
+ >>> MyInt3(2) == 2
+ True
+ >>> MyInt3(2).attr is None
+ True
+ >>> MyInt3(2).test(3)
+ 6
+ """
+ cdef _test(self, arg):
+ return self + arg + 1
diff --git a/tests/run/bytearray_iter.py b/tests/run/bytearray_iter.py
index 1865f057b..60df9fcc1 100644
--- a/tests/run/bytearray_iter.py
+++ b/tests/run/bytearray_iter.py
@@ -88,3 +88,18 @@ def modifying_reversed_bytearray_iter(x):
for a in reversed(x):
print(chr(a))
del x[0]
+
+# ticket: 3473
+
+def test_bytearray_iteration(src):
+ """
+ >>> src = b'123'
+ >>> test_bytearray_iteration(src)
+ 49
+ 50
+ 51
+ """
+
+ data = bytearray(src)
+ for elem in data:
+ print(elem)
diff --git a/tests/run/c_file_validation.srctree b/tests/run/c_file_validation.srctree
new file mode 100644
index 000000000..cceb014ac
--- /dev/null
+++ b/tests/run/c_file_validation.srctree
@@ -0,0 +1,72 @@
+"""
+PYTHON run_test.py
+"""
+
+######## run_test.py ########
+
+import os
+from collections import defaultdict
+from os.path import basename, splitext
+
+from Cython.Compiler.Options import CompilationOptions
+from Cython.Compiler.Main import compile as cython_compile
+from Cython.Compiler.Options import default_options
+
+
+def validate_file(filename):
+ module_name = basename(filename)
+ c_file = splitext(filename)[0] + '.c'
+
+ options = CompilationOptions(
+ default_options,
+ language_level="3",
+ evaluate_tree_assertions=True,
+ )
+ result = cython_compile(filename, options=options)
+ return result.num_errors
+
+
+counts = defaultdict(int)
+failed = False
+
+for filename in sorted(os.listdir(".")):
+ if "run_test" in filename:
+ continue
+
+ print("Testing '%s'" % filename)
+ num_errors = validate_file(filename)
+ print(num_errors, filename)
+ counts[num_errors] += 1
+
+ if '_ok' in filename:
+ if num_errors > 0:
+ failed = True
+ print("ERROR: Compilation failed: %s (%s errors)" % (filename, num_errors))
+ else:
+ if num_errors == 0:
+ failed = True
+ print("ERROR: Expected failure, but compilation succeeded: %s" % filename)
+
+assert counts == {0: 2, 1: 2}, counts
+assert not failed
+
+
+######## assert_ok.py ########
+
+# cython: test_assert_c_code_has = Generated by Cython
+# cython: test_assert_c_code_has = CYTHON_HEX_VERSION
+
+
+######## assert_missing.py ########
+
+# cython: test_assert_c_code_has = Generated by Python
+
+
+######## fail_if_ok.py ########
+
+# cython: test_fail_if_c_code_has = Generated by Python
+
+
+######## fail_if_found.py ########
+
+# cython: test_fail_if_c_code_has = Generated by Cython
diff --git a/tests/run/cdef_class_dataclass.pyx b/tests/run/cdef_class_dataclass.pyx
index 326fd0210..7be88f695 100644
--- a/tests/run/cdef_class_dataclass.pyx
+++ b/tests/run/cdef_class_dataclass.pyx
@@ -127,8 +127,8 @@ cdef class ContainsNonPyFields:
"""
mystruct: S = cython.dataclasses.field(compare=False)
mystruct_ptr: S_ptr = field(init=False, repr=False, default_factory=malloc_a_struct)
- memview: int[:, ::1] = field(default=create_array((3,1), "c"), # mutable so not great but OK for a test
- compare=False)
+ memview: cython.int[:, ::1] = field(default=create_array((3,1), "c"), # mutable so not great but OK for a test
+ compare=False)
def __dealloc__(self):
free(self.mystruct_ptr)
@@ -154,8 +154,8 @@ cdef class InitClassVars:
True
"""
a: cython.int = 0
- b1: InitVar[double] = 1.0
- b2: py_dataclasses.InitVar[double] = 1.0
+ b1: InitVar[cython.double] = 1.0
+ b2: py_dataclasses.InitVar[cython.double] = 1.0
c1: ClassVar[float] = 2.0
c2: typing.ClassVar[float] = 2.0
cdef InitVar[cython.int] d1
@@ -206,7 +206,7 @@ cdef class TestVisibility:
"""
cdef double a
a = 1.0
- b: double = 2.0
+ b: cython.double = 2.0
cdef public double c
c = 3.0
cdef public object d
@@ -222,8 +222,12 @@ cdef class TestFrozen:
Traceback (most recent call last):
AttributeError: attribute 'a' of '...TestFrozen' objects is not writable
"""
- a: double = 2.0
+ a: cython.double = 2.0
+def get_dataclass_initvar():
+ return py_dataclasses.InitVar
+
+
@dataclass(kw_only=True)
cdef class TestKwOnly:
"""
@@ -248,13 +252,14 @@ cdef class TestKwOnly:
TypeError: __init__() needs keyword-only argument b
"""
- a: double = 2.0
- b: long
+ a: cython.double = 2.0
+ b: cython.long
+
import sys
if sys.version_info >= (3, 7):
__doc__ = """
- >>> from dataclasses import Field, is_dataclass, fields
+ >>> from dataclasses import Field, is_dataclass, fields, InitVar
# It uses the types from the standard library where available
>>> all(isinstance(v, Field) for v in BasicDataclass.__dataclass_fields__.values())
@@ -275,4 +280,6 @@ if sys.version_info >= (3, 7):
['a', 'b', 'c', 'd']
>>> [ f.name for f in fields(InitClassVars)]
['a']
+ >>> get_dataclass_initvar() == InitVar
+ True
"""
diff --git a/tests/run/cdef_setitem_T284.pyx b/tests/run/cdef_setitem_T284.pyx
index 389b8c409..871afb892 100644
--- a/tests/run/cdef_setitem_T284.pyx
+++ b/tests/run/cdef_setitem_T284.pyx
@@ -24,9 +24,9 @@ def with_external_list(list L):
"""
>>> with_external_list([1,2,3])
[1, -10, 3]
- >>> with_external_list(None)
+ >>> with_external_list(None) # doctest: +ELLIPSIS
Traceback (most recent call last):
- TypeError: 'NoneType' object is not subscriptable
+ TypeError: 'NoneType' object ...
"""
ob = 1L
L[ob] = -10
diff --git a/tests/run/cfunc_convert.pyx b/tests/run/cfunc_convert.pyx
index 6db0765d4..89e09ea36 100644
--- a/tests/run/cfunc_convert.pyx
+++ b/tests/run/cfunc_convert.pyx
@@ -74,7 +74,7 @@ def test_global():
>>> global_csqrt.__doc__
'wrap(x: float) -> float'
>>> test_global()
- double (double) nogil
+ double (double) noexcept nogil
Python object
"""
print cython.typeof(sqrt)
@@ -266,3 +266,28 @@ def make_map():
"f2": cfunc_dup_f2,
}
return map
+
+
+cdef class HasCdefFunc:
+ cdef int x
+ def __init__(self, x):
+ self.x = x
+
+ cdef int func(self, int y):
+ return self.x + y
+
+def test_unbound_methods():
+ """
+ >>> f = test_unbound_methods()
+ >>> f(HasCdefFunc(1), 2)
+ 3
+ """
+ return HasCdefFunc.func
+
+def test_bound_methods():
+ """
+ >>> f = test_bound_methods()
+ >>> f(2)
+ 3
+ """
+ return HasCdefFunc(1).func
diff --git a/tests/run/cfuncptr.pyx b/tests/run/cfuncptr.pyx
index b7018cce0..cb3b32184 100644
--- a/tests/run/cfuncptr.pyx
+++ b/tests/run/cfuncptr.pyx
@@ -46,15 +46,49 @@ cdef int exceptminus2(int bad) except -2:
else:
return 0
-def call_exceptminus2(bad):
+def call_exceptminus2_through_exceptstar_pointer(bad):
"""
- >>> call_exceptminus2(True)
+ >>> call_exceptminus2_through_exceptstar_pointer(True)
Traceback (most recent call last):
...
RuntimeError
- >>> call_exceptminus2(False)
+ >>> call_exceptminus2_through_exceptstar_pointer(False)
0
"""
cdef int (*fptr)(int) except * # GH4770 - should not be treated as except? -1
fptr = exceptminus2
return fptr(bad)
+
+def call_exceptminus2_through_exceptmaybeminus2_pointer(bad):
+ """
+ >>> call_exceptminus2_through_exceptmaybeminus2_pointer(True)
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+ >>> call_exceptminus2_through_exceptmaybeminus2_pointer(False)
+ 0
+ """
+ cdef int (*fptr)(int) except ?-2 # exceptions should be compatible
+ fptr = exceptminus2
+ return fptr(bad)
+
+cdef int noexcept_func(): # noexcept
+ return 0
+
+def call_noexcept_func_except_star():
+ """
+ >>> call_noexcept_func_except_star()
+ 0
+ """
+ cdef int (*fptr)() except *
+ fptr = noexcept_func # exception specifications are compatible
+ return fptr()
+
+def call_noexcept_func_except_check():
+ """
+ >>> call_noexcept_func_except_check()
+ 0
+ """
+ cdef int (*fptr)() except ?-1
+ fptr = noexcept_func # exception specifications are compatible
+ return fptr()
diff --git a/tests/run/complex_numbers_T305.pyx b/tests/run/complex_numbers_T305.pyx
index acbc0a5fa..8ca98ff50 100644
--- a/tests/run/complex_numbers_T305.pyx
+++ b/tests/run/complex_numbers_T305.pyx
@@ -80,6 +80,8 @@ def test_pow(double complex z, double complex w, tol=None):
True
>>> test_pow(-0.5, 1j, tol=1e-15)
True
+ >>> test_pow(-1, 0.5, tol=1e-15)
+ True
"""
if tol is None:
return z**w
@@ -264,3 +266,87 @@ cpdef double complex complex_retval():
1j
"""
return 1j
+
+def stress_test():
+ """
+ Run the main operations on 1000 pseudo-random numbers to
+ try to spot anything accidentally missed from the test cases
+ (doesn't cover inf and NaN as inputs though)
+ >>> stress_test()
+ """
+ cdef double complex x
+ cdef double complex y
+
+ from random import Random
+ from math import ldexp
+ r = Random()
+ r.seed("I'm a seed") # try to make the test somewhat reproducible
+
+ # copied from https://docs.python.org/3/library/random.html#recipes
+ # gets evenly distributed random numbers
+ def full_random():
+ mantissa = 0x10_0000_0000_0000 | r.getrandbits(52)
+ exponent = -53
+ x = 0
+ while not x:
+ x = r.getrandbits(32)
+ exponent += x.bit_length() - 32
+ return ldexp(mantissa, exponent)
+
+ for n in range(1, 1001):
+ if n % 50 == 0:
+ # strategical insert some 0 values
+ a = 0
+ else:
+ a = full_random()
+ if n % 51 == 0:
+ b = 0
+ else:
+ b = full_random()
+ if n % 52 == 0:
+ c = 0
+ else:
+ c = full_random()
+ if n % 53 == 0:
+ d = 0
+ else:
+ d = full_random()
+
+ x= a+1j*b
+ y = c+1j*d
+ py_dict = dict(x=x, y=y)
+
+ sum_ = x+y
+ sum_py = eval("x+y", py_dict)
+ delta_sum = abs(sum_/sum_py - 1)
+ assert delta_sum < 1e-15, f"{x} {y} {sum_} {sum_py} {delta_sum}"
+
+ minus = x-y
+ minus_py = eval("x-y", py_dict)
+ delta_minus = abs(minus/minus_py - 1)
+ assert delta_minus < 1e-15, f"{x} {y} {minus} {minus_py} {delta_minus}"
+
+ times = x*y
+ times_py = eval("x*y", py_dict)
+ delta_times = abs(times/times_py - 1)
+ assert delta_times < 1e-15, f"{x} {y} {times} {times_py} {delta_times}"
+
+ divide = x/y
+ divide_py = eval("x/y", py_dict)
+ delta_divide = abs(divide/divide_py - 1)
+ assert delta_divide < 1e-15, f"{x} {y} {divide} {divide_py} {delta_divide}"
+
+ divide2 = y/x
+ divide2_py = eval("y/x", py_dict)
+ delta_divide2 = abs(divide2/divide2_py - 1)
+ assert delta_divide2 < 1e-15, f"{x} {y} {divide2} {divide2_py} {delta_divide2}"
+
+ pow_ = x**y
+ pow_py = eval("x**y", py_dict)
+ delta_pow = abs(pow_/pow_py - 1)
+ assert delta_pow < 1e-15, f"{x} {y} {pow_} {pow_py} {delta_pow}"
+
+ pow2 = y**x
+ pow2_py = eval("y**x", py_dict)
+ delta_pow2 = abs(pow2/pow2_py - 1)
+ assert delta_pow2 < 1e-15, f"{x} {y} {pow2} {pow2_py} {delta_pow2}"
diff --git a/tests/run/coverage_cmd_src_pkg_layout.srctree b/tests/run/coverage_cmd_src_pkg_layout.srctree
new file mode 100644
index 000000000..e2c58691a
--- /dev/null
+++ b/tests/run/coverage_cmd_src_pkg_layout.srctree
@@ -0,0 +1,177 @@
+# mode: run
+# tag: coverage,trace
+
+"""
+PYTHON -m pip install .
+PYTHON setup.py build_ext --inplace
+PYTHON -m coverage run --source=pkg coverage_test.py
+PYTHON collect_coverage.py
+"""
+
+######## setup.py ########
+
+from setuptools import Extension, find_packages, setup
+from Cython.Build import cythonize
+
+MODULES = [
+ Extension("pkg.module1", ["src/pkg/module1.pyx"]),
+ ]
+
+setup(
+ name="pkg",
+ zip_safe=False,
+ packages=find_packages('src'),
+ package_data={'pkg': ['*.pxd', '*.pyx']},
+ package_dir={'': 'src'},
+ ext_modules= cythonize(MODULES)
+ )
+
+
+######## .coveragerc ########
+[run]
+plugins = Cython.Coverage
+
+######## src/pkg/__init__.py ########
+
+######## src/pkg/module1.pyx ########
+# cython: linetrace=True
+# distutils: define_macros=CYTHON_TRACE=1
+
+def func1(int a, int b):
+ cdef int x = 1 # 5
+ c = func2(a) + b # 6
+ return x + c # 7
+
+
+def func2(int a):
+ return a * 2 # 11
+
+######## coverage_test.py ########
+
+import os.path
+from pkg import module1
+
+
+assert not any(
+ module1.__file__.endswith(ext)
+ for ext in '.py .pyc .pyo .pyw .pyx .pxi'.split()
+), module.__file__
+
+
+def run_coverage(module):
+ assert module.func1(1, 2) == (1 * 2) + 2 + 1
+ assert module.func2(2) == 2 * 2
+
+
+if __name__ == '__main__':
+ run_coverage(module1)
+
+
+######## collect_coverage.py ########
+
+import re
+import sys
+import os
+import os.path
+import subprocess
+from glob import iglob
+
+
+def run_coverage_command(*command):
+ env = dict(os.environ, LANG='', LC_ALL='C')
+ process = subprocess.Popen(
+ [sys.executable, '-m', 'coverage'] + list(command),
+ stdout=subprocess.PIPE, env=env)
+ stdout, _ = process.communicate()
+ return stdout
+
+
+def run_report():
+ stdout = run_coverage_command('report', '--show-missing')
+ stdout = stdout.decode('iso8859-1') # 'safe' decoding
+ lines = stdout.splitlines()
+ print(stdout)
+
+ module_path = 'module1.pyx'
+ assert any(module_path in line for line in lines), (
+ "'%s' not found in coverage report:\n\n%s" % (module_path, stdout))
+
+ files = {}
+ line_iter = iter(lines)
+ for line in line_iter:
+ if line.startswith('---'):
+ break
+ extend = [''] * 2
+ for line in line_iter:
+ if not line or line.startswith('---'):
+ continue
+ name, statements, missed, covered, _missing = (line.split(None, 4) + extend)[:5]
+ missing = []
+ for start, end in re.findall('([0-9]+)(?:-([0-9]+))?', _missing):
+ if end:
+ missing.extend(range(int(start), int(end)+1))
+ else:
+ missing.append(int(start))
+ files[os.path.basename(name)] = (statements, missed, covered, missing)
+ assert 5 not in files[module_path][-1], files[module_path]
+ assert 6 not in files[module_path][-1], files[module_path]
+ assert 7 not in files[module_path][-1], files[module_path]
+ assert 11 not in files[module_path][-1], files[module_path]
+
+
+def run_xml_report():
+ stdout = run_coverage_command('xml', '-o', '-')
+ print(stdout)
+
+ import xml.etree.ElementTree as etree
+ data = etree.fromstring(stdout)
+
+ files = {}
+ for module in data.iterfind('.//class'):
+ files[module.get('filename').replace('\\', '/')] = dict(
+ (int(line.get('number')), int(line.get('hits')))
+ for line in module.findall('lines/line')
+ )
+
+ module_path = 'src/pkg/module1.pyx'
+
+ assert files[module_path][5] > 0, files[module_path]
+ assert files[module_path][6] > 0, files[module_path]
+ assert files[module_path][7] > 0, files[module_path]
+ assert files[module_path][11] > 0, files[module_path]
+
+
+def run_html_report():
+ from collections import defaultdict
+
+ stdout = run_coverage_command('html', '-d', 'html')
+ # coverage 6.1+ changed the order of the attributes => need to parse them separately
+ _parse_id = re.compile(r'id=["\'][^0-9"\']*(?P<id>[0-9]+)[^0-9"\']*["\']').search
+ _parse_state = re.compile(r'class=["\'][^"\']*(?P<state>mis|run|exc)[^"\']*["\']').search
+
+ files = {}
+ for file_path in iglob('html/*.html'):
+ with open(file_path) as f:
+ page = f.read()
+ report = defaultdict(set)
+ for line in re.split(r'id=["\']source["\']', page)[-1].splitlines():
+ lineno = _parse_id(line)
+ state = _parse_state(line)
+ if not lineno or not state:
+ continue
+ report[state.group('state')].add(int(lineno.group('id')))
+ files[file_path] = report
+
+ file_report = [data for path, data in files.items() if 'module1' in path][0]
+ executed, missing = file_report["run"], file_report["mis"]
+ assert executed
+ assert 5 in executed, executed
+ assert 6 in executed, executed
+ assert 7 in executed, executed
+ assert 11 in executed, executed
+
+
+if __name__ == '__main__':
+ run_report()
+ run_xml_report()
+ run_html_report()
diff --git a/tests/run/cpdef_void_return.pyx b/tests/run/cpdef_void_return.pyx
index e15448505..7943c3466 100644
--- a/tests/run/cpdef_void_return.pyx
+++ b/tests/run/cpdef_void_return.pyx
@@ -1,4 +1,4 @@
-cpdef void unraisable():
+cpdef void unraisable() noexcept:
"""
>>> unraisable()
here
diff --git a/tests/run/cpp_classes.pyx b/tests/run/cpp_classes.pyx
index d2babdac3..1a1110b91 100644
--- a/tests/run/cpp_classes.pyx
+++ b/tests/run/cpp_classes.pyx
@@ -9,7 +9,7 @@ cdef extern from "shapes.h" namespace "shapes":
float area()
cdef cppclass Ellipse(Shape):
- Ellipse(int a, int b) nogil except +
+ Ellipse(int a, int b) except + nogil
cdef cppclass Circle(Ellipse):
int radius
diff --git a/tests/run/cpp_classes_def.pyx b/tests/run/cpp_classes_def.pyx
index e36fc4fbd..855de7051 100644
--- a/tests/run/cpp_classes_def.pyx
+++ b/tests/run/cpp_classes_def.pyx
@@ -21,7 +21,7 @@ cdef cppclass RegularPolygon(Shape):
__init__(int n, float radius):
this.n = n
this.radius = radius
- float area() const:
+ float area() noexcept const:
cdef double theta = pi / this.n
return this.radius * this.radius * sin(theta) * cos(theta) * this.n
void do_with() except *:
diff --git a/tests/run/cpp_exceptions_nogil.pyx b/tests/run/cpp_exceptions_nogil.pyx
index 1d21d40f9..5c6315323 100644
--- a/tests/run/cpp_exceptions_nogil.pyx
+++ b/tests/run/cpp_exceptions_nogil.pyx
@@ -9,7 +9,7 @@ cdef extern from "cpp_exceptions_nogil_helper.h" nogil:
cdef void bar "foo"(int i) except +ValueError
cdef void spam"foo"(int i) except +raise_TypeError
-cdef int foo_nogil(int i) nogil except *:
+cdef int foo_nogil(int i) except * nogil:
foo(i)
def test_foo_nogil():
diff --git a/tests/run/cpp_extern.srctree b/tests/run/cpp_extern.srctree
new file mode 100644
index 000000000..d2c11bb5f
--- /dev/null
+++ b/tests/run/cpp_extern.srctree
@@ -0,0 +1,151 @@
+# mode: run
+# tag: cpp
+# ticket: 1839
+
+"""
+PYTHON setup.py build_ext --inplace
+PYTHON -c "from foo import test; test()"
+PYTHON -c "from bar import test; test()"
+PYTHON -c "from baz import test; test()"
+"""
+
+######## setup.py ########
+
+from Cython.Build import cythonize
+from Cython.Distutils.extension import Extension
+from distutils.core import setup
+
+foo = Extension(
+ "foo",
+ ["foo.pyx", "foo1.cpp", "foo2.cpp"],
+)
+bar = Extension(
+ "bar",
+ ["bar.pyx", "bar1.c", "bar2.cpp"],
+)
+baz = Extension(
+ "baz",
+ ["baz.pyx", "baz1.c", "baz2.cpp"],
+ define_macros = [("__PYX_EXTERN_C", 'extern "C"')],
+)
+
+setup(
+ ext_modules=cythonize([foo, bar, baz]),
+)
+
+######## foo.pyx ########
+
+# distutils: language = c++
+
+from libcpp cimport vector
+
+cdef public vector.vector[int] get_vector():
+ return [1,2,3]
+
+cdef extern from "foo_header.h":
+ cdef size_t size_vector1()
+ cdef size_t size_vector2()
+
+def test():
+ assert size_vector1() == 3
+ assert size_vector2() == 3
+
+######## foo_header.h ########
+
+size_t size_vector1();
+size_t size_vector2();
+
+######## foo1.cpp ########
+
+#include <vector>
+#include "foo.h"
+
+size_t size_vector1() {
+ return get_vector().size();
+}
+
+######## foo2.cpp ########
+
+#include <vector>
+extern "C" {
+// #include within `extern "C"` is legal.
+// We want to make sure here that Cython C++ functions are flagged as `extern "C++"`.
+// Otherwise they would be interpreted with C-linkage if the header is include within a `extern "C"` block.
+#include "foo.h"
+}
+
+size_t size_vector2() {
+ return get_vector().size();
+}
+
+######## bar.pyx ########
+
+cdef public char get_char():
+ return 42
+
+cdef extern from "bar_header.h":
+ cdef int get_int1()
+ cdef int get_int2()
+
+def test():
+ assert get_int1() == 42
+ assert get_int2() == 42
+
+######## bar_header.h ########
+
+int get_int1();
+int get_int2();
+
+######## bar1.c ########
+
+#include "bar.h"
+
+int get_int1() { return (int)get_char(); }
+
+######## bar2.cpp ########
+
+extern "C" {
+#include "bar.h"
+}
+
+extern "C" int get_int2() { return (int)get_char(); }
+
+######## baz.pyx ########
+
+# distutils: language = c++
+
+cdef public char get_char():
+ return 42
+
+cdef extern from "baz_header.h":
+ cdef int get_int1()
+ cdef int get_int2()
+
+def test():
+ assert get_int1() == 42
+ assert get_int2() == 42
+
+######## baz_header.h ########
+
+#ifdef __cplusplus
+ #define BAZ_EXTERN_C extern "C"
+#else
+ #define BAZ_EXTERN_C
+#endif
+
+BAZ_EXTERN_C int get_int1();
+int get_int2();
+
+######## baz1.c ########
+
+#undef __PYX_EXTERN_C
+#define __PYX_EXTERN_C
+#include "baz.h"
+
+int get_int1() { return (int)get_char(); }
+
+######## baz2.cpp ########
+
+#include "baz.h"
+
+int get_int2() { return (int)get_char(); }
diff --git a/tests/run/cpp_function_lib.pxd b/tests/run/cpp_function_lib.pxd
index 2a5d72886..ba6694cb9 100644
--- a/tests/run/cpp_function_lib.pxd
+++ b/tests/run/cpp_function_lib.pxd
@@ -13,7 +13,7 @@ cdef extern from "cpp_function_lib.h":
double call "operator()"(double a, int b)
cdef cppclass FunctionKeeper:
- FunctionKeeper(function[double(double, int)] user_function)
- void set_function(function[double(double, int)] user_function)
- function[double(double, int)] get_function()
+ FunctionKeeper(function[double(double, int) noexcept] user_function)
+ void set_function(function[double(double, int) noexcept] user_function)
+ function[double(double, int) noexcept] get_function()
double call_function(double a, int b) except +
diff --git a/tests/run/cpp_iterators.pyx b/tests/run/cpp_iterators.pyx
index 850632581..81048d0b3 100644
--- a/tests/run/cpp_iterators.pyx
+++ b/tests/run/cpp_iterators.pyx
@@ -2,6 +2,10 @@
# tag: cpp, werror, no-cpp-locals
from libcpp.deque cimport deque
+from libcpp.list cimport list as stdlist
+from libcpp.map cimport map as stdmap
+from libcpp.set cimport set as stdset
+from libcpp.string cimport string
from libcpp.vector cimport vector
from cython.operator cimport dereference as deref
@@ -10,6 +14,11 @@ cdef extern from "cpp_iterators_simple.h":
DoublePointerIter(double* start, int len)
double* begin()
double* end()
+ cdef cppclass DoublePointerIterDefaultConstructible:
+ DoublePointerIterDefaultConstructible()
+ DoublePointerIterDefaultConstructible(double* start, int len)
+ double* begin()
+ double* end()
def test_vector(py_v):
"""
@@ -98,6 +107,35 @@ def test_custom():
finally:
del iter
+def test_custom_deref():
+ """
+ >>> test_custom_deref()
+ [1.0, 2.0, 3.0]
+ """
+ cdef double* values = [1, 2, 3]
+ cdef DoublePointerIter* iter
+ try:
+ iter = new DoublePointerIter(values, 3)
+ return [x for x in deref(iter)]
+ finally:
+ del iter
+
+def test_custom_genexp():
+ """
+ >>> test_custom_genexp()
+ [1.0, 2.0, 3.0]
+ """
+ def to_list(g): # function to hide the intent to avoid inlined-generator expression optimization
+ return list(g)
+ cdef double* values = [1, 2, 3]
+ cdef DoublePointerIterDefaultConstructible* iter
+ try:
+ iter = new DoublePointerIterDefaultConstructible(values, 3)
+ # TODO: Only needs to copy once - currently copies twice
+ return to_list(x for x in iter[0])
+ finally:
+ del iter
+
def test_iteration_over_heap_vector(L):
"""
>>> test_iteration_over_heap_vector([1,2])
@@ -201,3 +239,119 @@ def test_const_iterator_calculations(py_v):
first == clast,
last == cfirst
]
+
+cdef extern from "cpp_iterators_over_attribute_of_rvalue_support.h":
+ cdef cppclass HasIterableAttribute:
+ vector[int] vec
+ HasIterableAttribute()
+ HasIterableAttribute(vector[int])
+
+cdef HasIterableAttribute get_object_with_iterable_attribute():
+ return HasIterableAttribute()
+
+def test_iteration_over_attribute_of_call():
+ """
+ >>> test_iteration_over_attribute_of_call()
+ 1
+ 2
+ 3
+ 42
+ 43
+ 44
+ 1
+ 2
+ 3
+ """
+ for i in HasIterableAttribute().vec:
+ print(i)
+ cdef vector[int] vec
+ for i in range(42, 45):
+ vec.push_back(i)
+ for i in HasIterableAttribute(vec).vec:
+ print(i)
+ for i in get_object_with_iterable_attribute().vec:
+ print(i)
+
+def test_iteration_over_reversed_list(py_v):
+ """
+ >>> test_iteration_over_reversed_list([2, 4, 6])
+ 6
+ 4
+ 2
+ """
+ cdef stdlist[int] lint
+ for e in py_v:
+ lint.push_back(e)
+ for e in reversed(lint):
+ print(e)
+
+def test_iteration_over_reversed_map(py_v):
+ """
+ >>> test_iteration_over_reversed_map([(1, 10), (2, 20), (3, 30)])
+ 3 30
+ 2 20
+ 1 10
+ """
+ cdef stdmap[int, int] m
+ for k, v in py_v:
+ m[k] = v
+ for k, v in reversed(m):
+ print("%s %s" % (k, v))
+
+def test_iteration_over_reversed_set(py_v):
+ """
+ >>> test_iteration_over_reversed_set([1, 2, 3])
+ 3
+ 2
+ 1
+ """
+ cdef stdset[int] s
+ for e in py_v:
+ s.insert(e)
+ for e in reversed(s):
+ print(e)
+
+def test_iteration_over_reversed_string():
+ """
+ >>> test_iteration_over_reversed_string()
+ n
+ o
+ h
+ t
+ y
+ c
+ """
+ cdef string cppstr = "cython"
+ for c in reversed(cppstr):
+ print(chr(c))
+
+def test_iteration_over_reversed_vector(py_v):
+ """
+ >>> test_iteration_over_reversed_vector([1, 2, 3])
+ 3
+ 2
+ 1
+ """
+ cdef vector[int] vint
+ for e in py_v:
+ vint.push_back(e)
+ for e in reversed(vint):
+ print(e)
+
+def test_non_built_in_reversed_function(py_v):
+ """
+ >>> test_non_built_in_reversed_function([1, 3, 5])
+ Non-built-in reversed called.
+ 5
+ 3
+ 1
+ """
+ def reversed(arg):
+ print("Non-built-in reversed called.")
+ return arg[::-1]
+
+ cdef vector[int] vint
+ for e in py_v:
+ vint.push_back(e)
+ for e in reversed(vint):
+ print(e)
diff --git a/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h b/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h
new file mode 100644
index 000000000..b4a10b5be
--- /dev/null
+++ b/tests/run/cpp_iterators_over_attribute_of_rvalue_support.h
@@ -0,0 +1,11 @@
+#include <vector>
+
+class HasIterableAttribute {
+public:
+ std::vector<int> vec;
+ HasIterableAttribute() {
+ for (int i = 1; i<=3; i++)
+ vec.push_back(i);
+ }
+ HasIterableAttribute(std::vector<int> vec) : vec(vec) {}
+};
diff --git a/tests/run/cpp_iterators_simple.h b/tests/run/cpp_iterators_simple.h
index 3a4b50e3c..8373237d8 100644
--- a/tests/run/cpp_iterators_simple.h
+++ b/tests/run/cpp_iterators_simple.h
@@ -8,3 +8,14 @@ private:
int len_;
};
+class DoublePointerIterDefaultConstructible: public DoublePointerIter {
+ // an alternate version that is default-constructible
+public:
+ DoublePointerIterDefaultConstructible() :
+ DoublePointerIter(0, 0)
+ {}
+ DoublePointerIterDefaultConstructible(double* start, int len) :
+ DoublePointerIter(start, len)
+ {}
+
+};
diff --git a/tests/run/cpp_locals_directive.pyx b/tests/run/cpp_locals_directive.pyx
index 6c9c89ba5..359ae0b10 100644
--- a/tests/run/cpp_locals_directive.pyx
+++ b/tests/run/cpp_locals_directive.pyx
@@ -19,13 +19,9 @@ cdef extern from *:
C(C&& rhs) : x(rhs.x), print_destructor(rhs.print_destructor) {
rhs.print_destructor = false; // moved-from instances are deleted silently
}
- C& operator=(C&& rhs) {
- x=rhs.x;
- print_destructor=rhs.print_destructor;
- rhs.print_destructor = false; // moved-from instances are deleted silently
- return *this;
- }
- C(const C& rhs) = default;
+ // also test that we don't require the assignment operator
+ C& operator=(C&& rhs) = delete;
+ C(const C& rhs) = delete;
C& operator=(const C& rhs) = default;
~C() {
if (print_destructor) print_C_destructor();
diff --git a/tests/run/cpp_nested_classes.pyx b/tests/run/cpp_nested_classes.pyx
index b50f79936..8877c0440 100644
--- a/tests/run/cpp_nested_classes.pyx
+++ b/tests/run/cpp_nested_classes.pyx
@@ -25,6 +25,22 @@ cdef extern from "cpp_nested_classes_support.h":
cdef cppclass SpecializedTypedClass(TypedClass[double]):
pass
+cdef cppclass AA:
+ cppclass BB:
+ int square(int x):
+ return x * x
+ cppclass CC:
+ int cube(int x):
+ return x * x * x
+ BB* createB():
+ return new BB()
+ ctypedef int my_int
+ @staticmethod
+ my_int negate(my_int x):
+ return -x
+
+cdef cppclass DD(AA):
+ ctypedef int my_other_int
ctypedef A AliasA1
ctypedef AliasA1 AliasA2
@@ -44,6 +60,27 @@ def test_nested_classes():
assert b_ptr.square(4) == 16
del b_ptr
+def test_nested_defined_classes():
+ """
+ >>> test_nested_defined_classes()
+ """
+ cdef AA a
+ cdef AA.BB b
+ assert b.square(3) == 9
+ cdef AA.BB.CC c
+ assert c.cube(3) == 27
+
+ cdef AA.BB *b_ptr = a.createB()
+ assert b_ptr.square(4) == 16
+ del b_ptr
+
+def test_nested_inherited_classes():
+ """
+ >>> test_nested_inherited_classes()
+ """
+ cdef DD.BB b
+ assert b.square(3) == 9
+
def test_nested_typedef(py_x):
"""
>>> test_nested_typedef(5)
@@ -51,6 +88,13 @@ def test_nested_typedef(py_x):
cdef A.my_int x = py_x
assert A.negate(x) == -py_x
+def test_nested_defined_typedef(py_x):
+ """
+ >>> test_nested_defined_typedef(5)
+ """
+ cdef AA.my_int x = py_x
+ assert AA.negate(x) == -py_x
+
def test_typedef_for_nested(py_x):
"""
>>> test_typedef_for_nested(5)
diff --git a/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx b/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx
new file mode 100644
index 000000000..ebe8d8fa8
--- /dev/null
+++ b/tests/run/cpp_stl_associated_containers_contains_cpp20.pyx
@@ -0,0 +1,106 @@
+# mode: run
+# tag: cpp, cpp20
+
+# cython: language_level=3
+
+from libcpp.map cimport map, multimap
+from libcpp.set cimport set, multiset
+from libcpp.unordered_map cimport unordered_map, unordered_multimap
+from libcpp.unordered_set cimport unordered_set, unordered_multiset
+
+def test_map_contains(vals, int key_to_find):
+ """
+ >>> test_map_contains([(1,100),(2,200),(3,300)], 3)
+ True
+ >>> test_map_contains([(1,100),(2,200),(3,300)], 4)
+ False
+ """
+ cdef map[int,int] m = map[int, int]()
+ for v in vals:
+ m.insert(v)
+ return m.contains(key_to_find)
+
+def test_unordered_map_contains(vals, int key_to_find):
+ """
+ >>> test_unordered_map_contains([(1,100),(2,200),(3,300)], 3)
+ True
+ >>> test_unordered_map_contains([(1,100),(2,200),(3,300)], 4)
+ False
+ """
+ cdef unordered_map[int,int] um = unordered_map[int, int]()
+ for v in vals:
+ um.insert(v)
+ return um.contains(key_to_find)
+
+def test_multimap_contains(vals, int key_to_find):
+ """
+ >>> test_multimap_contains([(1,100),(2,200),(3,300)], 3)
+ True
+ >>> test_multimap_contains([(1,100),(2,200),(3,300)], 4)
+ False
+ """
+ cdef multimap[int,int] mm = multimap[int, int]()
+ for v in vals:
+ mm.insert(v)
+ return mm.contains(key_to_find)
+
+def test_unordered_multimap_contains(vals, int key_to_find):
+ """
+ >>> test_unordered_multimap_contains([(1,100),(2,200),(3,300)], 3)
+ True
+ >>> test_unordered_multimap_contains([(1,100),(2,200),(3,300)], 4)
+ False
+ """
+ cdef unordered_multimap[int,int] umm = unordered_multimap[int, int]()
+ for v in vals:
+ umm.insert(v)
+ return umm.contains(key_to_find)
+
+
+def test_set_contains(vals, int val_to_find):
+ """
+ >>> test_set_contains([1, 2, 3], 3)
+ True
+ >>> test_set_contains([1, 2, 3], 4)
+ False
+ """
+ cdef set[int] s = set[int]()
+ for v in vals:
+ s.insert(v)
+ return s.contains(val_to_find)
+
+def test_unordered_set_contains(vals, int val_to_find):
+ """
+ >>> test_unordered_set_contains([1, 2, 3], 3)
+ True
+ >>> test_unordered_set_contains([1, 2, 3], 4)
+ False
+ """
+ cdef unordered_set[int] us = unordered_set[int]()
+ for v in vals:
+ us.insert(v)
+ return us.contains(val_to_find)
+
+def test_multiset_contains(vals, int val_to_find):
+ """
+ >>> test_multiset_contains([1, 2, 3], 3)
+ True
+ >>> test_multiset_contains([1, 2, 3], 4)
+ False
+ """
+ cdef multiset[int] ms = multiset[int]()
+ for v in vals:
+ ms.insert(v)
+ return ms.contains(val_to_find)
+
+def test_unordered_multiset_contains(vals, int val_to_find):
+ """
+ >>> test_unordered_multiset_contains([1, 2, 3], 3)
+ True
+ >>> test_unordered_multiset_contains([1, 2, 3], 4)
+ False
+ """
+ cdef unordered_multiset[int] ums = unordered_multiset[int]()
+ for v in vals:
+ ums.insert(v)
+ return ums.contains(val_to_find)
diff --git a/tests/run/cpp_stl_bit_cpp20.pyx b/tests/run/cpp_stl_bit_cpp20.pyx
new file mode 100644
index 000000000..5aae8326a
--- /dev/null
+++ b/tests/run/cpp_stl_bit_cpp20.pyx
@@ -0,0 +1,131 @@
+# mode: run
+# tag: cpp, werror, cpp20
+
+from libcpp cimport bool
+from libc.stdint cimport uint8_t, int8_t
+from libcpp.bit cimport (bit_cast, has_single_bit, bit_ceil, bit_floor,
+ bit_width, rotr, rotl, countl_zero, countl_one, countr_zero,
+ countr_one, popcount)
+
+def test_bit_cast():
+ """
+ Test bit_cast with a signed 8bit wide integer type.
+ -127U = 0b1000'0001U
+ >>> test_bit_cast()
+ 129
+ """
+ cdef int8_t x = -127
+ cdef result = bit_cast[uint8_t, int8_t](x)
+ return result
+
+def test_has_single_bit():
+ """
+ Test has_single_bit with an unsigned 8bit wide integer type.
+ >>> test_has_single_bit()
+ True
+ """
+ cdef uint8_t x = 1
+ cdef bint res = has_single_bit[uint8_t](x)
+ return res
+
+def test_bit_ceil():
+ """
+ Test bit_ceil with an unsigned 8bit wide integer type.
+ >>> test_bit_ceil()
+ 4
+ """
+ cdef uint8_t x = 3
+ cdef uint8_t res = bit_ceil[uint8_t](x)
+ return res
+
+def test_bit_floor():
+ """
+ Test bit_floor with an unsigned 8bit wide integer type.
+ >>> test_bit_floor()
+ 4
+ """
+ cdef uint8_t x = 5
+ cdef uint8_t res = bit_floor[uint8_t](x)
+ return res
+
+def test_bit_width():
+ """
+ Test bit_width with an unsigned 8bit wide integer type.
+ >>> test_bit_width()
+ 3
+ """
+ cdef uint8_t x = 5
+ cdef int res = bit_width[uint8_t](x)
+ return res
+
+def test_rotl():
+ """
+ Test rotl with an unsigned 8bit wide integer type.
+ >>> test_rotl()
+ 209
+ """
+ cdef uint8_t x = 29
+ cdef int s = 4
+ cdef uint8_t res = rotl[uint8_t](x, s)
+ return res
+
+def test_rotr():
+ """
+ Test rotr with an unsigned 8bit wide integer type.
+ >>> test_rotr()
+ 142
+ """
+ cdef uint8_t x = 29
+ cdef int s = 1
+ cdef uint8_t res = rotr[uint8_t](x, s)
+ return res
+
+def test_countl_zero():
+ """
+ Test countl_zero with an unsigned 8bit wide integer type.
+ >>> test_countl_zero()
+ 3
+ """
+ cdef uint8_t x = 24
+ cdef int res = countl_zero[uint8_t](x)
+ return res
+
+def test_countr_zero():
+ """
+ Test countr_zero with a unsigned 8bit wide integer type.
+ >>> test_countr_zero()
+ 3
+ """
+ cdef uint8_t x = 24
+ cdef int res = countr_zero[uint8_t](x)
+ return res
+
+def test_countl_one():
+ """
+ Test countl_one with an unsigned 8bit wide integer type.
+ >>> test_countl_one()
+ 3
+ """
+ cdef uint8_t x = 231
+ cdef int res = countl_one[uint8_t](x)
+ return res
+
+def test_countr_one():
+ """
+ Test countr_one with an unsigned 8bit wide integer type.
+ >>> test_countr_one()
+ 3
+ """
+ cdef uint8_t x = 231
+ cdef int res = countr_one[uint8_t](x)
+ return res
+
+def test_popcount():
+ """
+ Test popcount with an unsigned 8bit wide integer type.
+ >>> test_popcount()
+ 8
+ """
+ cdef uint8_t x = 255
+ cdef int res = popcount[uint8_t](x)
+ return res
diff --git a/tests/run/cpp_stl_function.pyx b/tests/run/cpp_stl_function.pyx
index 723773481..14a92c586 100644
--- a/tests/run/cpp_stl_function.pyx
+++ b/tests/run/cpp_stl_function.pyx
@@ -49,25 +49,25 @@ cdef class FunctionKeeper:
"""
cdef cpp_function_lib.FunctionKeeper* function_keeper
- cdef function[double(double, int)]* _get_function_ptr_from_name(self, function_name):
- cdef function[double(double, int)] *f
+ cdef function[double(double, int) noexcept]* _get_function_ptr_from_name(self, function_name):
+ cdef function[double(double, int) noexcept] *f
if function_name == 'add_one':
- f = new function[double(double, int)](cpp_function_lib.add_one)
+ f = new function[double(double, int) noexcept](cpp_function_lib.add_one)
elif function_name == 'add_two':
- f = new function[double(double, int)](cpp_function_lib.add_two)
+ f = new function[double(double, int) noexcept](cpp_function_lib.add_two)
elif function_name == 'AddAnotherFunctor5':
- f = new function[double(double, int)]()
+ f = new function[double(double, int) noexcept]()
f[0] = cpp_function_lib.AddAnotherFunctor(5.0)
elif function_name == 'NULL':
- f = new function[double(double, int)](NULL)
+ f = new function[double(double, int) noexcept](NULL)
elif function_name == 'default':
- f = new function[double(double, int)]()
+ f = new function[double(double, int) noexcept]()
return f
def __cinit__(self, function_name):
- cdef function[double(double, int)] *f = self._get_function_ptr_from_name(function_name)
+ cdef function[double(double, int) noexcept] *f = self._get_function_ptr_from_name(function_name)
self.function_keeper = new cpp_function_lib.FunctionKeeper(f[0])
del f
@@ -81,6 +81,6 @@ cdef class FunctionKeeper:
return <bint> self.function_keeper.get_function()
def set_function(self, function_name):
- cdef function[double(double, int)] *f = self._get_function_ptr_from_name(function_name)
+ cdef function[double(double, int) noexcept] *f = self._get_function_ptr_from_name(function_name)
self.function_keeper.set_function(f[0])
del f
diff --git a/tests/run/cpp_stl_numeric_ops_cpp17.pyx b/tests/run/cpp_stl_numeric_ops_cpp17.pyx
index eba4d2beb..e89540d35 100644
--- a/tests/run/cpp_stl_numeric_ops_cpp17.pyx
+++ b/tests/run/cpp_stl_numeric_ops_cpp17.pyx
@@ -3,7 +3,7 @@
from libcpp.numeric cimport (reduce, transform_reduce, inclusive_scan,
exclusive_scan, transform_inclusive_scan,
- transform_exclusive_scan)
+ transform_exclusive_scan, gcd, lcm)
from libcpp.execution cimport seq
from libcpp.vector cimport vector
@@ -275,3 +275,19 @@ def test_transform_exclusive_scan_with_execpolicy(vector[int] v, int init):
cdef vector[int] out = vector[int](v.size())
transform_exclusive_scan(seq, v.begin(), v.end(), out.begin(), init, add_integers, multiply_with_2)
return out
+
+def test_gcd(int a, int b):
+ """
+ Test gcd
+ >>> test_gcd(12, 18)
+ 6
+ """
+ return gcd[int](a, b)
+
+def test_lcm(int a, int b):
+ """
+ Test lcm
+ >>> test_lcm(45, 75)
+ 225
+ """
+ return lcm[int](a, b) \ No newline at end of file
diff --git a/tests/run/cpp_stl_numeric_ops_cpp20.pyx b/tests/run/cpp_stl_numeric_ops_cpp20.pyx
new file mode 100644
index 000000000..e3a8c01df
--- /dev/null
+++ b/tests/run/cpp_stl_numeric_ops_cpp20.pyx
@@ -0,0 +1,23 @@
+# mode: run
+# tag: cpp, werror, cpp20
+
+from libcpp.numeric cimport midpoint
+
+def test_midpoint_integer(int a, int b):
+ """
+ Test midpoint for integer types
+ >>> test_midpoint_integer(2, 6)
+ 4
+ """
+ cdef int res = midpoint[int](a, b)
+ return res
+
+
+def test_midpoint_float(float a, float b):
+ """
+ Test midpoint for float
+ >>> test_midpoint_float(2, 6)
+ 4.0
+ """
+ cdef float res = midpoint[float](a, b)
+ return res
diff --git a/tests/run/cpp_stl_random.pyx b/tests/run/cpp_stl_random.pyx
index 58f7db040..3b074c278 100644
--- a/tests/run/cpp_stl_random.pyx
+++ b/tests/run/cpp_stl_random.pyx
@@ -1,7 +1,16 @@
# mode: run
# tag: cpp, cpp11
-from libcpp.random cimport mt19937
+from libcpp.random cimport mt19937, mt19937_64, random_device, uniform_int_distribution, \
+ uniform_real_distribution, bernoulli_distribution, binomial_distribution, \
+ geometric_distribution, negative_binomial_distribution, poisson_distribution, \
+ exponential_distribution, gamma_distribution, weibull_distribution, \
+ extreme_value_distribution, normal_distribution, lognormal_distribution, \
+ chi_squared_distribution, cauchy_distribution, fisher_f_distribution, student_t_distribution
+from libc.float cimport DBL_MAX as DBL_MAX_
+
+
+DBL_MAX = DBL_MAX_
def mt19937_seed_test():
@@ -9,8 +18,8 @@ def mt19937_seed_test():
>>> print(mt19937_seed_test())
1608637542
"""
- cdef mt19937 rd = mt19937(42)
- return rd()
+ cdef mt19937 gen = mt19937(42)
+ return gen()
def mt19937_reseed_test():
@@ -18,9 +27,9 @@ def mt19937_reseed_test():
>>> print(mt19937_reseed_test())
1608637542
"""
- cdef mt19937 rd
- rd.seed(42)
- return rd()
+ cdef mt19937 gen
+ gen.seed(42)
+ return gen()
def mt19937_min_max():
@@ -31,8 +40,8 @@ def mt19937_min_max():
>>> print(y) # 2 ** 32 - 1 because mt19937 is 32 bit.
4294967295
"""
- cdef mt19937 rd
- return rd.min(), rd.max()
+ cdef mt19937 gen
+ return gen.min(), gen.max()
def mt19937_discard(z):
@@ -43,13 +52,297 @@ def mt19937_discard(z):
>>> print(y)
1972458954
"""
- cdef mt19937 rd = mt19937(42)
+ cdef mt19937 gen = mt19937(42)
# Throw away z random numbers.
- rd.discard(z)
- a = rd()
+ gen.discard(z)
+ a = gen()
# Iterate over z random numbers.
- rd.seed(42)
+ gen.seed(42)
for _ in range(z + 1):
- b = rd()
+ b = gen()
return a, b
+
+
+def mt19937_64_seed_test():
+ """
+ >>> print(mt19937_64_seed_test())
+ 13930160852258120406
+ """
+ cdef mt19937_64 gen = mt19937_64(42)
+ return gen()
+
+
+def mt19937_64_reseed_test():
+ """
+ >>> print(mt19937_64_reseed_test())
+ 13930160852258120406
+ """
+ cdef mt19937_64 gen
+ gen.seed(42)
+ return gen()
+
+
+def mt19937_64_min_max():
+ """
+ >>> x, y = mt19937_64_min_max()
+ >>> print(x)
+ 0
+ >>> print(y) # 2 ** 64 - 1 because mt19937_64 is 64 bit.
+ 18446744073709551615
+ """
+ cdef mt19937_64 gen
+ return gen.min(), gen.max()
+
+
+def mt19937_64_discard(z):
+ """
+ >>> x, y = mt19937_64_discard(13)
+ >>> print(x)
+ 11756813601242511406
+ >>> print(y)
+ 11756813601242511406
+ """
+ cdef mt19937_64 gen = mt19937_64(42)
+ # Throw away z random numbers.
+ gen.discard(z)
+ a = gen()
+
+ # Iterate over z random numbers.
+ gen.seed(42)
+ for _ in range(z + 1):
+ b = gen()
+ return a, b
+
+
+ctypedef fused any_dist:
+ uniform_int_distribution[int]
+ uniform_real_distribution[double]
+ bernoulli_distribution
+ binomial_distribution[int]
+ geometric_distribution[int]
+ negative_binomial_distribution[int]
+ poisson_distribution[int]
+ exponential_distribution[double]
+ gamma_distribution[double]
+ weibull_distribution[double]
+ extreme_value_distribution[double]
+ normal_distribution[double]
+ lognormal_distribution[double]
+ chi_squared_distribution[double]
+ cauchy_distribution[double]
+ fisher_f_distribution[double]
+ student_t_distribution[double]
+
+
+cdef sample_or_range(any_dist dist, bint sample):
+ """
+ This helper function returns a sample if `sample` is truthy and the range of the distribution
+ if `sample` is falsy. We use a fused type to avoid duplicating the conditional statement in each
+ distribution test.
+ """
+ cdef random_device rd
+ if sample:
+ dist(mt19937(rd()))
+ else:
+ return dist.min(), dist.max()
+
+
+def uniform_int_distribution_test(a, b, sample=True):
+ """
+ >>> uniform_int_distribution_test(2, 3)
+ >>> uniform_int_distribution_test(5, 9, False)
+ (5, 9)
+ """
+ cdef uniform_int_distribution[int] dist = uniform_int_distribution[int](a, b)
+ return sample_or_range[uniform_int_distribution[int]](dist, sample)
+
+
+def uniform_real_distribution_test(a, b, sample=True):
+ """
+ >>> x = uniform_real_distribution_test(4, 5)
+ >>> uniform_real_distribution_test(3, 8, False)
+ (3.0, 8.0)
+ """
+ cdef uniform_real_distribution[double] dist = uniform_real_distribution[double](a, b)
+ return sample_or_range[uniform_real_distribution[double]](dist, sample)
+
+
+def bernoulli_distribution_test(proba, sample=True):
+ """
+ >>> bernoulli_distribution_test(0.2)
+ >>> bernoulli_distribution_test(0.7, False)
+ (False, True)
+ """
+ cdef bernoulli_distribution dist = bernoulli_distribution(proba)
+ return sample_or_range[bernoulli_distribution](dist, sample)
+
+
+def binomial_distribution_test(n, proba, sample=True):
+ """
+ >>> binomial_distribution_test(10, 0.7)
+ >>> binomial_distribution_test(75, 0.3, False)
+ (0, 75)
+ """
+ cdef binomial_distribution[int] dist = binomial_distribution[int](n, proba)
+ return sample_or_range[binomial_distribution[int]](dist, sample)
+
+
+def geometric_distribution_test(proba, sample=True):
+ """
+ >>> geometric_distribution_test(.4)
+ >>> geometric_distribution_test(0.2, False) # 2147483647 = 2 ** 31 - 1
+ (0, 2147483647)
+ """
+ cdef geometric_distribution[int] dist = geometric_distribution[int](proba)
+ return sample_or_range[geometric_distribution[int]](dist, sample)
+
+
+def negative_binomial_distribution_test(n, p, sample=True):
+ """
+ >>> negative_binomial_distribution_test(5, .1)
+ >>> negative_binomial_distribution_test(10, 0.2, False) # 2147483647 = 2 ** 31 - 1
+ (0, 2147483647)
+ """
+ cdef negative_binomial_distribution[int] dist = negative_binomial_distribution[int](n, p)
+ return sample_or_range[negative_binomial_distribution[int]](dist, sample)
+
+
+def poisson_distribution_test(rate, sample=True):
+ """
+ >>> poisson_distribution_test(7)
+ >>> poisson_distribution_test(7, False) # 2147483647 = 2 ** 31 - 1
+ (0, 2147483647)
+ """
+ cdef poisson_distribution[int] dist = poisson_distribution[int](rate)
+ return sample_or_range[poisson_distribution[int]](dist, sample)
+
+
+def exponential_distribution_test(rate, sample=True):
+ """
+ >>> x = exponential_distribution_test(6)
+ >>> l, u = exponential_distribution_test(1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef exponential_distribution[double] dist = exponential_distribution[double](rate)
+ return sample_or_range[exponential_distribution[double]](dist, sample)
+
+
+def gamma_distribution_test(shape, scale, sample=True):
+ """
+ >>> gamma_distribution_test(3, 4)
+ >>> l, u = gamma_distribution_test(1, 1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef gamma_distribution[double] dist = gamma_distribution[double](shape, scale)
+ return sample_or_range[gamma_distribution[double]](dist, sample)
+
+
+def weibull_distribution_test(shape, scale, sample=True):
+ """
+ >>> weibull_distribution_test(3, 2)
+ >>> l, u = weibull_distribution_test(1, 1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef weibull_distribution[double] dist = weibull_distribution[double](shape, scale)
+ return sample_or_range[weibull_distribution[double]](dist, sample)
+
+
+def extreme_value_distribution_test(shape, scale, sample=True):
+ """
+ >>> extreme_value_distribution_test(3, 0.1)
+ >>> l, u = extreme_value_distribution_test(1, 1, False)
+ >>> l == -DBL_MAX or l == -float("inf")
+ True
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef extreme_value_distribution[double] dist = extreme_value_distribution[double](shape, scale)
+ return sample_or_range[extreme_value_distribution[double]](dist, sample)
+
+
+def normal_distribution_test(loc, scale, sample=True):
+ """
+ >>> normal_distribution_test(3, 2)
+ >>> l, u = normal_distribution_test(1, 1, False)
+ >>> l == -DBL_MAX or l == -float("inf")
+ True
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef normal_distribution[double] dist = normal_distribution[double](loc, scale)
+ return sample_or_range[normal_distribution[double]](dist, sample)
+
+
+def lognormal_distribution_test(loc, scale, sample=True):
+ """
+ >>> lognormal_distribution_test(3, 2)
+ >>> l, u = lognormal_distribution_test(1, 1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef lognormal_distribution[double] dist = lognormal_distribution[double](loc, scale)
+ return sample_or_range[lognormal_distribution[double]](dist, sample)
+
+
+def chi_squared_distribution_test(dof, sample=True):
+ """
+ >>> x = chi_squared_distribution_test(9)
+ >>> l, u = chi_squared_distribution_test(5, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef chi_squared_distribution[double] dist = chi_squared_distribution[double](dof)
+ return sample_or_range[chi_squared_distribution[double]](dist, sample)
+
+
+def cauchy_distribution_test(loc, scale, sample=True):
+ """
+ >>> cauchy_distribution_test(3, 9)
+ >>> l, u = cauchy_distribution_test(1, 1, False)
+ >>> l == -DBL_MAX or l == -float("inf")
+ True
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef cauchy_distribution[double] dist = cauchy_distribution[double](loc, scale)
+ return sample_or_range[cauchy_distribution[double]](dist, sample)
+
+
+def fisher_f_distribution_test(m, n, sample=True):
+ """
+ >>> x = fisher_f_distribution_test(9, 11)
+ >>> l, u = fisher_f_distribution_test(1, 1, False)
+ >>> l
+ 0.0
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef fisher_f_distribution[double] dist = fisher_f_distribution[double](m, n)
+ return sample_or_range[fisher_f_distribution[double]](dist, sample)
+
+
+def student_t_distribution_test(dof, sample=True):
+ """
+ >>> x = student_t_distribution_test(13)
+ >>> l, u = student_t_distribution_test(1, False)
+ >>> l == -DBL_MAX or l == -float("inf")
+ True
+ >>> u == DBL_MAX or u == float("inf")
+ True
+ """
+ cdef student_t_distribution[double] dist = student_t_distribution[double](dof)
+ return sample_or_range[student_t_distribution[double]](dist, sample)
diff --git a/tests/run/cpp_stl_string_cpp20.pyx b/tests/run/cpp_stl_string_cpp20.pyx
new file mode 100644
index 000000000..f3a2b80d1
--- /dev/null
+++ b/tests/run/cpp_stl_string_cpp20.pyx
@@ -0,0 +1,61 @@
+# mode: run
+# tag: cpp, werror, cpp20
+
+from libcpp cimport bool
+from libcpp.string cimport string
+
+b_A = b'A'
+b_F = b'F'
+b_abc = b"ABC"
+b_def = b"DEF"
+
+def test_string_starts_with_char(bytes py_str):
+ """
+ Test std::string.starts_with() with char type argument
+ >>> test_string_starts_with_char(b'A')
+ True
+ >>> test_string_starts_with_char(b'F')
+ False
+ """
+ cdef char c = py_str[0]
+ cdef string s = b"ABCDEF"
+ return s.starts_with(c)
+
+
+def test_string_starts_with_cstr(bytes py_str):
+ """
+ Test std::string.starts_with() with c str type argument (char*)
+ >>> test_string_starts_with_cstr(b"ABC")
+ True
+ >>> test_string_starts_with_cstr(b"DEF")
+ False
+ """
+ cdef char* c = py_str
+ cdef string s = b"ABCDEF"
+ return s.starts_with(c)
+
+
+def test_string_ends_with_char(bytes py_str):
+ """
+ Test std::string.ends_with() with char type argument
+ >>> test_string_ends_with_char(b'F')
+ True
+ >>> test_string_ends_with_char(b'A')
+ False
+ """
+ cdef char c = py_str[0]
+ cdef string s = b"ABCDEF"
+ return s.ends_with(c)
+
+
+def test_string_ends_with_cstr(bytes py_str):
+ """
+ Test std::string.ends_with() with c str type argument (char*)
+ >>> test_string_ends_with_cstr(b"DEF")
+ True
+ >>> test_string_ends_with_cstr(b"ABC")
+ False
+ """
+ cdef char* c = py_str
+ cdef string s = b"ABCDEF"
+ return s.ends_with(c) \ No newline at end of file
diff --git a/tests/run/cython_no_files.srctree b/tests/run/cython_no_files.srctree
new file mode 100644
index 000000000..455258c03
--- /dev/null
+++ b/tests/run/cython_no_files.srctree
@@ -0,0 +1,34 @@
+PYTHON test_cythonize_no_files.py
+PYTHON test_cython_no_files.py
+
+######## a.py ###########
+a = 1
+
+######## b.py ###########
+b = 2
+
+######## c.pyx ###########
+c = 3
+
+######## d.pyx ###########
+d = 4
+
+######## test_cythonize_no_files.py ###########
+import subprocess
+import sys
+
+cmd = [sys.executable, '-c', 'from Cython.Build.Cythonize import main; main()', 'a.py', 'b.py', 'c.py', '*.pyx']
+proc = subprocess.Popen(cmd, stderr=subprocess.PIPE)
+_, err = proc.communicate()
+assert proc.returncode == 1, proc.returncode
+assert b"No such file or directory: 'c.py'" in err, err
+
+######## test_cython_no_files.py ###########
+import subprocess
+import sys
+
+cmd = [sys.executable, '-c', 'from Cython.Compiler.Main import main; main(command_line = 1)', 'a.py', 'b.py', 'c.py', '*.pyx']
+proc = subprocess.Popen(cmd, stderr=subprocess.PIPE)
+_, err = proc.communicate()
+assert proc.returncode == 1, proc.returncode
+assert b"No such file or directory: 'c.py'" in err, err
diff --git a/tests/run/decorators.pyx b/tests/run/decorators.pyx
index 54623e0cb..64b0f0e20 100644
--- a/tests/run/decorators.pyx
+++ b/tests/run/decorators.pyx
@@ -17,6 +17,10 @@ __doc__ = u"""
3
>>> i.HERE
1
+ >>> i_called_directly(4)
+ 3
+ >>> i_called_directly.HERE
+ 1
"""
class wrap:
@@ -62,6 +66,35 @@ a = A()
def i(x):
return x - 1
+@A().decorate
+def i_called_directly(x):
+ # PEP 614 means this now works
+ return x - 1
+
+list_of_decorators = [decorate, decorate2]
+
+@list_of_decorators[0]
+def test_index_from_decorator_list0(a, b):
+ """
+ PEP 614 means this now works
+ >>> test_index_from_decorator_list0(1, 2)
+ 4
+ >>> test_index_from_decorator_list0.HERE
+ 1
+ """
+ return a+b+1
+
+@list_of_decorators[1](1,2)
+def test_index_from_decorator_list1(a, b):
+ """
+ PEP 614 means this now works
+ >>> test_index_from_decorator_list1(1, 2)
+ 4
+ >>> test_index_from_decorator_list1.HERE
+ 1
+ """
+ return a+b+1
+
def append_to_list_decorator(lst):
def do_append_to_list_dec(func):
def new_func():
@@ -81,3 +114,26 @@ def outer(arg1, arg2):
def method():
return [4]
return method()
+
+class HasProperty(object):
+ """
+ >>> hp = HasProperty()
+ >>> hp.value
+ 0
+ >>> hp.value = 1
+ >>> hp.value
+ 1
+ """
+ def __init__(self) -> None:
+ self._value = 0
+
+ @property
+ def value(self) -> int:
+ return self._value
+
+ # https://github.com/cython/cython/issues/4836
+ # The variable tracker was confusing "value" in the decorator
+ # for "value" in the argument list
+ @value.setter
+ def value(self, value: int):
+ self._value = value
diff --git a/tests/run/delete.pyx b/tests/run/delete.pyx
index ec0b6c71a..6127fa9f1 100644
--- a/tests/run/delete.pyx
+++ b/tests/run/delete.pyx
@@ -29,15 +29,33 @@ def del_item(L, o):
del L[o]
return L
+
@cython.test_assert_path_exists('//DelStatNode//IndexNode//NoneCheckNode')
def del_dict(dict D, o):
"""
>>> del_dict({1: 'a', 2: 'b'}, 1)
{2: 'b'}
+ >>> del_dict(None, 1) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: 'NoneType' object ...
"""
del D[o]
return D
+
+@cython.test_fail_if_path_exists('//DelStatNode//IndexNode//NoneCheckNode')
+def del_dict_ann(D: dict, o):
+ """
+ >>> del_dict_ann({1: 'a', 2: 'b'}, 1)
+ {2: 'b'}
+ >>> del_dict_ann(None, 1)
+ Traceback (most recent call last):
+ TypeError: Argument 'D' has incorrect type (expected dict, got NoneType)
+ """
+ del D[o]
+ return D
+
+
@cython.test_fail_if_path_exists('//NoneCheckNode')
def del_dict_from_literal(o):
"""
diff --git a/tests/run/exceptionpropagation.pyx b/tests/run/exceptionpropagation.pyx
index 2c79bf26e..2466550d5 100644
--- a/tests/run/exceptionpropagation.pyx
+++ b/tests/run/exceptionpropagation.pyx
@@ -56,4 +56,26 @@ def test_except_promotion_compare(bint fire):
...
RuntimeError
"""
- except_promotion_compare(fire) \ No newline at end of file
+ except_promotion_compare(fire)
+
+
+cdef int cdef_function_that_raises():
+ raise RuntimeError
+
+cdef int cdef_noexcept_function_that_raises() noexcept:
+ raise RuntimeError
+
+def test_except_raise_by_default():
+ """
+ >>> test_except_raise_by_default()
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+ """
+ cdef_function_that_raises()
+
+def test_noexcept():
+ """
+ >>> test_noexcept()
+ """
+ cdef_noexcept_function_that_raises()
diff --git a/tests/run/exceptions_nogil.pyx b/tests/run/exceptions_nogil.pyx
index 2bcedd9ed..31af84ae2 100644
--- a/tests/run/exceptions_nogil.pyx
+++ b/tests/run/exceptions_nogil.pyx
@@ -1,7 +1,7 @@
# mode: run
# tag: nogil, withgil, exceptions
-cdef void foo_nogil(int i) nogil except *:
+cdef void foo_nogil(int i) except * nogil:
if i != 0: raise ValueError("huhu !")
diff --git a/tests/run/extern_varobject_extensions.srctree b/tests/run/extern_varobject_extensions.srctree
new file mode 100644
index 000000000..c927b8147
--- /dev/null
+++ b/tests/run/extern_varobject_extensions.srctree
@@ -0,0 +1,94 @@
+# mode: run
+
+PYTHON setup.py build_ext --inplace
+PYTHON -c "import classes"
+PYTHON -c "import test_inherit"
+
+######## setup.py ########
+
+from Cython.Build.Dependencies import cythonize
+
+from distutils.core import setup
+
+setup(
+ ext_modules=cythonize("*.pyx"),
+)
+
+###### dummy_module.py ###########
+
+tpl = tuple
+lst = list
+
+###### classes.pxd ################
+
+cdef extern from *:
+ # apart from list, these are all variable sized types
+ # and Cython shouldn't trip up about the struct size
+ ctypedef class dummy_module.tpl [object PyTupleObject]:
+ pass
+ ctypedef class dummy_module.lst [object PyListObject]:
+ pass
+ ctypedef class types.CodeType [object PyCodeObject]:
+ pass
+ # Note that bytes doesn't work here because it further reduces
+ # the tp_basicsize to save space
+
+##### classes.pyx #################
+
+def check_tuple(tpl x):
+ assert isinstance(x, tuple)
+
+def check_list(lst x):
+ assert isinstance(x, list)
+
+def check_code(CodeType x):
+ import types
+ assert isinstance(x, types.CodeType)
+
+check_tuple((1, 2))
+check_list([1, 2])
+check_code(eval("lambda: None").__code__)
+
+##### failed_inherit1.pyx #############
+
+from classes cimport tpl
+
+cdef class SuperTuple(tpl):
+ cdef int a # importing this gives an error message
+
+##### failed_inherit2.pyx #############
+
+from classes cimport tpl
+
+cdef class SuperTuple(tpl):
+ # adding a method creates a vtab so should also fail
+ cdef int func(self):
+ return 1
+
+##### successful_inherit.pyx #########
+
+from classes cimport lst, tpl
+
+cdef class SuperList(lst):
+ cdef int a # This works OK
+
+cdef class SuperTuple(tpl):
+ # This is actually OK because it doesn't add anything
+ pass
+
+##### test_inherit.py ################
+
+try:
+ import failed_inherit1
+except TypeError as e:
+ assert e.args[0] == "inheritance from PyVarObject types like 'tuple' not currently supported", e.args[0]
+else:
+ assert False
+try:
+ import failed_inherit2
+except TypeError as e:
+ assert e.args[0] == "inheritance from PyVarObject types like 'tuple' not currently supported", e.args[0]
+else:
+ assert False
+
+import successful_inherit
diff --git a/tests/run/extra_patma.pyx b/tests/run/extra_patma.pyx
new file mode 100644
index 000000000..b2303f45b
--- /dev/null
+++ b/tests/run/extra_patma.pyx
@@ -0,0 +1,18 @@
+# mode: run
+
+cdef bint is_null(int* x):
+ return False # disabled - currently just a parser test
+ match x:
+ case NULL:
+ return True
+ case _:
+ return False
+
+def test_is_null():
+ """
+ >>> test_is_null()
+ """
+ cdef int some_int = 1
+ return # disabled - currently just a parser test
+ assert is_null(&some_int) == False
+ assert is_null(NULL) == True
diff --git a/tests/run/funcexc_iter_T228.pyx b/tests/run/funcexc_iter_T228.pyx
index 4b81166f6..40db3afb2 100644
--- a/tests/run/funcexc_iter_T228.pyx
+++ b/tests/run/funcexc_iter_T228.pyx
@@ -65,3 +65,89 @@ def double_raise(py_iterator):
print(sys.exc_info()[0] is ValueError or sys.exc_info()[0])
a = list(cy_iterator())
print(sys.exc_info()[0] is ValueError or sys.exc_info()[0])
+
+
+###### Tests to do with the optimization of StopIteration to "return NULL" #######
+# we're mainly checking that
+# 1. Calling __next__ manually doesn't crash (the wrapper function adds the exception)
+# 2. if you raise a value then that value gets raised
+# 3. putting the exception in various places try...finally / try...except blocks works
+
+def call_next_directly():
+ """
+ >>> call_next_directly()
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ """
+ cy_iterator().__next__()
+
+cdef class cy_iter_many_options:
+ cdef what
+ def __init__(self, what):
+ self.what = what
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if self.what == "StopIteration in finally no return":
+ try:
+ raise StopIteration
+ finally:
+ print "Finally..."
+ elif self.what == "StopIteration in finally return":
+ try:
+ raise StopIteration
+ finally:
+ self.what = None
+ return "in finally" # but will stop iterating next time
+ elif self.what == "StopIteration from finally":
+ try:
+ raise ValueError
+ finally:
+ raise StopIteration
+ elif self.what == "catch StopIteration":
+ try:
+ raise StopIteration
+ except StopIteration:
+ self.what = None
+ return "in except" # but will stop next time
+ elif self.what == "don't catch StopIteration":
+ try:
+ raise StopIteration
+ except ValueError:
+ return 0
+ elif self.what == "StopIteration from except":
+ try:
+ raise ValueError
+ except ValueError:
+ raise StopIteration
+ elif self.what == "StopIteration with value":
+ raise StopIteration("I'm a value!")
+ elif self.what is None:
+ raise StopIteration
+ else:
+ raise ValueError("self.what didn't match anything")
+
+def test_cy_iter_many_options(option):
+ """
+ >>> test_cy_iter_many_options("StopIteration in finally no return")
+ Finally...
+ []
+ >>> test_cy_iter_many_options("StopIteration in finally return")
+ ['in finally']
+ >>> test_cy_iter_many_options("StopIteration from finally")
+ []
+ >>> test_cy_iter_many_options("catch StopIteration")
+ ['in except']
+ >>> test_cy_iter_many_options("don't catch StopIteration")
+ []
+ >>> try:
+ ... cy_iter_many_options("StopIteration with value").__next__()
+ ... except StopIteration as e:
+ ... print(e.args)
+ ("I'm a value!",)
+ """
+ return list(cy_iter_many_options(option))
+
diff --git a/tests/run/function_self.py b/tests/run/function_self.py
index 938810491..945da404f 100644
--- a/tests/run/function_self.py
+++ b/tests/run/function_self.py
@@ -25,13 +25,8 @@ def fused(x):
>>> hasattr(nested, "__self__")
False
- #>>> hasattr(fused, "__self__") # FIXME this fails for fused functions
- #False
- # but this is OK:
- >>> fused.__self__ #doctest: +ELLIPSIS
- Traceback (most recent call last):
- ...
- AttributeError: 'function' object has no attribute '__self__'...
+ >>> hasattr(fused, "__self__")
+ False
"""
def nested_in_fused(y):
return x+y
@@ -74,15 +69,11 @@ if sys.version_info[0] > 2 or cython.compiled:
if cython.compiled:
__doc__ = """
- >>> fused['double'].__self__ #doctest: +ELLIPSIS
- Traceback (most recent call last):
- ...
- AttributeError: 'function' object has no attribute '__self__'...
+ >>> hasattr(fused['double'], '__self__')
+ False
- >>> C.fused['double'].__self__ #doctest: +ELLIPSIS
- Traceback (most recent call last):
- ...
- AttributeError: 'function' object has no attribute '__self__'...
+ >>> hasattr(C.fused['double'], '__self__')
+ False
>>> c = C()
>>> c.fused['double'].__self__ is c #doctest: +ELLIPSIS
diff --git a/tests/run/fused_cpp.pyx b/tests/run/fused_cpp.pyx
index 9f3bb5104..95b326904 100644
--- a/tests/run/fused_cpp.pyx
+++ b/tests/run/fused_cpp.pyx
@@ -2,6 +2,7 @@
cimport cython
from libcpp.vector cimport vector
+from libcpp.map cimport map
from libcpp.typeinfo cimport type_info
from cython.operator cimport typeid
@@ -41,3 +42,49 @@ def typeid_call2(cython.integral x):
"""
cdef const type_info* a = &typeid(cython.integral)
return a[0] == tidint[0]
+
+cdef fused_ref(cython.integral& x):
+ return x*2
+
+def test_fused_ref(int x):
+ """
+ >>> test_fused_ref(5)
+ (10, 10)
+ """
+ return fused_ref(x), fused_ref[int](x)
+
+ctypedef fused nested_fused:
+ vector[cython.integral]
+
+cdef vec_of_fused(nested_fused v):
+ x = v[0]
+ return cython.typeof(x)
+
+def test_nested_fused():
+ """
+ >>> test_nested_fused()
+ int
+ long
+ """
+ cdef vector[int] vi = [0,1]
+ cdef vector[long] vl = [0,1]
+ print vec_of_fused(vi)
+ print vec_of_fused(vl)
+
+ctypedef fused nested_fused2:
+ map[cython.integral, cython.floating]
+
+cdef map_of_fused(nested_fused2 m):
+ for pair in m:
+ return cython.typeof(pair.first), cython.typeof(pair.second)
+
+def test_nested_fused2():
+ """
+ >>> test_nested_fused2()
+ ('int', 'float')
+ ('long', 'double')
+ """
+ cdef map[int, float] mif = { 0: 0.0 }
+ cdef map[long, double] mld = { 0: 0.0 }
+ print map_of_fused(mif)
+ print map_of_fused(mld)
diff --git a/tests/run/generators_py.py b/tests/run/generators_py.py
index 914252bf4..9ec6991cf 100644
--- a/tests/run/generators_py.py
+++ b/tests/run/generators_py.py
@@ -387,3 +387,20 @@ def test_yield_in_const_conditional_true():
"""
if True:
print((yield 1))
+
+
+def test_generator_scope():
+ """
+ Tests that the function is run at the correct time
+ (i.e. when the generator is created, not when it's run)
+ >>> list(test_generator_scope())
+ inner running
+ generator created
+ [0, 10]
+ """
+ def inner(val):
+ print("inner running")
+ return [0, val]
+ gen = (a for a in inner(10))
+ print("generator created")
+ return gen
diff --git a/tests/run/genexpr_arg_order.py b/tests/run/genexpr_arg_order.py
new file mode 100644
index 000000000..5b9e27238
--- /dev/null
+++ b/tests/run/genexpr_arg_order.py
@@ -0,0 +1,181 @@
+# mode: run
+# tag: genexpr, py3, py2
+
+from __future__ import print_function
+
+# Tests that function arguments to generator expressions are
+# evaluated in the correct order (even after optimization)
+# WARNING: there may be a degree of luck in this working correctly (since it
+# isn't strictly enforced). Therefore be prepared to disable these
+# tests if they stop working and aren't easily fixed.
+
+import cython
+
+@cython.cfunc
+@cython.returns(cython.int)
+def zero():
+ print("In zero")
+ return 0
+
+@cython.cfunc
+@cython.returns(cython.int)
+def five():
+ print("In five")
+ return 5
+
+@cython.cfunc
+@cython.returns(cython.int)
+def one():
+ print("In one")
+ return 1
+
+# FIXME - I don't think this is easy to enforce unfortunately, but it is slightly wrong
+#@cython.test_assert_path_exists("//ForFromStatNode")
+#def genexp_range_argument_order():
+# """
+# >>> list(genexp_range_argument_order())
+# In zero
+# In five
+# [0, 1, 2, 3, 4]
+# """
+# return (a for a in range(zero(), five()))
+#
+#@cython.test_assert_path_exists("//ForFromStatNode")
+#@cython.test_assert_path_exists(
+# "//InlinedGeneratorExpressionNode",
+# "//ComprehensionAppendNode")
+#def list_range_argument_order():
+# """
+# >>> list_range_argument_order()
+# In zero
+# In five
+# [0, 1, 2, 3, 4]
+# """
+# return list(a for a in range(zero(), five()))
+
+@cython.test_assert_path_exists("//ForFromStatNode")
+def genexp_array_slice_order():
+ """
+ >>> list(genexp_array_slice_order())
+ In zero
+ In five
+ [0, 1, 2, 3, 4]
+ """
+ # TODO ideally find a way to add the evaluation of x to this test too
+ x = cython.declare(cython.int[20])
+ x = list(range(20))
+ return (a for a in x[zero():five()])
+
+@cython.test_assert_path_exists("//ForFromStatNode")
+@cython.test_assert_path_exists(
+ "//InlinedGeneratorExpressionNode",
+ "//ComprehensionAppendNode")
+def list_array_slice_order():
+ """
+ >>> list(list_array_slice_order())
+ In zero
+ In five
+ [0, 1, 2, 3, 4]
+ """
+ # TODO ideally find a way to add the evaluation of x to this test too
+ x = cython.declare(cython.int[20])
+ x = list(range(20))
+ return list(a for a in x[zero():five()])
+
+class IndexableClass:
+ def __getitem__(self, idx):
+ print("In indexer")
+ return [ idx.start, idx.stop, idx.step ]
+
+class NoisyAttributeLookup:
+ @property
+ def indexer(self):
+ print("Getting indexer")
+ return IndexableClass()
+
+ @property
+ def function(self):
+ print("Getting function")
+ def func(a, b, c):
+ print("In func")
+ return [a, b, c]
+ return func
+
+def genexp_index_order():
+ """
+ >>> list(genexp_index_order())
+ Getting indexer
+ In zero
+ In five
+ In one
+ In indexer
+ Made generator expression
+ [0, 5, 1]
+ """
+ obj = NoisyAttributeLookup()
+ ret = (a for a in obj.indexer[zero():five():one()])
+ print("Made generator expression")
+ return ret
+
+@cython.test_assert_path_exists("//InlinedGeneratorExpressionNode")
+def list_index_order():
+ """
+ >>> list_index_order()
+ Getting indexer
+ In zero
+ In five
+ In one
+ In indexer
+ [0, 5, 1]
+ """
+ obj = NoisyAttributeLookup()
+ return list(a for a in obj.indexer[zero():five():one()])
+
+
+def genexpr_fcall_order():
+ """
+ >>> list(genexpr_fcall_order())
+ Getting function
+ In zero
+ In five
+ In one
+ In func
+ Made generator expression
+ [0, 5, 1]
+ """
+ obj = NoisyAttributeLookup()
+ ret = (a for a in obj.function(zero(), five(), one()))
+ print("Made generator expression")
+ return ret
+
+@cython.test_assert_path_exists("//InlinedGeneratorExpressionNode")
+def list_fcall_order():
+ """
+ >>> list_fcall_order()
+ Getting function
+ In zero
+ In five
+ In one
+ In func
+ [0, 5, 1]
+ """
+ obj = NoisyAttributeLookup()
+ return list(a for a in obj.function(zero(), five(), one()))
+
+def call1():
+ print("In call1")
+ return ["a"]
+def call2():
+ print("In call2")
+ return ["b"]
+
+def multiple_genexps_to_call_order():
+ """
+ >>> multiple_genexps_to_call_order()
+ In call1
+ In call2
+ """
+ def takes_two_genexps(a, b):
+ pass
+
+ return takes_two_genexps((x for x in call1()), (x for x in call2()))
diff --git a/tests/run/genexpr_iterable_lookup_T600.pyx b/tests/run/genexpr_iterable_lookup_T600.pyx
index 945652717..c288993a6 100644
--- a/tests/run/genexpr_iterable_lookup_T600.pyx
+++ b/tests/run/genexpr_iterable_lookup_T600.pyx
@@ -35,6 +35,11 @@ def genexpr_iterable_in_closure():
result = list( x*2 for x in x if x != 'b' )
assert x == 'abc' # don't leak in Py3 code
assert f() == 'abc' # don't leak in Py3 code
+
+ # Py2 cleanup (pretty irrelevant to the actual test!)
+ import sys
+ if sys.version_info[0] == 2:
+ result = map(bytes, result)
return result
@@ -51,6 +56,7 @@ def genexpr_over_complex_arg(func, L):
def listcomp():
"""
>>> listcomp()
+ [0, 1, 5, 8]
"""
data = [('red', 5), ('blue', 1), ('yellow', 8), ('black', 0)]
data.sort(key=lambda r: r[1])
@@ -84,3 +90,15 @@ def genexpr_in_dictcomp_dictiter():
"""
d = {1:2, 3:4, 5:6}
return {k:d for k,d in d.iteritems() if d != 4}
+
+
+def genexpr_over_array_slice():
+ """
+ >>> list(genexpr_over_array_slice())
+ [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
+ """
+ cdef double x[10]
+ for i in range(10):
+ x[i] = i
+ cdef int n = 5
+ return (n for n in x[:n+1])
diff --git a/tests/run/line_trace.pyx b/tests/run/line_trace.pyx
index d6f9c3d0e..0a3dc13fa 100644
--- a/tests/run/line_trace.pyx
+++ b/tests/run/line_trace.pyx
@@ -74,7 +74,9 @@ def _create_trace_func(trace):
local_names = {}
def _trace_func(frame, event, arg):
- if sys.version_info < (3,) and 'line_trace' not in frame.f_code.co_filename:
+ if sys.version_info < (3,) and (
+ 'line_trace' not in frame.f_code.co_filename and
+ '<string>' not in frame.f_code.co_filename):
# Prevent tracing into Py2 doctest functions.
return None
@@ -153,7 +155,7 @@ def global_name(global_name):
return global_name + 321
-cdef int cy_add_nogil(int a, int b) nogil except -1:
+cdef int cy_add_nogil(int a, int b) except -1 nogil:
x = a + b # 1
return x # 2
@@ -165,19 +167,28 @@ def cy_try_except(func):
raise AttributeError(exc.args[0])
-def run_trace(func, *args, bint with_sys=False):
- """
- >>> def py_add(a,b):
- ... x = a+b
- ... return x
+# CPython 3.11 has an issue when these Python functions are implemented inside of doctests and the trace function fails.
+# https://github.com/python/cpython/issues/94381
+plain_python_functions = {}
+exec("""
+def py_add(a,b):
+ x = a+b
+ return x
+
+def py_add_with_nogil(a,b):
+ x=a; y=b # 1
+ for _ in range(1): # 2
+ z = 0 # 3
+ z += py_add(x, y) # 4
+ return z
+
+def py_return(retval=123): return retval
+""", plain_python_functions)
- >>> def py_add_with_nogil(a,b):
- ... x=a; y=b # 1
- ... for _ in range(1): # 2
- ... z = 0 # 3
- ... z += py_add(x, y) # 4
- ... return z # 5
+def run_trace(func, *args, bint with_sys=False):
+ """
+ >>> py_add = plain_python_functions['py_add']
>>> run_trace(py_add, 1, 2)
[('call', 0), ('line', 1), ('line', 2), ('return', 2)]
>>> run_trace(cy_add, 1, 2)
@@ -204,6 +215,7 @@ def run_trace(func, *args, bint with_sys=False):
>>> result[9:] # sys
[('line', 2), ('line', 5), ('return', 5)]
+ >>> py_add_with_nogil = plain_python_functions['py_add_with_nogil']
>>> result = run_trace(py_add_with_nogil, 1, 2)
>>> result[:5] # py
[('call', 0), ('line', 1), ('line', 2), ('line', 3), ('line', 4)]
@@ -239,7 +251,7 @@ def run_trace(func, *args, bint with_sys=False):
def run_trace_with_exception(func, bint with_sys=False, bint fail=False):
"""
- >>> def py_return(retval=123): return retval
+ >>> py_return = plain_python_functions["py_return"]
>>> run_trace_with_exception(py_return)
OK: 123
[('call', 0), ('line', 1), ('line', 2), ('call', 0), ('line', 0), ('return', 0), ('return', 2)]
@@ -295,10 +307,7 @@ def run_trace_with_exception(func, bint with_sys=False, bint fail=False):
def fail_on_call_trace(func, *args):
"""
- >>> def py_add(a,b):
- ... x = a+b
- ... return x
-
+ >>> py_add = plain_python_functions["py_add"]
>>> fail_on_call_trace(py_add, 1, 2)
Traceback (most recent call last):
ValueError: failing call trace!
@@ -319,17 +328,6 @@ def fail_on_call_trace(func, *args):
def fail_on_line_trace(fail_func, add_func, nogil_add_func):
"""
- >>> def py_add(a,b):
- ... x = a+b # 1
- ... return x # 2
-
- >>> def py_add_with_nogil(a,b):
- ... x=a; y=b # 1
- ... for _ in range(1): # 2
- ... z = 0 # 3
- ... z += py_add(x, y) # 4
- ... return z # 5
-
>>> result = fail_on_line_trace(None, cy_add, cy_add_with_nogil)
>>> len(result)
17
@@ -342,6 +340,8 @@ def fail_on_line_trace(fail_func, add_func, nogil_add_func):
>>> result[14:]
[('line', 2), ('line', 5), ('return', 5)]
+ >>> py_add = plain_python_functions["py_add"]
+ >>> py_add_with_nogil = plain_python_functions['py_add_with_nogil']
>>> result = fail_on_line_trace(None, py_add, py_add_with_nogil)
>>> len(result)
17
@@ -405,9 +405,7 @@ def fail_on_line_trace(fail_func, add_func, nogil_add_func):
def disable_trace(func, *args, bint with_sys=False):
"""
- >>> def py_add(a,b):
- ... x = a+b
- ... return x
+ >>> py_add = plain_python_functions["py_add"]
>>> disable_trace(py_add, 1, 2)
[('call', 0), ('line', 1)]
>>> disable_trace(py_add, 1, 2, with_sys=True)
diff --git a/tests/run/locals.pyx b/tests/run/locals.pyx
index f343fe1cb..9473ad01e 100644
--- a/tests/run/locals.pyx
+++ b/tests/run/locals.pyx
@@ -113,3 +113,13 @@ def buffers_in_locals(object[char, ndim=1] a):
cdef object[unsigned char, ndim=1] b = a
return locals()
+
+def set_comp_scope():
+ """
+ locals should be evaluated in the outer scope
+ >>> list(set_comp_scope())
+ ['something']
+ """
+ something = 1
+ return { b for b in locals().keys() }
+
diff --git a/tests/run/nogil.pyx b/tests/run/nogil.pyx
index efaee4ff6..356021149 100644
--- a/tests/run/nogil.pyx
+++ b/tests/run/nogil.pyx
@@ -71,7 +71,7 @@ def test_get_gil_in_nogil():
cdef int with_gil_func() except -1 with gil:
raise Exception("error!")
-cdef int nogil_func() nogil except -1:
+cdef int nogil_func() except -1 nogil:
with_gil_func()
def test_nogil_exception_propagation():
@@ -85,7 +85,7 @@ def test_nogil_exception_propagation():
nogil_func()
-cdef int write_unraisable() nogil:
+cdef int write_unraisable() noexcept nogil:
with gil:
raise ValueError()
diff --git a/tests/run/nogil_conditional.pyx b/tests/run/nogil_conditional.pyx
index eba22d5b2..92eff0853 100644
--- a/tests/run/nogil_conditional.pyx
+++ b/tests/run/nogil_conditional.pyx
@@ -34,7 +34,7 @@ cdef int with_gil_func() except? -1 with gil:
raise Exception("error!")
-cdef int nogil_func() nogil except? -1:
+cdef int nogil_func() except? -1 nogil:
with_gil_func()
@@ -51,7 +51,7 @@ def test_nogil_exception_propagation():
nogil_func()
-cdef int write_unraisable() nogil:
+cdef int write_unraisable() noexcept nogil:
with gil:
raise ValueError()
diff --git a/tests/run/parallel.pyx b/tests/run/parallel.pyx
index c3739b10b..40d7ac10d 100644
--- a/tests/run/parallel.pyx
+++ b/tests/run/parallel.pyx
@@ -32,7 +32,7 @@ def test_parallel():
free(buf)
-cdef int get_num_threads() with gil:
+cdef int get_num_threads() noexcept with gil:
print "get_num_threads called"
return 3
diff --git a/tests/run/pep442_tp_finalize.pyx b/tests/run/pep442_tp_finalize.pyx
index 49bed3268..6532757f9 100644
--- a/tests/run/pep442_tp_finalize.pyx
+++ b/tests/run/pep442_tp_finalize.pyx
@@ -1,5 +1,9 @@
# mode: run
+from __future__ import print_function
+
+cimport cython
+
import gc
cdef class nontrivial_del:
@@ -49,6 +53,80 @@ def test_del_and_dealloc():
gc.collect()
print("finish")
+@cython.final
+cdef class FinalClass:
+ def __init__(self):
+ print("init")
+ def __del__(self):
+ print("del")
+
+def test_final_class():
+ """
+ >>> test_final_class()
+ start
+ init
+ del
+ finish
+ """
+ print("start")
+ d = FinalClass()
+ d = None
+ gc.collect()
+ print("finish")
+
+@cython.final
+cdef class FinalInherits(nontrivial_del):
+ def __init__(self):
+ super().__init__()
+ print("FinalInherits init")
+ # no __del__ but nontrivial_del should still be called
+ def __dealloc__(self):
+ pass # define __dealloc__ so as not to fall back on base __dealloc__
+
+def test_final_inherited():
+ """
+ >>> test_final_inherited()
+ start
+ init
+ FinalInherits init
+ del
+ finish
+ """
+ print("start")
+ d = FinalInherits()
+ d = None
+ gc.collect()
+ print("finish")
+
+cdef class DummyBase:
+ pass
+
+class RegularClass:
+ __slots__ = ()
+ def __del__(self):
+ print("del")
+
+@cython.final
+cdef class FinalMultipleInheritance(DummyBase, RegularClass):
+ def __init__(self):
+ super().__init__()
+ print("init")
+ def __dealloc__(self):
+ pass
+
+def test_final_multiple_inheritance():
+ """
+ >>> test_final_multiple_inheritance()
+ start
+ init
+ del
+ finish
+ """
+ print("start")
+ d = FinalMultipleInheritance()
+ d = None
+ gc.collect()
+ print("finish")
cdef class del_with_exception:
def __init__(self):
@@ -301,3 +379,4 @@ class derived_python_child(cdef_nontrivial_parent):
raise RuntimeError("End function")
func(derived_python_child)
+
diff --git a/tests/run/pep442_tp_finalize_cimport.srctree b/tests/run/pep442_tp_finalize_cimport.srctree
new file mode 100644
index 000000000..8a257177f
--- /dev/null
+++ b/tests/run/pep442_tp_finalize_cimport.srctree
@@ -0,0 +1,67 @@
+"""
+PYTHON setup.py build_ext -i
+PYTHON runtests.py
+"""
+
+####### runtests.py #######
+
+import gc
+from testclasses import *
+import baseclasses
+
+def test_has_del():
+ inst = HasIndirectDel()
+ inst = None
+ gc.collect()
+ assert baseclasses.HasDel_del_called_count
+
+def test_no_del():
+ inst = NoIndirectDel()
+ inst = None
+ gc.collect()
+ # The test here is that it doesn't crash
+
+test_has_del()
+test_no_del()
+
+######## setup.py ########
+
+from setuptools import setup
+from Cython.Build import cythonize
+
+setup(ext_modules = cythonize('*.pyx'))
+
+####### baseclasses.pxd ######
+
+cdef class HasDel:
+ pass
+
+cdef class DoesntHaveDel:
+ pass
+
+####### baseclasses.pyx ######
+
+HasDel_del_called_count = 0
+
+cdef class HasDel:
+ def __del__(self):
+ global HasDel_del_called_count
+ HasDel_del_called_count += 1
+
+cdef class DoesntHaveDel:
+ pass
+
+######## testclasses.pyx ######
+
+cimport cython
+from baseclasses cimport HasDel, DoesntHaveDel
+
+@cython.final
+cdef class HasIndirectDel(HasDel):
+ pass
+
+@cython.final
+cdef class NoIndirectDel(DoesntHaveDel):
+ # But Cython can't tell that we don't have __del__ until runtime,
+ # so has to generate code to call it (and not crash!)
+ pass
diff --git a/tests/run/pep448_extended_unpacking.pyx b/tests/run/pep448_extended_unpacking.pyx
index d40a1c6a2..ac3e903a0 100644
--- a/tests/run/pep448_extended_unpacking.pyx
+++ b/tests/run/pep448_extended_unpacking.pyx
@@ -185,6 +185,24 @@ def unpack_list_literal_mult():
return [*([1, 2, *([4, 5] * 2)] * 3)]
+def unpack_list_tuple_mult():
+ """
+ >>> unpack_list_tuple_mult()
+ [1, 1]
+ """
+ return [*(1,) * 2]
+
+
+def unpack_list_tuple_bad_mult():
+ """
+ >>> unpack_list_tuple_bad_mult() # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ ...
+ TypeError: ... 'float'
+ """
+ return [*(1,) * 1.5]
+
+
@cython.test_fail_if_path_exists(
"//ListNode//ListNode",
"//MergedSequenceNode",
diff --git a/tests/run/pep526_variable_annotations.py b/tests/run/pep526_variable_annotations.py
index 3e30075c3..6f430c0af 100644
--- a/tests/run/pep526_variable_annotations.py
+++ b/tests/run/pep526_variable_annotations.py
@@ -15,11 +15,11 @@ except ImportError:
var = 1 # type: annotation
-var: int = 2
-fvar: float = 1.2
+var: cython.int = 2
+fvar: cython.float = 1.2
some_number: cython.int # variable without initial value
-some_list: List[int] = [] # variable with initial value
-t: Tuple[int, ...] = (1, 2, 3)
+some_list: List[cython.int] = [] # variable with initial value
+t: Tuple[cython.int, ...] = (1, 2, 3)
body: Optional[List[str]]
descr_only : "descriptions are allowed but ignored"
@@ -34,11 +34,11 @@ def f():
(2, 1.5, [], (1, 2, 3))
"""
var = 1 # type: annotation
- var: int = 2
- fvar: float = 1.5
+ var: cython.int = 2
+ fvar: cython.float = 1.5
some_number: cython.int # variable without initial value
- some_list: List[int] = [] # variable with initial value
- t: Tuple[int, ...] = (1, 2, 3)
+ some_list: List[cython.int] = [] # variable with initial value
+ t: Tuple[cython.int, ...] = (1, 2, 3)
body: Optional[List[str]]
descr_only: "descriptions are allowed but ignored"
@@ -59,7 +59,7 @@ class BasicStarship(object):
"""
captain: str = 'Picard' # instance variable with default
damage: cython.int # instance variable without default
- stats: ClassVar[Dict[str, int]] = {} # class variable
+ stats: ClassVar[Dict[str, cython.int]] = {} # class variable
descr_only: "descriptions are allowed but ignored"
def __init__(self, damage):
@@ -75,7 +75,7 @@ class BasicStarshipExt(object):
"""
captain: str = 'Picard' # instance variable with default
damage: cython.int # instance variable without default
- stats: ClassVar[Dict[str, int]] = {} # class variable
+ stats: ClassVar[Dict[str, cython.int]] = {} # class variable
descr_only: "descriptions are allowed but ignored"
def __init__(self, damage):
@@ -124,7 +124,7 @@ def iter_declared_dict(d):
# specialized "compiled" test in module-level __doc__
"""
- typed_dict : Dict[float, float] = d
+ typed_dict : Dict[cython.float, cython.float] = d
s = 0.0
for key in typed_dict:
s += d[key]
@@ -135,7 +135,7 @@ def iter_declared_dict(d):
"//WhileStatNode",
"//WhileStatNode//DictIterationNextNode",
)
-def iter_declared_dict_arg(d : Dict[float, float]):
+def iter_declared_dict_arg(d : Dict[cython.float, cython.float]):
"""
>>> d = {1.1: 2.5, 3.3: 4.5}
>>> iter_declared_dict_arg(d)
@@ -165,8 +165,8 @@ def test_subscripted_types():
list object
set object
"""
- a: typing.Dict[int, float] = {}
- b: List[int] = []
+ a: typing.Dict[cython.int, cython.float] = {}
+ b: List[cython.int] = []
c: _SET_[object] = set()
print(cython.typeof(a) + (" object" if not cython.compiled else ""))
@@ -174,26 +174,58 @@ def test_subscripted_types():
print(cython.typeof(c) + (" object" if not cython.compiled else ""))
# because tuple is specifically special cased to go to ctuple where possible
-def test_tuple(a: typing.Tuple[int, float], b: typing.Tuple[int, ...],
- c: Tuple[int, object] # cannot be a ctuple
+def test_tuple(a: typing.Tuple[cython.int, cython.float], b: typing.Tuple[cython.int, ...],
+ c: Tuple[cython.int, object] # cannot be a ctuple
):
"""
>>> test_tuple((1, 1.0), (1, 1.0), (1, 1.0))
int
int
+ Python object
+ Python object
+ (int, float)
+ tuple object
tuple object
tuple object
"""
- x: typing.Tuple[int, float] = (a[0], a[1])
- y: Tuple[int, ...] = (1,2.)
- z = a[0] # should infer to int
+ x: typing.Tuple[int, float] = (a[0], a[1]) # note: Python int/float, not cython.int/float
+ y: Tuple[cython.int, ...] = (1,2.)
+ z = a[0] # should infer to C int
+ p = x[1] # should infer to Python float -> C double
print(cython.typeof(z))
- print(cython.typeof(x[0]))
+ print("int" if cython.compiled and cython.typeof(x[0]) == "Python object" else cython.typeof(x[0])) # FIXME: infer Python int
+ print(cython.typeof(p) if cython.compiled or cython.typeof(p) != 'float' else "Python object") # FIXME: infer C double
+ print(cython.typeof(x[1]) if cython.compiled or cython.typeof(p) != 'float' else "Python object") # FIXME: infer C double
+ print(cython.typeof(a) if cython.compiled or cython.typeof(a) != 'tuple' else "(int, float)")
+ print(cython.typeof(x) + (" object" if not cython.compiled else ""))
print(cython.typeof(y) + (" object" if not cython.compiled else ""))
print(cython.typeof(c) + (" object" if not cython.compiled else ""))
+def test_use_typing_attributes_as_non_annotations():
+ """
+ >>> test_use_typing_attributes_as_non_annotations()
+ typing.Tuple typing.Tuple[int]
+ typing.Optional True
+ typing.Optional True
+ """
+ x1 = typing.Tuple
+ x2 = typing.Tuple[int]
+ y1 = typing.Optional
+ y2 = typing.Optional[typing.Dict]
+ z1 = Optional
+ z2 = Optional[Dict]
+ # The result of printing "Optional[type]" is slightly version-dependent
+ # so accept both possible forms
+ allowed_optional_strings = [
+ "typing.Union[typing.Dict, NoneType]",
+ "typing.Optional[typing.Dict]"
+ ]
+ print(x1, x2)
+ print(y1, str(y2) in allowed_optional_strings)
+ print(z1, str(z2) in allowed_optional_strings)
+
if cython.compiled:
__doc__ = """
# passing non-dicts to variables declared as dict now fails
@@ -210,6 +242,5 @@ if cython.compiled:
TypeError: Expected dict, got D
"""
-
_WARNINGS = """
"""
diff --git a/tests/run/pep526_variable_annotations_cy.pyx b/tests/run/pep526_variable_annotations_cy.pyx
index c08c832b0..448824b36 100644
--- a/tests/run/pep526_variable_annotations_cy.pyx
+++ b/tests/run/pep526_variable_annotations_cy.pyx
@@ -48,9 +48,9 @@ def test_tuple(typing.Tuple[int, float] a, typing.Tuple[int, ...] b,
tuple object
tuple object
"""
- cdef typing.Tuple[int, float] x = (a[0], a[1])
+ cdef typing.Tuple[int, float] x = (a[0], a[1]) # C int/float
cdef Tuple[int, ...] y = (1,2.)
- z = a[0] # should infer to int
+ z = a[0] # should infer to C int
print(cython.typeof(z))
print(cython.typeof(x[0]))
diff --git a/tests/run/pure_cdef_class_dataclass.py b/tests/run/pure_cdef_class_dataclass.py
index 7b8fcb851..e5c4bcd32 100644
--- a/tests/run/pure_cdef_class_dataclass.py
+++ b/tests/run/pure_cdef_class_dataclass.py
@@ -11,9 +11,9 @@ class MyDataclass:
"""
>>> sorted(list(MyDataclass.__dataclass_fields__.keys()))
['a', 'self']
- >>> inst1 = MyDataclass(2.0, ['a', 'b'])
+ >>> inst1 = MyDataclass(2, ['a', 'b'])
>>> print(inst1)
- MyDataclass(a=2.0, self=['a', 'b'])
+ MyDataclass(a=2, self=['a', 'b'])
>>> inst2 = MyDataclass()
>>> print(inst2)
MyDataclass(a=1, self=[])
@@ -25,7 +25,54 @@ class MyDataclass:
True
>>> hash(inst1) != id(inst1)
True
+ >>> inst1.func_with_annotations(2.0)
+ 4.0
"""
a: int = 1
self: list = cython.dataclasses.field(default_factory=list, hash=False) # test that arguments of init don't conflict
+
+ def func_with_annotations(self, b: float):
+ c: float = b
+ return self.a * c
+
+
+class DummyObj:
+ def __repr__(self):
+ return "DummyObj()"
+
+
+@cython.dataclasses.dataclass
+@cython.cclass
+class NoInitFields:
+ """
+ >>> NoInitFields()
+ NoInitFields(has_default=DummyObj(), has_factory='From a lambda', neither=None)
+ >>> NoInitFields().has_default is NoInitFields().has_default
+ True
+
+ >>> NoInitFields(1) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ TypeError: NoInitFields.__init__() takes 1 positional argument but 2 were given
+
+ >>> NoInitFields(has_default=1) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...has_default...
+ >>> NoInitFields(has_factory=1) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...has_factory...
+ >>> NoInitFields(neither=1) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ TypeError: ...neither...
+ """
+ has_default : object = cython.dataclasses.field(default=DummyObj(), init=False)
+ has_factory : object = cython.dataclasses.field(default_factory=lambda: "From a lambda", init=False)
+ # Cython will default-initialize to None
+ neither : object = cython.dataclasses.field(init=False)
+
+ def __post_init__(self):
+ if not cython.compiled:
+ # Cython will default-initialize this to None, while Python won't
+ # and not initializing it will mess up repr
+ assert not hasattr(self, "neither")
+ self.neither = None
diff --git a/tests/run/pure_py.py b/tests/run/pure_py.py
index 93f737453..ae1f820d3 100644
--- a/tests/run/pure_py.py
+++ b/tests/run/pure_py.py
@@ -33,17 +33,18 @@ def test_sizeof():
def test_declare(n):
"""
>>> test_declare(100)
- (100, 100)
+ (100, 100, 100)
>>> test_declare(100.5)
- (100, 100)
+ (100, 100, 100)
"""
x = cython.declare(cython.int)
y = cython.declare(cython.int, n)
+ z = cython.declare(int, n) # C int
if cython.compiled:
cython.declare(xx=cython.int, yy=cython.long)
i = cython.sizeof(xx)
ptr = cython.declare(cython.p_int, cython.address(y))
- return y, ptr[0]
+ return y, z, ptr[0]
@cython.locals(x=cython.double, n=cython.int)
@@ -548,18 +549,18 @@ def empty_declare():
]
r2.is_integral = True
- assert( r2.is_integral == True )
+ assert r2.is_integral == True
r3.x = 12.3
- assert( r3.x == 12.3 )
+ assert r3.x == 12.3
#It generates a correct C code, but raises an exception when interpreted
if cython.compiled:
r4[0].is_integral = True
- assert( r4[0].is_integral == True )
+ assert r4[0].is_integral == True
r5[0] = 42
- assert ( r5[0] == 42 )
+ assert r5[0] == 42
return [i for i, x in enumerate(res) if not x]
diff --git a/tests/run/relative_cimport_compare.srctree b/tests/run/relative_cimport_compare.srctree
new file mode 100644
index 000000000..77b6fb22e
--- /dev/null
+++ b/tests/run/relative_cimport_compare.srctree
@@ -0,0 +1,327 @@
+# mode: run
+# tag: cimport, pep489
+
+PYTHON setup.py build_ext --inplace
+PYTHON -c "import test_import"
+PYTHON -c "import test_cimport"
+
+
+######## setup.py ########
+
+from distutils.core import setup
+from Cython.Build import cythonize
+from Cython.Distutils.extension import Extension
+
+setup(
+ ext_modules=cythonize('**/*.pyx'),
+)
+
+######## test_import.py ########
+import sys
+SUPPORTS_PEP_489 = sys.version_info > (3, 5)
+if SUPPORTS_PEP_489:
+ import cypkg.sub.submodule
+ import cypkg.sub.sub2.sub2module
+ import pypkg.module
+ import pypkg.sub.submodule
+ import pypkg.sub.sub2.sub2module
+
+######## test_cimport.py ########
+import sys
+SUPPORTS_PEP_489 = sys.version_info > (3, 5)
+if SUPPORTS_PEP_489:
+ import module
+
+
+######## module.pyx ########
+cimport cypkg
+
+cdef cypkg.a_type a1 = 3
+assert a1 == 3
+cdef cypkg.a.a_type a2 = 3
+assert a2 == 3
+cdef cypkg.b_type b1 = 4
+assert b1 == 4
+cdef cypkg.b.b_type b2 = 4
+assert b2 == 4
+
+
+cimport cypkg.sub
+cdef cypkg.sub.a_type a3 = 3
+assert a3 == 3
+cdef cypkg.sub.a.a_type a4 = 3
+assert a4 == 3
+cdef cypkg.sub.b_type b3 = 4
+assert b3 == 4
+cdef cypkg.sub.b.b_type b4 = 4
+assert b4 == 4
+
+
+cimport cypkg.sub.sub2
+cdef cypkg.sub.sub2.a_type a5 = 3
+assert a5 == 3
+cdef cypkg.sub.sub2.a.a_type a6 = 3
+assert a6 == 3
+cdef cypkg.sub.sub2.b_type b5 = 4
+assert b5 == 4
+cdef cypkg.sub.sub2.b.b_type b6 = 4
+assert b6 == 4
+
+import pypkg
+assert pypkg.a_value == 3
+assert pypkg.a.a_value == 3
+assert pypkg.b_value == 4
+assert pypkg.b.b_value == 4
+
+
+import pypkg.sub
+assert pypkg.sub.a_value == 3
+assert pypkg.sub.a.a_value == 3
+assert pypkg.sub.b_value == 4
+assert pypkg.sub.b.b_value == 4
+
+
+import cypkg.sub.sub2
+assert pypkg.sub.sub2.a_value == 3
+assert pypkg.sub.sub2.a.a_value == 3
+assert pypkg.sub.sub2.b_value == 4
+assert pypkg.sub.sub2.b.b_value == 4
+
+
+######## cypkg/__init__.pxd ########
+
+cimport cypkg.sub
+cimport cypkg.sub.sub2
+
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from . cimport sub
+from .sub cimport a
+from .sub.a cimport a_type
+from .sub.sub2 cimport b
+from .sub.sub2.b cimport b_type
+
+######## cypkg/__init__.pyx ########
+
+
+######## cypkg/module.pyx ########
+
+cimport cypkg
+cimport cypkg.sub
+cimport cypkg.sub.sub2
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from . cimport sub
+from .sub cimport a
+from .sub.a cimport a_type
+from .sub.sub2 cimport b
+from .sub.sub2.b cimport b_type
+
+
+######## cypkg/sub/__init__.pxd ########
+
+cimport cypkg
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from . cimport a
+from .a cimport a_type
+
+from .. cimport sub
+from ..sub cimport a
+from ..sub.a cimport a_type
+from ..sub.sub2 cimport b
+from ..sub.sub2.b cimport b_type
+
+######## cypkg/sub/__init__.pyx ########
+
+######## cypkg/sub/a.pxd ########
+
+ctypedef int a_type
+
+######## cypkg/sub/submodule.pyx ########
+
+cimport cypkg
+cimport cypkg.sub
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from . cimport a
+from .a cimport a_type
+
+from .. cimport sub
+from ..sub cimport a
+from ..sub.a cimport a_type
+from ..sub.sub2 cimport b
+from ..sub.sub2.b cimport b_type
+
+######## cypkg/sub/sub2/__init__.pxd ########
+
+cimport cypkg
+cimport cypkg.sub
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from ..sub2 cimport b
+from ..sub2.b cimport b_type
+
+from ...sub cimport a
+from ...sub.a cimport a_type
+
+from ... cimport sub
+from ...sub.sub2 cimport b
+from ...sub.sub2.b cimport b_type
+
+######## cypkg/sub/sub2/__init__.pyx ########
+
+######## cypkg/sub/sub2/b.pxd ########
+
+ctypedef int b_type
+
+
+######## cypkg/sub/sub2/sub2module.pyx ########
+
+cimport cypkg
+cimport cypkg.sub
+from cypkg.sub cimport a
+from cypkg.sub.a cimport a_type
+from cypkg.sub.sub2 cimport b
+from cypkg.sub.sub2.b cimport b_type
+
+from .. cimport sub2
+from ..sub2 cimport b
+from ..sub2.b cimport b_type
+
+from ...sub cimport a
+from ...sub.a cimport a_type
+
+from ... cimport sub
+from ...sub.sub2 cimport b
+from ...sub.sub2.b cimport b_type
+
+######## pypkg/__init__.py ########
+
+import pypkg.sub
+import pypkg.sub.sub2
+
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from . import sub
+from .sub import a
+from .sub.a import a_value
+from .sub.sub2 import b
+from .sub.sub2.b import b_value
+
+######## pypkg/module.py ########
+
+import pypkg
+import pypkg.sub
+import pypkg.sub.sub2
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from . import sub
+from .sub import a
+from .sub.a import a_value
+from .sub.sub2 import b
+from .sub.sub2.b import b_value
+
+######## pypkg/sub/__init__.py ########
+
+import pypkg
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from . import a
+from .a import a_value
+
+from .. import sub
+from ..sub import a
+from ..sub.a import a_value
+from ..sub.sub2 import b
+from ..sub.sub2.b import b_value
+
+######## pypkg/sub/a.py ########
+
+a_value = 3
+
+######## pypkg/sub/submodule.py ########
+
+import pypkg
+import pypkg.sub
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from . import a
+from .a import a_value
+
+from .. import sub
+from ..sub import a
+from ..sub.a import a_value
+from ..sub.sub2 import b
+from ..sub.sub2.b import b_value
+
+######## pypkg/sub/sub2/__init__.py ########
+
+import pypkg
+import pypkg.sub
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from ..sub2 import b
+from ..sub2.b import b_value
+
+from ...sub import a
+from ...sub.a import a_value
+
+from ... import sub
+from ...sub.sub2 import b
+from ...sub.sub2.b import b_value
+
+######## pypkg/sub/sub2/b.py ########
+
+b_value = 4
+
+
+######## pypkg/sub/sub2/sub2module.py ########
+
+import pypkg
+import pypkg.sub
+from pypkg.sub import a
+from pypkg.sub.a import a_value
+from pypkg.sub.sub2 import b
+from pypkg.sub.sub2.b import b_value
+
+from .. import sub2
+from ..sub2 import b
+from ..sub2.b import b_value
+
+from ...sub import a
+from ...sub.a import a_value
+
+from ... import sub
+from ...sub.sub2 import b
+from ...sub.sub2.b import b_value
diff --git a/tests/run/sequential_parallel.pyx b/tests/run/sequential_parallel.pyx
index 3d8e1efff..cd4bbd6bc 100644
--- a/tests/run/sequential_parallel.pyx
+++ b/tests/run/sequential_parallel.pyx
@@ -315,7 +315,7 @@ def test_nan_init():
c1 = 16
-cdef void nogil_print(char *s) with gil:
+cdef void nogil_print(char *s) noexcept with gil:
print s.decode('ascii')
def test_else_clause():
@@ -406,7 +406,7 @@ def test_nested_break_continue():
print i
-cdef int parallel_return() nogil:
+cdef int parallel_return() noexcept nogil:
cdef int i
for i in prange(10):
@@ -640,7 +640,7 @@ def test_parallel_with_gil_continue_unnested():
print sum
-cdef int inner_parallel_section() nogil:
+cdef int inner_parallel_section() noexcept nogil:
cdef int j, sum = 0
for j in prange(10):
sum += j
@@ -656,10 +656,10 @@ def outer_parallel_section():
sum += inner_parallel_section()
return sum
-cdef int nogil_cdef_except_clause() nogil except -1:
+cdef int nogil_cdef_except_clause() except -1 nogil:
return 1
-cdef void nogil_cdef_except_star() nogil except *:
+cdef void nogil_cdef_except_star() except * nogil:
pass
def test_nogil_cdef_except_clause():
@@ -683,7 +683,7 @@ def test_num_threads_compile():
for i in prange(10):
pass
-cdef int chunksize() nogil:
+cdef int chunksize() noexcept nogil:
return 3
def test_chunksize():
@@ -784,7 +784,7 @@ cdef extern from *:
"""
void address_of_temp(...) nogil
void address_of_temp2(...) nogil
- double get_value() nogil except -1.0 # will generate a temp for exception checking
+ double get_value() except -1.0 nogil # will generate a temp for exception checking
def test_inner_private():
"""
diff --git a/tests/run/special_methods_T561.pyx b/tests/run/special_methods_T561.pyx
index 5eb9dddfc..bd68291e7 100644
--- a/tests/run/special_methods_T561.pyx
+++ b/tests/run/special_methods_T561.pyx
@@ -956,3 +956,44 @@ cdef class ReverseMethodsExist:
return "radd"
def __rsub__(self, other):
return "rsub"
+
+
+cdef class ArgumentTypeConversions:
+ """
+ The user can set the signature of special method arguments so that
+ it doesn't match the C signature. This just tests that a few
+ variations work
+
+ >>> obj = ArgumentTypeConversions()
+ >>> obj[1]
+ 1
+ >>> obj["not a number!"]
+ Traceback (most recent call last):
+ ...
+ TypeError: an integer is required
+ >>> obj < obj
+ In comparison 0
+ True
+ >>> obj == obj
+ In comparison 2
+ False
+
+ Here I'm not sure how reproducible the flags are between Python versions.
+ Therefore I'm just checking that they end with ".0"
+ >>> memoryview(obj) # doctest:+ELLIPSIS
+ Traceback (most recent call last):
+ ...
+ RuntimeError: From __getbuffer__ with flags ....0
+ """
+ # force conversion of object to int
+ def __getitem__(self, int x):
+ return x
+
+ # force conversion of comparison (int) to object
+ def __richcmp__(self, other, object comparison):
+ print "In comparison", comparison
+ return not bool(comparison)
+
+ # force conversion of flags (int) to double
+ def __getbuffer__(self, Py_buffer *buffer, double flags):
+ raise RuntimeError("From __getbuffer__ with flags {}".format(flags))
diff --git a/tests/run/test_coroutines_pep492.pyx b/tests/run/test_coroutines_pep492.pyx
index 2841d97af..3060ab704 100644
--- a/tests/run/test_coroutines_pep492.pyx
+++ b/tests/run/test_coroutines_pep492.pyx
@@ -14,7 +14,7 @@ import copy
#import types
import pickle
import os.path
-#import inspect
+import inspect
import unittest
import warnings
import contextlib
@@ -754,7 +754,8 @@ class AsyncBadSyntaxTest(unittest.TestCase):
async def g(): pass
await z
await = 1
- #self.assertTrue(inspect.iscoroutinefunction(f))
+ if sys.version_info >= (3,10,6):
+ self.assertTrue(inspect.iscoroutinefunction(f))
class TokenizerRegrTest(unittest.TestCase):
@@ -777,7 +778,8 @@ class TokenizerRegrTest(unittest.TestCase):
exec(buf, ns, ns)
self.assertEqual(ns['i499'](), 499)
self.assertEqual(type(ns['foo']()).__name__, 'coroutine')
- #self.assertTrue(inspect.iscoroutinefunction(ns['foo']))
+ if sys.version_info >= (3,10,6):
+ self.assertTrue(inspect.iscoroutinefunction(ns['foo']))
class CoroutineTest(unittest.TestCase):
diff --git a/tests/run/test_dataclasses.pxi b/tests/run/test_dataclasses.pxi
new file mode 100644
index 000000000..998d837f2
--- /dev/null
+++ b/tests/run/test_dataclasses.pxi
@@ -0,0 +1,19 @@
+from cython.dataclasses cimport dataclass, field
+from cython cimport cclass
+from dataclasses import (
+ fields, FrozenInstanceError, InitVar, is_dataclass, asdict, astuple, replace
+)
+import unittest
+from unittest.mock import Mock
+import pickle
+import inspect
+from typing import ClassVar, Any, List, Union, Tuple, Dict, Generic, TypeVar, Optional
+from typing import get_type_hints
+from collections import deque, OrderedDict, namedtuple
+import sys
+
+def skip_on_versions_below(version):
+ def decorator(func):
+ if sys.version_info >= version:
+ return func
+ return decorator
diff --git a/tests/run/test_dataclasses.pyx b/tests/run/test_dataclasses.pyx
new file mode 100644
index 000000000..4daf62cf8
--- /dev/null
+++ b/tests/run/test_dataclasses.pyx
@@ -0,0 +1,1186 @@
+# AUTO-GENERATED BY Tools/make_dataclass_tests.py
+# DO NOT EDIT
+
+# cython: language_level=3
+include "test_dataclasses.pxi"
+
+@dataclass
+@cclass
+class C_TestCase_test_no_fields:
+ pass
+
+@dataclass
+@cclass
+class C_TestCase_test_no_fields_but_member_variable:
+ i = 0
+
+@dataclass
+@cclass
+class C_TestCase_test_one_field_no_default:
+ x: int
+
+@dataclass
+@cclass
+class C_TestCase_test_named_init_params:
+ x: int
+
+@dataclass
+@cclass
+class C_TestCase_test_field_named_object:
+ object: str
+
+@dataclass(frozen=True)
+@cclass
+class C_TestCase_test_field_named_object_frozen:
+ object: str
+
+@dataclass
+@cclass
+class C0_TestCase_test_0_field_compare:
+ pass
+
+@dataclass(order=False)
+@cclass
+class C1_TestCase_test_0_field_compare:
+ pass
+
+@dataclass(order=True)
+@cclass
+class C_TestCase_test_0_field_compare:
+ pass
+
+@dataclass
+@cclass
+class C0_TestCase_test_1_field_compare:
+ x: int
+
+@dataclass(order=False)
+@cclass
+class C1_TestCase_test_1_field_compare:
+ x: int
+
+@dataclass(order=True)
+@cclass
+class C_TestCase_test_1_field_compare:
+ x: int
+
+@dataclass
+@cclass
+class C_TestCase_test_field_no_default:
+ x: int = field()
+
+@dataclass
+@cclass
+class C_TestCase_test_not_in_compare:
+ x: int = 0
+ y: int = field(compare=False, default=4)
+
+class Mutable_TestCase_test_deliberately_mutable_defaults:
+
+ def __init__(self):
+ self.l = []
+
+@dataclass
+@cclass
+class C_TestCase_test_deliberately_mutable_defaults:
+ x: Mutable_TestCase_test_deliberately_mutable_defaults
+
+@dataclass()
+@cclass
+class C_TestCase_test_no_options:
+ x: int
+
+@dataclass
+@cclass
+class Point_TestCase_test_not_tuple:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_not_tuple:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class Point3D_TestCase_test_not_other_dataclass:
+ x: int
+ y: int
+ z: int
+
+@dataclass
+@cclass
+class Date_TestCase_test_not_other_dataclass:
+ year: int
+ month: int
+ day: int
+
+@dataclass
+@cclass
+class Point3Dv1_TestCase_test_not_other_dataclass:
+ x: int = 0
+ y: int = 0
+ z: int = 0
+
+@dataclass
+@cclass
+class C_TestCase_test_class_var_no_default:
+ x: ClassVar[int]
+
+@dataclass
+@cclass
+class C_TestCase_test_init_var:
+ x: int = None
+ init_param: InitVar[int] = None
+
+ def __post_init__(self, init_param):
+ if self.x is None:
+ self.x = init_param * 2
+
+@dataclass
+@cclass
+class Foo_TestCase_test_default_factory_derived:
+ x: dict = field(default_factory=dict)
+
+@dataclass
+@cclass
+class Bar_TestCase_test_default_factory_derived(Foo_TestCase_test_default_factory_derived):
+ y: int = 1
+
+@dataclass
+@cclass
+class Baz_TestCase_test_default_factory_derived(Foo_TestCase_test_default_factory_derived):
+ pass
+
+@dataclass
+@cclass
+class A_TestCase_test_intermediate_non_dataclass:
+ x: int
+
+@cclass
+class B_TestCase_test_intermediate_non_dataclass(A_TestCase_test_intermediate_non_dataclass):
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_intermediate_non_dataclass(B_TestCase_test_intermediate_non_dataclass):
+ z: int
+
+class D_TestCase_test_intermediate_non_dataclass(C_TestCase_test_intermediate_non_dataclass):
+ t: int
+
+class NotDataClass_TestCase_test_is_dataclass:
+ pass
+
+@dataclass
+@cclass
+class C_TestCase_test_is_dataclass:
+ x: int
+
+@dataclass
+@cclass
+class D_TestCase_test_is_dataclass:
+ d: C_TestCase_test_is_dataclass
+ e: int
+
+class A_TestCase_test_is_dataclass_when_getattr_always_returns:
+
+ def __getattr__(self, key):
+ return 0
+
+class B_TestCase_test_is_dataclass_when_getattr_always_returns:
+ pass
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_fields_with_class_instance:
+ x: int
+ y: float
+
+class C_TestCase_test_helper_fields_exception:
+ pass
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_raises_on_classes:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_copy_values:
+ x: int
+ y: List[int] = field(default_factory=list)
+
+@dataclass
+@cclass
+class UserId_TestCase_test_helper_asdict_nested:
+ token: int
+ group: int
+
+@dataclass
+@cclass
+class User_TestCase_test_helper_asdict_nested:
+ name: str
+ id: UserId_TestCase_test_helper_asdict_nested
+
+@dataclass
+@cclass
+class User_TestCase_test_helper_asdict_builtin_containers:
+ name: str
+ id: int
+
+@dataclass
+@cclass
+class GroupList_TestCase_test_helper_asdict_builtin_containers:
+ id: int
+ users: List[User_TestCase_test_helper_asdict_builtin_containers]
+
+@dataclass
+@cclass
+class GroupTuple_TestCase_test_helper_asdict_builtin_containers:
+ id: int
+ users: Tuple[User_TestCase_test_helper_asdict_builtin_containers, ...]
+
+@dataclass
+@cclass
+class GroupDict_TestCase_test_helper_asdict_builtin_containers:
+ id: int
+ users: Dict[str, User_TestCase_test_helper_asdict_builtin_containers]
+
+@dataclass
+@cclass
+class Child_TestCase_test_helper_asdict_builtin_object_containers:
+ d: object
+
+@dataclass
+@cclass
+class Parent_TestCase_test_helper_asdict_builtin_object_containers:
+ child: Child_TestCase_test_helper_asdict_builtin_object_containers
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_factory:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_namedtuple:
+ x: str
+ y: T
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_namedtuple_key:
+ f: dict
+
+class T_TestCase_test_helper_asdict_namedtuple_derived(namedtuple('Tbase', 'a')):
+
+ def my_a(self):
+ return self.a
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_asdict_namedtuple_derived:
+ f: T_TestCase_test_helper_asdict_namedtuple_derived
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple:
+ x: int
+ y: int = 0
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple_raises_on_classes:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple_copy_values:
+ x: int
+ y: List[int] = field(default_factory=list)
+
+@dataclass
+@cclass
+class UserId_TestCase_test_helper_astuple_nested:
+ token: int
+ group: int
+
+@dataclass
+@cclass
+class User_TestCase_test_helper_astuple_nested:
+ name: str
+ id: UserId_TestCase_test_helper_astuple_nested
+
+@dataclass
+@cclass
+class User_TestCase_test_helper_astuple_builtin_containers:
+ name: str
+ id: int
+
+@dataclass
+@cclass
+class GroupList_TestCase_test_helper_astuple_builtin_containers:
+ id: int
+ users: List[User_TestCase_test_helper_astuple_builtin_containers]
+
+@dataclass
+@cclass
+class GroupTuple_TestCase_test_helper_astuple_builtin_containers:
+ id: int
+ users: Tuple[User_TestCase_test_helper_astuple_builtin_containers, ...]
+
+@dataclass
+@cclass
+class GroupDict_TestCase_test_helper_astuple_builtin_containers:
+ id: int
+ users: Dict[str, User_TestCase_test_helper_astuple_builtin_containers]
+
+@dataclass
+@cclass
+class Child_TestCase_test_helper_astuple_builtin_object_containers:
+ d: object
+
+@dataclass
+@cclass
+class Parent_TestCase_test_helper_astuple_builtin_object_containers:
+ child: Child_TestCase_test_helper_astuple_builtin_object_containers
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple_factory:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestCase_test_helper_astuple_namedtuple:
+ x: str
+ y: T
+
+@dataclass
+@cclass
+class C_TestCase_test_alternate_classmethod_constructor:
+ x: int
+
+ @classmethod
+ def from_file(cls, filename):
+ value_in_file = 20
+ return cls(value_in_file)
+
+@dataclass
+@cclass
+class C_TestCase_test_field_metadata_default:
+ i: int
+
+@dataclass
+@cclass
+class P_TestCase_test_dataclasses_pickleable:
+ x: int
+ y: int = 0
+
+@dataclass
+@cclass
+class Q_TestCase_test_dataclasses_pickleable:
+ x: int
+ y: int = field(default=0, init=False)
+
+@dataclass
+@cclass
+class R_TestCase_test_dataclasses_pickleable:
+ x: int
+ y: List[int] = field(default_factory=list)
+
+@dataclass
+@cclass
+class C_TestInit_test_overwriting_init:
+ x: int
+
+ def __init__(self, x):
+ self.x = 2 * x
+
+@dataclass(init=True)
+@cclass
+class C_TestInit_test_overwriting_init_:
+ x: int
+
+ def __init__(self, x):
+ self.x = 2 * x
+
+@dataclass(init=False)
+@cclass
+class C_TestInit_test_overwriting_init__:
+ x: int
+
+ def __init__(self, x):
+ self.x = 2 * x
+
+@dataclass
+@cclass
+class C_TestRepr_test_overwriting_repr:
+ x: int
+
+ def __repr__(self):
+ return 'x'
+
+@dataclass(repr=True)
+@cclass
+class C_TestRepr_test_overwriting_repr_:
+ x: int
+
+ def __repr__(self):
+ return 'x'
+
+@dataclass(repr=False)
+@cclass
+class C_TestRepr_test_overwriting_repr__:
+ x: int
+
+ def __repr__(self):
+ return 'x'
+
+@dataclass(eq=False)
+@cclass
+class C_TestEq_test_no_eq:
+ x: int
+
+@dataclass(eq=False)
+@cclass
+class C_TestEq_test_no_eq_:
+ x: int
+
+ def __eq__(self, other):
+ return other == 10
+
+@dataclass
+@cclass
+class C_TestEq_test_overwriting_eq:
+ x: int
+
+ def __eq__(self, other):
+ return other == 3
+
+@dataclass(eq=True)
+@cclass
+class C_TestEq_test_overwriting_eq_:
+ x: int
+
+ def __eq__(self, other):
+ return other == 4
+
+@dataclass(eq=False)
+@cclass
+class C_TestEq_test_overwriting_eq__:
+ x: int
+
+ def __eq__(self, other):
+ return other == 5
+
+@dataclass(unsafe_hash=True)
+@cclass
+class C_TestHash_test_unsafe_hash:
+ x: int
+ y: str
+
+@dataclass(frozen=True)
+@cclass
+class C_TestHash_test_0_field_hash:
+ pass
+
+@dataclass(unsafe_hash=True)
+@cclass
+class C_TestHash_test_0_field_hash_:
+ pass
+
+@dataclass(frozen=True)
+@cclass
+class C_TestHash_test_1_field_hash:
+ x: int
+
+@dataclass(unsafe_hash=True)
+@cclass
+class C_TestHash_test_1_field_hash_:
+ x: int
+
+class Base1_TestMakeDataclass_test_base:
+ pass
+
+class Base2_TestMakeDataclass_test_base:
+ pass
+
+@dataclass
+@cclass
+class Base1_TestMakeDataclass_test_base_dataclass:
+ x: int
+
+class Base2_TestMakeDataclass_test_base_dataclass:
+ pass
+
+@dataclass(frozen=True)
+@cclass
+class C_TestReplace_test:
+ x: int
+ y: int
+
+@dataclass(frozen=True)
+@cclass
+class C_TestReplace_test_invalid_field_name:
+ x: int
+ y: int
+
+@dataclass(frozen=True)
+@cclass
+class C_TestReplace_test_invalid_object:
+ x: int
+ y: int
+
+@dataclass
+@cclass
+class C_TestReplace_test_no_init:
+ x: int
+ y: int = field(init=False, default=10)
+
+@dataclass
+@cclass
+class C_TestReplace_test_classvar:
+ x: int
+ y: ClassVar[int] = 1000
+
+@dataclass
+@cclass
+class C_TestReplace_test_initvar_is_specified:
+ x: int
+ y: InitVar[int]
+
+ def __post_init__(self, y):
+ self.x *= y
+
+@dataclass
+@cclass
+class C_TestReplace_test_recursive_repr:
+ f: object
+
+@dataclass
+@cclass
+class C_TestReplace_test_recursive_repr_two_attrs:
+ f: object
+ g: object
+
+@dataclass
+@cclass
+class C_TestReplace_test_recursive_repr_indirection:
+ f: object
+
+@dataclass
+@cclass
+class D_TestReplace_test_recursive_repr_indirection:
+ f: object
+
+@dataclass
+@cclass
+class C_TestReplace_test_recursive_repr_indirection_two:
+ f: object
+
+@dataclass
+@cclass
+class D_TestReplace_test_recursive_repr_indirection_two:
+ f: object
+
+@dataclass
+@cclass
+class E_TestReplace_test_recursive_repr_indirection_two:
+ f: object
+
+@dataclass
+@cclass
+class C_TestReplace_test_recursive_repr_misc_attrs:
+ f: object
+ g: int
+
+class CustomError(Exception):
+ pass
+
+class TestCase(unittest.TestCase):
+
+ def test_no_fields(self):
+ C = C_TestCase_test_no_fields
+ o = C()
+ self.assertEqual(len(fields(C)), 0)
+
+ def test_no_fields_but_member_variable(self):
+ C = C_TestCase_test_no_fields_but_member_variable
+ o = C()
+ self.assertEqual(len(fields(C)), 0)
+
+ def test_one_field_no_default(self):
+ C = C_TestCase_test_one_field_no_default
+ o = C(42)
+ self.assertEqual(o.x, 42)
+
+ def test_named_init_params(self):
+ C = C_TestCase_test_named_init_params
+ o = C(x=32)
+ self.assertEqual(o.x, 32)
+
+ def test_field_named_object(self):
+ C = C_TestCase_test_field_named_object
+ c = C('foo')
+ self.assertEqual(c.object, 'foo')
+
+ def test_field_named_object_frozen(self):
+ C = C_TestCase_test_field_named_object_frozen
+ c = C('foo')
+ self.assertEqual(c.object, 'foo')
+
+ def test_0_field_compare(self):
+ C0 = C0_TestCase_test_0_field_compare
+ C1 = C1_TestCase_test_0_field_compare
+ for cls in [C0, C1]:
+ with self.subTest(cls=cls):
+ self.assertEqual(cls(), cls())
+ for (idx, fn) in enumerate([lambda a, b: a < b, lambda a, b: a <= b, lambda a, b: a > b, lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaises(TypeError):
+ fn(cls(), cls())
+ C = C_TestCase_test_0_field_compare
+ self.assertLessEqual(C(), C())
+ self.assertGreaterEqual(C(), C())
+
+ def test_1_field_compare(self):
+ C0 = C0_TestCase_test_1_field_compare
+ C1 = C1_TestCase_test_1_field_compare
+ for cls in [C0, C1]:
+ with self.subTest(cls=cls):
+ self.assertEqual(cls(1), cls(1))
+ self.assertNotEqual(cls(0), cls(1))
+ for (idx, fn) in enumerate([lambda a, b: a < b, lambda a, b: a <= b, lambda a, b: a > b, lambda a, b: a >= b]):
+ with self.subTest(idx=idx):
+ with self.assertRaises(TypeError):
+ fn(cls(0), cls(0))
+ C = C_TestCase_test_1_field_compare
+ self.assertLess(C(0), C(1))
+ self.assertLessEqual(C(0), C(1))
+ self.assertLessEqual(C(1), C(1))
+ self.assertGreater(C(1), C(0))
+ self.assertGreaterEqual(C(1), C(0))
+ self.assertGreaterEqual(C(1), C(1))
+
+ def test_field_no_default(self):
+ C = C_TestCase_test_field_no_default
+ self.assertEqual(C(5).x, 5)
+ with self.assertRaises(TypeError):
+ C()
+
+ def test_not_in_compare(self):
+ C = C_TestCase_test_not_in_compare
+ self.assertEqual(C(), C(0, 20))
+ self.assertEqual(C(1, 10), C(1, 20))
+ self.assertNotEqual(C(3), C(4, 10))
+ self.assertNotEqual(C(3, 10), C(4, 10))
+
+ def test_deliberately_mutable_defaults(self):
+ Mutable = Mutable_TestCase_test_deliberately_mutable_defaults
+ C = C_TestCase_test_deliberately_mutable_defaults
+ lst = Mutable()
+ o1 = C(lst)
+ o2 = C(lst)
+ self.assertEqual(o1, o2)
+ o1.x.l.extend([1, 2])
+ self.assertEqual(o1, o2)
+ self.assertEqual(o1.x.l, [1, 2])
+ self.assertIs(o1.x, o2.x)
+
+ def test_no_options(self):
+ C = C_TestCase_test_no_options
+ self.assertEqual(C(42).x, 42)
+
+ def test_not_tuple(self):
+ Point = Point_TestCase_test_not_tuple
+ self.assertNotEqual(Point(1, 2), (1, 2))
+ C = C_TestCase_test_not_tuple
+ self.assertNotEqual(Point(1, 3), C(1, 3))
+
+ def test_not_other_dataclass(self):
+ Point3D = Point3D_TestCase_test_not_other_dataclass
+ Date = Date_TestCase_test_not_other_dataclass
+ self.assertNotEqual(Point3D(2017, 6, 3), Date(2017, 6, 3))
+ self.assertNotEqual(Point3D(1, 2, 3), (1, 2, 3))
+ with self.assertRaises(TypeError):
+ (x, y, z) = Point3D(4, 5, 6)
+ Point3Dv1 = Point3Dv1_TestCase_test_not_other_dataclass
+ self.assertNotEqual(Point3D(0, 0, 0), Point3Dv1())
+
+ def test_class_var_no_default(self):
+ C = C_TestCase_test_class_var_no_default
+ self.assertNotIn('x', C.__dict__)
+
+ def test_init_var(self):
+ C = C_TestCase_test_init_var
+ c = C(init_param=10)
+ self.assertEqual(c.x, 20)
+
+ @skip_on_versions_below((3, 10))
+ def test_init_var_preserve_type(self):
+ self.assertEqual(InitVar[int].type, int)
+ self.assertEqual(repr(InitVar[int]), 'dataclasses.InitVar[int]')
+ self.assertEqual(repr(InitVar[List[int]]), 'dataclasses.InitVar[typing.List[int]]')
+ self.assertEqual(repr(InitVar[list[int]]), 'dataclasses.InitVar[list[int]]')
+ self.assertEqual(repr(InitVar[int | str]), 'dataclasses.InitVar[int | str]')
+
+ def test_default_factory_derived(self):
+ Foo = Foo_TestCase_test_default_factory_derived
+ Bar = Bar_TestCase_test_default_factory_derived
+ self.assertEqual(Foo().x, {})
+ self.assertEqual(Bar().x, {})
+ self.assertEqual(Bar().y, 1)
+ Baz = Baz_TestCase_test_default_factory_derived
+ self.assertEqual(Baz().x, {})
+
+ def test_intermediate_non_dataclass(self):
+ A = A_TestCase_test_intermediate_non_dataclass
+ B = B_TestCase_test_intermediate_non_dataclass
+ C = C_TestCase_test_intermediate_non_dataclass
+ c = C(1, 3)
+ self.assertEqual((c.x, c.z), (1, 3))
+ with self.assertRaises(AttributeError):
+ c.y
+ D = D_TestCase_test_intermediate_non_dataclass
+ d = D(4, 5)
+ self.assertEqual((d.x, d.z), (4, 5))
+
+ def test_is_dataclass(self):
+ NotDataClass = NotDataClass_TestCase_test_is_dataclass
+ self.assertFalse(is_dataclass(0))
+ self.assertFalse(is_dataclass(int))
+ self.assertFalse(is_dataclass(NotDataClass))
+ self.assertFalse(is_dataclass(NotDataClass()))
+ C = C_TestCase_test_is_dataclass
+ D = D_TestCase_test_is_dataclass
+ c = C(10)
+ d = D(c, 4)
+ self.assertTrue(is_dataclass(C))
+ self.assertTrue(is_dataclass(c))
+ self.assertFalse(is_dataclass(c.x))
+ self.assertTrue(is_dataclass(d.d))
+ self.assertFalse(is_dataclass(d.e))
+
+ def test_is_dataclass_when_getattr_always_returns(self):
+ A = A_TestCase_test_is_dataclass_when_getattr_always_returns
+ self.assertFalse(is_dataclass(A))
+ a = A()
+ B = B_TestCase_test_is_dataclass_when_getattr_always_returns
+ b = B()
+ b.__dataclass_fields__ = []
+ for obj in (a, b):
+ with self.subTest(obj=obj):
+ self.assertFalse(is_dataclass(obj))
+ with self.assertRaises(TypeError):
+ asdict(obj)
+ with self.assertRaises(TypeError):
+ astuple(obj)
+ with self.assertRaises(TypeError):
+ replace(obj, x=0)
+
+ def test_helper_fields_with_class_instance(self):
+ C = C_TestCase_test_helper_fields_with_class_instance
+ self.assertEqual(fields(C), fields(C(0, 0.0)))
+
+ def test_helper_fields_exception(self):
+ with self.assertRaises(TypeError):
+ fields(0)
+ C = C_TestCase_test_helper_fields_exception
+ with self.assertRaises(TypeError):
+ fields(C)
+ with self.assertRaises(TypeError):
+ fields(C())
+
+ def test_helper_asdict(self):
+ C = C_TestCase_test_helper_asdict
+ c = C(1, 2)
+ self.assertEqual(asdict(c), {'x': 1, 'y': 2})
+ self.assertEqual(asdict(c), asdict(c))
+ self.assertIsNot(asdict(c), asdict(c))
+ c.x = 42
+ self.assertEqual(asdict(c), {'x': 42, 'y': 2})
+ self.assertIs(type(asdict(c)), dict)
+
+ def test_helper_asdict_raises_on_classes(self):
+ C = C_TestCase_test_helper_asdict_raises_on_classes
+ with self.assertRaises(TypeError):
+ asdict(C)
+ with self.assertRaises(TypeError):
+ asdict(int)
+
+ def test_helper_asdict_copy_values(self):
+ C = C_TestCase_test_helper_asdict_copy_values
+ initial = []
+ c = C(1, initial)
+ d = asdict(c)
+ self.assertEqual(d['y'], initial)
+ self.assertIsNot(d['y'], initial)
+ c = C(1)
+ d = asdict(c)
+ d['y'].append(1)
+ self.assertEqual(c.y, [])
+
+ def test_helper_asdict_nested(self):
+ UserId = UserId_TestCase_test_helper_asdict_nested
+ User = User_TestCase_test_helper_asdict_nested
+ u = User('Joe', UserId(123, 1))
+ d = asdict(u)
+ self.assertEqual(d, {'name': 'Joe', 'id': {'token': 123, 'group': 1}})
+ self.assertIsNot(asdict(u), asdict(u))
+ u.id.group = 2
+ self.assertEqual(asdict(u), {'name': 'Joe', 'id': {'token': 123, 'group': 2}})
+
+ def test_helper_asdict_builtin_containers(self):
+ User = User_TestCase_test_helper_asdict_builtin_containers
+ GroupList = GroupList_TestCase_test_helper_asdict_builtin_containers
+ GroupTuple = GroupTuple_TestCase_test_helper_asdict_builtin_containers
+ GroupDict = GroupDict_TestCase_test_helper_asdict_builtin_containers
+ a = User('Alice', 1)
+ b = User('Bob', 2)
+ gl = GroupList(0, [a, b])
+ gt = GroupTuple(0, (a, b))
+ gd = GroupDict(0, {'first': a, 'second': b})
+ self.assertEqual(asdict(gl), {'id': 0, 'users': [{'name': 'Alice', 'id': 1}, {'name': 'Bob', 'id': 2}]})
+ self.assertEqual(asdict(gt), {'id': 0, 'users': ({'name': 'Alice', 'id': 1}, {'name': 'Bob', 'id': 2})})
+ self.assertEqual(asdict(gd), {'id': 0, 'users': {'first': {'name': 'Alice', 'id': 1}, 'second': {'name': 'Bob', 'id': 2}}})
+
+ def test_helper_asdict_builtin_object_containers(self):
+ Child = Child_TestCase_test_helper_asdict_builtin_object_containers
+ Parent = Parent_TestCase_test_helper_asdict_builtin_object_containers
+ self.assertEqual(asdict(Parent(Child([1]))), {'child': {'d': [1]}})
+ self.assertEqual(asdict(Parent(Child({1: 2}))), {'child': {'d': {1: 2}}})
+
+ def test_helper_asdict_factory(self):
+ C = C_TestCase_test_helper_asdict_factory
+ c = C(1, 2)
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, OrderedDict([('x', 1), ('y', 2)]))
+ self.assertIsNot(d, asdict(c, dict_factory=OrderedDict))
+ c.x = 42
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, OrderedDict([('x', 42), ('y', 2)]))
+ self.assertIs(type(d), OrderedDict)
+
+ def test_helper_asdict_namedtuple(self):
+ T = namedtuple('T', 'a b c')
+ C = C_TestCase_test_helper_asdict_namedtuple
+ c = C('outer', T(1, C('inner', T(11, 12, 13)), 2))
+ d = asdict(c)
+ self.assertEqual(d, {'x': 'outer', 'y': T(1, {'x': 'inner', 'y': T(11, 12, 13)}, 2)})
+ d = asdict(c, dict_factory=OrderedDict)
+ self.assertEqual(d, {'x': 'outer', 'y': T(1, {'x': 'inner', 'y': T(11, 12, 13)}, 2)})
+ self.assertIs(type(d), OrderedDict)
+ self.assertIs(type(d['y'][1]), OrderedDict)
+
+ def test_helper_asdict_namedtuple_key(self):
+ C = C_TestCase_test_helper_asdict_namedtuple_key
+ T = namedtuple('T', 'a')
+ c = C({T('an a'): 0})
+ self.assertEqual(asdict(c), {'f': {T(a='an a'): 0}})
+
+ def test_helper_asdict_namedtuple_derived(self):
+ T = T_TestCase_test_helper_asdict_namedtuple_derived
+ C = C_TestCase_test_helper_asdict_namedtuple_derived
+ t = T(6)
+ c = C(t)
+ d = asdict(c)
+ self.assertEqual(d, {'f': T(a=6)})
+ self.assertIsNot(d['f'], t)
+ self.assertEqual(d['f'].my_a(), 6)
+
+ def test_helper_astuple(self):
+ C = C_TestCase_test_helper_astuple
+ c = C(1)
+ self.assertEqual(astuple(c), (1, 0))
+ self.assertEqual(astuple(c), astuple(c))
+ self.assertIsNot(astuple(c), astuple(c))
+ c.y = 42
+ self.assertEqual(astuple(c), (1, 42))
+ self.assertIs(type(astuple(c)), tuple)
+
+ def test_helper_astuple_raises_on_classes(self):
+ C = C_TestCase_test_helper_astuple_raises_on_classes
+ with self.assertRaises(TypeError):
+ astuple(C)
+ with self.assertRaises(TypeError):
+ astuple(int)
+
+ def test_helper_astuple_copy_values(self):
+ C = C_TestCase_test_helper_astuple_copy_values
+ initial = []
+ c = C(1, initial)
+ t = astuple(c)
+ self.assertEqual(t[1], initial)
+ self.assertIsNot(t[1], initial)
+ c = C(1)
+ t = astuple(c)
+ t[1].append(1)
+ self.assertEqual(c.y, [])
+
+ def test_helper_astuple_nested(self):
+ UserId = UserId_TestCase_test_helper_astuple_nested
+ User = User_TestCase_test_helper_astuple_nested
+ u = User('Joe', UserId(123, 1))
+ t = astuple(u)
+ self.assertEqual(t, ('Joe', (123, 1)))
+ self.assertIsNot(astuple(u), astuple(u))
+ u.id.group = 2
+ self.assertEqual(astuple(u), ('Joe', (123, 2)))
+
+ def test_helper_astuple_builtin_containers(self):
+ User = User_TestCase_test_helper_astuple_builtin_containers
+ GroupList = GroupList_TestCase_test_helper_astuple_builtin_containers
+ GroupTuple = GroupTuple_TestCase_test_helper_astuple_builtin_containers
+ GroupDict = GroupDict_TestCase_test_helper_astuple_builtin_containers
+ a = User('Alice', 1)
+ b = User('Bob', 2)
+ gl = GroupList(0, [a, b])
+ gt = GroupTuple(0, (a, b))
+ gd = GroupDict(0, {'first': a, 'second': b})
+ self.assertEqual(astuple(gl), (0, [('Alice', 1), ('Bob', 2)]))
+ self.assertEqual(astuple(gt), (0, (('Alice', 1), ('Bob', 2))))
+ self.assertEqual(astuple(gd), (0, {'first': ('Alice', 1), 'second': ('Bob', 2)}))
+
+ def test_helper_astuple_builtin_object_containers(self):
+ Child = Child_TestCase_test_helper_astuple_builtin_object_containers
+ Parent = Parent_TestCase_test_helper_astuple_builtin_object_containers
+ self.assertEqual(astuple(Parent(Child([1]))), (([1],),))
+ self.assertEqual(astuple(Parent(Child({1: 2}))), (({1: 2},),))
+
+ def test_helper_astuple_factory(self):
+ C = C_TestCase_test_helper_astuple_factory
+ NT = namedtuple('NT', 'x y')
+
+ def nt(lst):
+ return NT(*lst)
+ c = C(1, 2)
+ t = astuple(c, tuple_factory=nt)
+ self.assertEqual(t, NT(1, 2))
+ self.assertIsNot(t, astuple(c, tuple_factory=nt))
+ c.x = 42
+ t = astuple(c, tuple_factory=nt)
+ self.assertEqual(t, NT(42, 2))
+ self.assertIs(type(t), NT)
+
+ def test_helper_astuple_namedtuple(self):
+ T = namedtuple('T', 'a b c')
+ C = C_TestCase_test_helper_astuple_namedtuple
+ c = C('outer', T(1, C('inner', T(11, 12, 13)), 2))
+ t = astuple(c)
+ self.assertEqual(t, ('outer', T(1, ('inner', (11, 12, 13)), 2)))
+ t = astuple(c, tuple_factory=list)
+ self.assertEqual(t, ['outer', T(1, ['inner', T(11, 12, 13)], 2)])
+
+ def test_alternate_classmethod_constructor(self):
+ C = C_TestCase_test_alternate_classmethod_constructor
+ self.assertEqual(C.from_file('filename').x, 20)
+
+ def test_field_metadata_default(self):
+ C = C_TestCase_test_field_metadata_default
+ self.assertFalse(fields(C)[0].metadata)
+ self.assertEqual(len(fields(C)[0].metadata), 0)
+ with self.assertRaises(TypeError):
+ fields(C)[0].metadata['test'] = 3
+
+ def test_dataclasses_pickleable(self):
+ global P, Q, R
+ P = P_TestCase_test_dataclasses_pickleable
+ Q = Q_TestCase_test_dataclasses_pickleable
+ R = R_TestCase_test_dataclasses_pickleable
+ q = Q(1)
+ q.y = 2
+ samples = [P(1), P(1, 2), Q(1), q, R(1), R(1, [2, 3, 4])]
+ for sample in samples:
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(sample=sample, proto=proto):
+ new_sample = pickle.loads(pickle.dumps(sample, proto))
+ self.assertEqual(sample.x, new_sample.x)
+ self.assertEqual(sample.y, new_sample.y)
+ self.assertIsNot(sample, new_sample)
+ new_sample.x = 42
+ another_new_sample = pickle.loads(pickle.dumps(new_sample, proto))
+ self.assertEqual(new_sample.x, another_new_sample.x)
+ self.assertEqual(sample.y, another_new_sample.y)
+
+class TestFieldNoAnnotation(unittest.TestCase):
+ pass
+
+class TestInit(unittest.TestCase):
+
+ def test_overwriting_init(self):
+ C = C_TestInit_test_overwriting_init
+ self.assertEqual(C(3).x, 6)
+ C = C_TestInit_test_overwriting_init_
+ self.assertEqual(C(4).x, 8)
+ C = C_TestInit_test_overwriting_init__
+ self.assertEqual(C(5).x, 10)
+
+class TestRepr(unittest.TestCase):
+
+ def test_overwriting_repr(self):
+ C = C_TestRepr_test_overwriting_repr
+ self.assertEqual(repr(C(0)), 'x')
+ C = C_TestRepr_test_overwriting_repr_
+ self.assertEqual(repr(C(0)), 'x')
+ C = C_TestRepr_test_overwriting_repr__
+ self.assertEqual(repr(C(0)), 'x')
+
+class TestEq(unittest.TestCase):
+
+ def test_no_eq(self):
+ C = C_TestEq_test_no_eq
+ self.assertNotEqual(C(0), C(0))
+ c = C(3)
+ self.assertEqual(c, c)
+ C = C_TestEq_test_no_eq_
+ self.assertEqual(C(3), 10)
+
+ def test_overwriting_eq(self):
+ C = C_TestEq_test_overwriting_eq
+ self.assertEqual(C(1), 3)
+ self.assertNotEqual(C(1), 1)
+ C = C_TestEq_test_overwriting_eq_
+ self.assertEqual(C(1), 4)
+ self.assertNotEqual(C(1), 1)
+ C = C_TestEq_test_overwriting_eq__
+ self.assertEqual(C(1), 5)
+ self.assertNotEqual(C(1), 1)
+
+class TestOrdering(unittest.TestCase):
+ pass
+
+class TestHash(unittest.TestCase):
+
+ def test_unsafe_hash(self):
+ C = C_TestHash_test_unsafe_hash
+ self.assertEqual(hash(C(1, 'foo')), hash((1, 'foo')))
+
+ def test_0_field_hash(self):
+ C = C_TestHash_test_0_field_hash
+ self.assertEqual(hash(C()), hash(()))
+ C = C_TestHash_test_0_field_hash_
+ self.assertEqual(hash(C()), hash(()))
+
+ def test_1_field_hash(self):
+ C = C_TestHash_test_1_field_hash
+ self.assertEqual(hash(C(4)), hash((4,)))
+ self.assertEqual(hash(C(42)), hash((42,)))
+ C = C_TestHash_test_1_field_hash_
+ self.assertEqual(hash(C(4)), hash((4,)))
+ self.assertEqual(hash(C(42)), hash((42,)))
+
+class TestMakeDataclass(unittest.TestCase):
+ pass
+
+class TestReplace(unittest.TestCase):
+
+ def test(self):
+ C = C_TestReplace_test
+ c = C(1, 2)
+ c1 = replace(c, x=3)
+ self.assertEqual(c1.x, 3)
+ self.assertEqual(c1.y, 2)
+
+ def test_invalid_field_name(self):
+ C = C_TestReplace_test_invalid_field_name
+ c = C(1, 2)
+ with self.assertRaises(TypeError):
+ c1 = replace(c, z=3)
+
+ def test_invalid_object(self):
+ C = C_TestReplace_test_invalid_object
+ with self.assertRaises(TypeError):
+ replace(C, x=3)
+ with self.assertRaises(TypeError):
+ replace(0, x=3)
+
+ def test_no_init(self):
+ C = C_TestReplace_test_no_init
+ c = C(1)
+ c.y = 20
+ c1 = replace(c, x=5)
+ self.assertEqual((c1.x, c1.y), (5, 10))
+ with self.assertRaises(ValueError):
+ replace(c, x=2, y=30)
+ with self.assertRaises(ValueError):
+ replace(c, y=30)
+
+ def test_classvar(self):
+ C = C_TestReplace_test_classvar
+ c = C(1)
+ d = C(2)
+ self.assertIs(c.y, d.y)
+ self.assertEqual(c.y, 1000)
+ with self.assertRaises(TypeError):
+ replace(c, y=30)
+ replace(c, x=5)
+
+ def test_initvar_is_specified(self):
+ C = C_TestReplace_test_initvar_is_specified
+ c = C(1, 10)
+ self.assertEqual(c.x, 10)
+ with self.assertRaises(ValueError):
+ replace(c, x=3)
+ c = replace(c, x=3, y=5)
+ self.assertEqual(c.x, 15)
+
+ def test_recursive_repr(self):
+ C = C_TestReplace_test_recursive_repr
+ c = C(None)
+ c.f = c
+ self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr(f=...)')
+
+ def test_recursive_repr_two_attrs(self):
+ C = C_TestReplace_test_recursive_repr_two_attrs
+ c = C(None, None)
+ c.f = c
+ c.g = c
+ self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr_two_attrs(f=..., g=...)')
+
+ def test_recursive_repr_indirection(self):
+ C = C_TestReplace_test_recursive_repr_indirection
+ D = D_TestReplace_test_recursive_repr_indirection
+ c = C(None)
+ d = D(None)
+ c.f = d
+ d.f = c
+ self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr_indirection(f=D_TestReplace_test_recursive_repr_indirection(f=...))')
+
+ def test_recursive_repr_indirection_two(self):
+ C = C_TestReplace_test_recursive_repr_indirection_two
+ D = D_TestReplace_test_recursive_repr_indirection_two
+ E = E_TestReplace_test_recursive_repr_indirection_two
+ c = C(None)
+ d = D(None)
+ e = E(None)
+ c.f = d
+ d.f = e
+ e.f = c
+ self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr_indirection_two(f=D_TestReplace_test_recursive_repr_indirection_two(f=E_TestReplace_test_recursive_repr_indirection_two(f=...)))')
+
+ def test_recursive_repr_misc_attrs(self):
+ C = C_TestReplace_test_recursive_repr_misc_attrs
+ c = C(None, 1)
+ c.f = c
+ self.assertEqual(repr(c), 'C_TestReplace_test_recursive_repr_misc_attrs(f=..., g=1)')
+
+class TestAbstract(unittest.TestCase):
+ pass
+
+class TestKeywordArgs(unittest.TestCase):
+ pass
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/run/test_grammar.py b/tests/run/test_grammar.py
index c41b75f55..dfa11e087 100644
--- a/tests/run/test_grammar.py
+++ b/tests/run/test_grammar.py
@@ -64,8 +64,7 @@ if cython.compiled:
def use_old_parser():
- # FIXME: currently disabling new PEG parser tests.
- return True
+ return False
import unittest
@@ -798,8 +797,6 @@ class GrammarTests(unittest.TestCase):
self.assertEqual(f.__annotations__, {'return': 'list'})
# Test expressions as decorators (PEP 614):
- # FIXME: implement PEP 614
- """
@False or null
def f(x): pass
@d := null
@@ -812,7 +809,6 @@ class GrammarTests(unittest.TestCase):
def f(x): pass
@[null][0].__call__.__call__
def f(x): pass
- """
# test closures with a variety of opargs
closure = 1
@@ -1706,8 +1702,6 @@ class GrammarTests(unittest.TestCase):
class G: pass
# Test expressions as decorators (PEP 614):
- # FIXME: implement PEP 614
- """
@False or class_decorator
class H: pass
@d := class_decorator
@@ -1720,7 +1714,6 @@ class GrammarTests(unittest.TestCase):
class L: pass
@[class_decorator][0].__call__.__call__
class M: pass
- """
def test_dictcomps(self):
# dictorsetmaker: ( (test ':' test (comp_for |
@@ -1869,68 +1862,53 @@ class GrammarTests(unittest.TestCase):
with manager() as x, manager():
pass
- if not use_old_parser():
- test_cases = [
- """if 1:
- with (
- manager()
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x
- ):
- pass
- """,
- """if 1:
- with (
- manager() as (x, y),
- manager() as z,
- ):
- pass
- """,
- """if 1:
- with (
- manager(),
- manager()
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x,
- manager() as y
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x,
- manager()
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x,
- manager() as y,
- manager() as z,
- ):
- pass
- """,
- """if 1:
- with (
- manager() as x,
- manager() as y,
- manager(),
- ):
- pass
- """,
- ]
- for case in test_cases:
- with self.subTest(case=case):
- compile(case, "<string>", "exec")
+ with (
+ manager()
+ ):
+ pass
+
+ with (
+ manager() as x
+ ):
+ pass
+
+ with (
+ manager() as (x, y),
+ manager() as z,
+ ):
+ pass
+
+ with (
+ manager(),
+ manager()
+ ):
+ pass
+
+ with (
+ manager() as x,
+ manager() as y
+ ):
+ pass
+
+ with (
+ manager() as x,
+ manager()
+ ):
+ pass
+
+ with (
+ manager() as x,
+ manager() as y,
+ manager() as z,
+ ):
+ pass
+
+ with (
+ manager() as x,
+ manager() as y,
+ manager(),
+ ):
+ pass
def test_if_else_expr(self):
diff --git a/tests/run/test_named_expressions.py b/tests/run/test_named_expressions.py
index 28147319b..b3e2eb980 100644
--- a/tests/run/test_named_expressions.py
+++ b/tests/run/test_named_expressions.py
@@ -1,12 +1,10 @@
# mode: run
-# tag: pure38, no-cpp
+# tag: pure3.8, no-cpp
# copied from cpython with minimal modifications (mainly exec->cython_inline, and a few exception strings)
# This is not currently run in C++ because all the cython_inline compilations fail for reasons that are unclear
-# FIXME pure38 seems to be ignored
# cython: language_level=3
-import os
import unittest
import cython
from Cython.Compiler.Main import CompileError
diff --git a/tests/run/test_patma.py b/tests/run/test_patma.py
index 6401e23c4..409d8b3cc 100644
--- a/tests/run/test_patma.py
+++ b/tests/run/test_patma.py
@@ -1,43 +1,21 @@
-### COPIED FROM CPython 3.9
+### COPIED FROM CPython 3.12 alpha (July 2022)
### Original part after ############
# cython: language_level=3
# new code
import cython
-from Cython.Compiler.Main import compile as cython_compile, CompileError
-from Cython.Build.Inline import cython_inline
-import contextlib
-from tempfile import NamedTemporaryFile
-
-@contextlib.contextmanager
-def hidden_stderr():
- try:
- from StringIO import StringIO
- except ImportError:
- from io import StringIO
-
- old_stderr = sys.stderr
- try:
- sys.stderr = StringIO()
- yield
- finally:
- sys.stderr = old_stderr
-
-def _compile(code):
- with NamedTemporaryFile(suffix='.py') as f:
- f.write(code.encode('utf8'))
- f.flush()
-
- with hidden_stderr():
- result = cython_compile(f.name, language_level=3)
- return result
+from Cython.TestUtils import py_parse_code
+
if cython.compiled:
def compile(code, name, what):
assert what == 'exec'
- result = _compile(code)
- if not result.c_file:
- raise SyntaxError('unexpected EOF') # compile is only used for testing errors
+ py_parse_code(code)
+
+
+def disable(func):
+ pass
+
############## SLIGHTLY MODIFIED ORIGINAL CODE
import array
@@ -61,9 +39,47 @@ else:
y: int
# TestCompiler removed - it's very CPython-specific
-# TestTracing also removed - doesn't seem like a core test
+# TestTracing also mainly removed - doesn't seem like a core test
+# except for one test that seems misplaced in CPython (which is below)
+
+class TestTracing(unittest.TestCase):
+ if sys.version_info < (3, 4):
+ class SubTestClass(object):
+ def __enter__(self):
+ return self
+ def __exit__(self, exc_type, exc_value, traceback):
+ return
+ def __call__(self, *args):
+ return self
+ subTest = SubTestClass()
+
+ def test_parser_deeply_nested_patterns(self):
+ # Deeply nested patterns can cause exponential backtracking when parsing.
+ # See CPython gh-93671 for more information.
+ #
+ # DW: Cython note - this doesn't break the parser but may cause a
+ # RecursionError later in the code-generation. I don't believe that's
+ # easily avoidable with the way Cython visitors currently work
+
+ levels = 100
+
+ patterns = [
+ "A" + "(" * levels + ")" * levels,
+ "{1:" * levels + "1" + "}" * levels,
+ "[" * levels + "1" + "]" * levels,
+ ]
-# FIXME - return all the "return"s added to cause code to be dropped
+ for pattern in patterns:
+ with self.subTest(pattern):
+ code = inspect.cleandoc("""
+ match None:
+ case {}:
+ pass
+ """.format(pattern))
+ compile(code, "<string>", "exec")
+
+
+# FIXME - remove all the "return"s added to cause code to be dropped
############## ORIGINAL PART FROM CPYTHON
@@ -71,7 +87,7 @@ class TestInheritance(unittest.TestCase):
@staticmethod
def check_sequence_then_mapping(x):
- return
+ return # disabled
match x:
case [*_]:
return "seq"
@@ -80,7 +96,7 @@ class TestInheritance(unittest.TestCase):
@staticmethod
def check_mapping_then_sequence(x):
- return
+ return # disabled
match x:
case {}:
return "map"
@@ -88,7 +104,7 @@ class TestInheritance(unittest.TestCase):
return "seq"
def test_multiple_inheritance_mapping(self):
- return
+ return # disabled
class C:
pass
class M1(collections.UserDict, collections.abc.Sequence):
@@ -109,7 +125,7 @@ class TestInheritance(unittest.TestCase):
self.assertEqual(self.check_mapping_then_sequence(M4()), "map")
def test_multiple_inheritance_sequence(self):
- return
+ return # disabled
class C:
pass
class S1(collections.UserList, collections.abc.Mapping):
@@ -130,7 +146,7 @@ class TestInheritance(unittest.TestCase):
self.assertEqual(self.check_mapping_then_sequence(S4()), "seq")
def test_late_registration_mapping(self):
- return
+ return # disabled
class Parent:
pass
class ChildPre(Parent):
@@ -154,7 +170,7 @@ class TestInheritance(unittest.TestCase):
self.assertEqual(self.check_mapping_then_sequence(GrandchildPost()), "map")
def test_late_registration_sequence(self):
- return
+ return # disabled
class Parent:
pass
class ChildPre(Parent):
@@ -187,7 +203,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(x, True)
def test_patma_001(self):
- return
+ return # disabled
match 0:
case 0 if False:
x = False
@@ -249,7 +265,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(A.y, 1)
def test_patma_009(self):
- return
+ return # disabled
class A:
B = 0
match 0:
@@ -265,14 +281,14 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 2)
def test_patma_010(self):
- return
+ return # disabled
match ():
case []:
x = 0
self.assertEqual(x, 0)
def test_patma_011(self):
- return
+ return # disabled
match (0, 1, 2):
case [*x]:
y = 0
@@ -280,7 +296,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_012(self):
- return
+ return # disabled
match (0, 1, 2):
case [0, *x]:
y = 0
@@ -288,7 +304,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_013(self):
- return
+ return # disabled
match (0, 1, 2):
case [0, 1, *x,]:
y = 0
@@ -296,7 +312,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_014(self):
- return
+ return # disabled
match (0, 1, 2):
case [0, 1, 2, *x]:
y = 0
@@ -304,7 +320,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_015(self):
- return
+ return # disabled
match (0, 1, 2):
case [*x, 2,]:
y = 0
@@ -312,7 +328,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_016(self):
- return
+ return # disabled
match (0, 1, 2):
case [*x, 1, 2]:
y = 0
@@ -320,7 +336,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_017(self):
- return
+ return # disabled
match (0, 1, 2):
case [*x, 0, 1, 2,]:
y = 0
@@ -328,7 +344,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_018(self):
- return
+ return # disabled
match (0, 1, 2):
case [0, *x, 2]:
y = 0
@@ -336,7 +352,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_019(self):
- return
+ return # disabled
match (0, 1, 2):
case [0, 1, *x, 2,]:
y = 0
@@ -344,7 +360,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_020(self):
- return
+ return # disabled
match (0, 1, 2):
case [0, *x, 1, 2]:
y = 0
@@ -352,7 +368,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_021(self):
- return
+ return # disabled
match (0, 1, 2):
case [*x,]:
y = 0
@@ -360,7 +376,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_022(self):
- return
+ return # disabled
x = {}
match x:
case {}:
@@ -369,7 +385,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_023(self):
- return
+ return # disabled
x = {0: 0}
match x:
case {}:
@@ -378,7 +394,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_024(self):
- return
+ return # disabled
x = {}
y = None
match x:
@@ -388,7 +404,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_025(self):
- return
+ return # disabled
x = {0: 0}
match x:
case {0: (0 | 1 | 2 as z)}:
@@ -398,7 +414,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_026(self):
- return
+ return # disabled
x = {0: 1}
match x:
case {0: (0 | 1 | 2 as z)}:
@@ -408,7 +424,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 1)
def test_patma_027(self):
- return
+ return # disabled
x = {0: 2}
match x:
case {0: (0 | 1 | 2 as z)}:
@@ -418,7 +434,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 2)
def test_patma_028(self):
- return
+ return # disabled
x = {0: 3}
y = None
match x:
@@ -428,7 +444,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_029(self):
- return
+ return # disabled
x = {}
y = None
match x:
@@ -442,7 +458,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_030(self):
- return
+ return # disabled
x = {False: (True, 2.0, {})}
match x:
case {0: [1, 2, {}]}:
@@ -455,7 +471,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_031(self):
- return
+ return # disabled
x = {False: (True, 2.0, {}), 1: [[]], 2: 0}
match x:
case {0: [1, 2, {}]}:
@@ -468,7 +484,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_032(self):
- return
+ return # disabled
x = {False: (True, 2.0, {}), 1: [[]], 2: 0}
match x:
case {0: [1, 2]}:
@@ -481,7 +497,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_033(self):
- return
+ return # disabled
x = []
match x:
case {0: [1, 2, {}]}:
@@ -494,7 +510,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 2)
def test_patma_034(self):
- return
+ return # disabled
x = {0: 0}
match x:
case {0: [1, 2, {}]}:
@@ -507,7 +523,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_035(self):
- return
+ return # disabled
x = {0: 0}
match x:
case {0: [1, 2, {}]}:
@@ -553,7 +569,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_040(self):
- return
+ return # disabled
x = 0
match x:
case (0 as z) | (1 as z) | (2 as z) if z == x % 2:
@@ -563,7 +579,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_041(self):
- return
+ return # disabled
x = 1
match x:
case (0 as z) | (1 as z) | (2 as z) if z == x % 2:
@@ -573,7 +589,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 1)
def test_patma_042(self):
- return
+ return # disabled
x = 2
y = None
match x:
@@ -584,7 +600,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 2)
def test_patma_043(self):
- return
+ return # disabled
x = 3
y = None
match x:
@@ -594,7 +610,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_044(self):
- return
+ return # disabled
x = ()
match x:
case []:
@@ -603,7 +619,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_045(self):
- return
+ return # disabled
x = ()
match x:
case ():
@@ -612,7 +628,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_046(self):
- return
+ return # disabled
x = (0,)
match x:
case [0]:
@@ -621,7 +637,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_047(self):
- return
+ return # disabled
x = ((),)
match x:
case [[]]:
@@ -630,7 +646,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_048(self):
- return
+ return # disabled
x = [0, 1]
match x:
case [0, 1] | [1, 0]:
@@ -639,7 +655,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_049(self):
- return
+ return # disabled
x = [1, 0]
match x:
case [0, 1] | [1, 0]:
@@ -648,7 +664,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_050(self):
- return
+ return # disabled
x = [0, 0]
y = None
match x:
@@ -658,7 +674,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_051(self):
- return
+ return # disabled
w = None
x = [1, 0]
match x:
@@ -672,7 +688,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_052(self):
- return
+ return # disabled
x = [1, 0]
match x:
case [0]:
@@ -685,7 +701,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 2)
def test_patma_053(self):
- return
+ return # disabled
x = {0}
y = None
match x:
@@ -695,7 +711,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_054(self):
- return
+ return # disabled
x = set()
y = None
match x:
@@ -705,7 +721,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_055(self):
- return
+ return # disabled
x = iter([1, 2, 3])
y = None
match x:
@@ -715,7 +731,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_056(self):
- return
+ return # disabled
x = {}
y = None
match x:
@@ -725,7 +741,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_057(self):
- return
+ return # disabled
x = {0: False, 1: True}
y = None
match x:
@@ -831,7 +847,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_068(self):
- return
+ return # disabled
x = 0
match x:
case 0 if False:
@@ -842,7 +858,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_069(self):
- return
+ return # disabled
x = 0
y = None
match x:
@@ -854,7 +870,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_070(self):
- return
+ return # disabled
x = 0
match x:
case 0 if True:
@@ -865,7 +881,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_071(self):
- return
+ return # disabled
x = 0
match x:
case 0 if 1:
@@ -876,7 +892,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_072(self):
- return
+ return # disabled
x = 0
match x:
case 0 if True:
@@ -888,7 +904,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 2)
def test_patma_073(self):
- return
+ return # disabled
x = 0
match x:
case 0 if 0:
@@ -900,7 +916,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 2)
def test_patma_074(self):
- return
+ return # disabled
x = 0
y = None
match x:
@@ -912,7 +928,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_075(self):
- return
+ return # disabled
x = "x"
match x:
case ["x"]:
@@ -923,7 +939,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_076(self):
- return
+ return # disabled
x = b"x"
match x:
case [b"x"]:
@@ -938,7 +954,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 4)
def test_patma_077(self):
- return
+ return # disabled
x = bytearray(b"x")
y = None
match x:
@@ -950,7 +966,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_078(self):
- return
+ return # disabled
x = ""
match x:
case []:
@@ -963,7 +979,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 2)
def test_patma_079(self):
- return
+ return # disabled
x = "xxx"
match x:
case ["x", "x", "x"]:
@@ -976,7 +992,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 2)
def test_patma_080(self):
- return
+ return # disabled
x = b"xxx"
match x:
case [120, 120, 120]:
@@ -989,7 +1005,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 2)
def test_patma_081(self):
- return
+ return # disabled
x = 0
match x:
case 0 if not (x := 1):
@@ -1001,7 +1017,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_082(self):
- return
+ return # disabled
x = 0
match x:
case (1 as z) if not (x := 1):
@@ -1030,7 +1046,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_085(self):
- return
+ return # disabled
x = 0
y = None
match x:
@@ -1052,7 +1068,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_087(self):
- return
+ return # disabled
x = 0
match x:
case (0 | 1) | 2:
@@ -1061,7 +1077,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_088(self):
- return
+ return # disabled
x = 1
match x:
case (0 | 1) | 2:
@@ -1070,7 +1086,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_089(self):
- return
+ return # disabled
x = 2
match x:
case (0 | 1) | 2:
@@ -1079,7 +1095,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_090(self):
- return
+ return # disabled
x = 3
y = None
match x:
@@ -1089,7 +1105,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_091(self):
- return
+ return # disabled
x = 0
match x:
case 0 | (1 | 2):
@@ -1098,7 +1114,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_092(self):
- return
+ return # disabled
x = 1
match x:
case 0 | (1 | 2):
@@ -1107,7 +1123,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_093(self):
- return
+ return # disabled
x = 2
match x:
case 0 | (1 | 2):
@@ -1116,7 +1132,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_094(self):
- return
+ return # disabled
x = 3
y = None
match x:
@@ -1126,7 +1142,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_095(self):
- return
+ return # disabled
x = 0
match x:
case -0:
@@ -1342,7 +1358,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(x, 0)
def test_patma_118(self):
- return
+ return # disabled
x = []
match x:
case [*_, _]:
@@ -1353,7 +1369,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_119(self):
- return
+ return # disabled
x = collections.defaultdict(int)
match x:
case {0: 0}:
@@ -1364,7 +1380,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_120(self):
- return
+ return # disabled
x = collections.defaultdict(int)
match x:
case {0: 0}:
@@ -1376,14 +1392,14 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, {})
def test_patma_121(self):
- return
+ return # disabled
match ():
case ():
x = 0
self.assertEqual(x, 0)
def test_patma_122(self):
- return
+ return # disabled
match (0, 1, 2):
case (*x,):
y = 0
@@ -1391,7 +1407,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_123(self):
- return
+ return # disabled
match (0, 1, 2):
case 0, *x:
y = 0
@@ -1399,7 +1415,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_124(self):
- return
+ return # disabled
match (0, 1, 2):
case (0, 1, *x,):
y = 0
@@ -1407,7 +1423,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_125(self):
- return
+ return # disabled
match (0, 1, 2):
case 0, 1, 2, *x:
y = 0
@@ -1415,7 +1431,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_126(self):
- return
+ return # disabled
match (0, 1, 2):
case *x, 2,:
y = 0
@@ -1423,7 +1439,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_127(self):
- return
+ return # disabled
match (0, 1, 2):
case (*x, 1, 2):
y = 0
@@ -1431,7 +1447,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_128(self):
- return
+ return # disabled
match (0, 1, 2):
case *x, 0, 1, 2,:
y = 0
@@ -1439,7 +1455,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_129(self):
- return
+ return # disabled
match (0, 1, 2):
case (0, *x, 2):
y = 0
@@ -1447,7 +1463,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_130(self):
- return
+ return # disabled
match (0, 1, 2):
case 0, 1, *x, 2,:
y = 0
@@ -1455,7 +1471,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_131(self):
- return
+ return # disabled
match (0, 1, 2):
case (0, *x, 1, 2):
y = 0
@@ -1463,7 +1479,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_132(self):
- return
+ return # disabled
match (0, 1, 2):
case *x,:
y = 0
@@ -1471,7 +1487,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_133(self):
- return
+ return # disabled
x = collections.defaultdict(int, {0: 1})
match x:
case {1: 0}:
@@ -1484,7 +1500,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 2)
def test_patma_134(self):
- return
+ return # disabled
x = collections.defaultdict(int, {0: 1})
match x:
case {1: 0}:
@@ -1498,7 +1514,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, {0: 1})
def test_patma_135(self):
- return
+ return # disabled
x = collections.defaultdict(int, {0: 1})
match x:
case {1: 0}:
@@ -1512,7 +1528,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, {})
def test_patma_136(self):
- return
+ return # disabled
x = {0: 1}
match x:
case {1: 0}:
@@ -1525,7 +1541,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_137(self):
- return
+ return # disabled
x = {0: 1}
match x:
case {1: 0}:
@@ -1539,7 +1555,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, {0: 1})
def test_patma_138(self):
- return
+ return # disabled
x = {0: 1}
match x:
case {1: 0}:
@@ -1553,7 +1569,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, {})
def test_patma_139(self):
- return
+ return # disabled
x = False
match x:
case bool(z):
@@ -1563,7 +1579,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_140(self):
- return
+ return # disabled
x = True
match x:
case bool(z):
@@ -1573,7 +1589,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_141(self):
- return
+ return # disabled
x = bytearray()
match x:
case bytearray(z):
@@ -1583,7 +1599,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_142(self):
- return
+ return # disabled
x = b""
match x:
case bytes(z):
@@ -1593,7 +1609,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_143(self):
- return
+ return # disabled
x = {}
match x:
case dict(z):
@@ -1603,7 +1619,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_144(self):
- return
+ return # disabled
x = 0.0
match x:
case float(z):
@@ -1613,7 +1629,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_145(self):
- return
+ return # disabled
x = frozenset()
match x:
case frozenset(z):
@@ -1623,7 +1639,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_146(self):
- return
+ return # disabled
x = 0
match x:
case int(z):
@@ -1633,7 +1649,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_147(self):
- return
+ return # disabled
x = []
match x:
case list(z):
@@ -1643,7 +1659,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_148(self):
- return
+ return # disabled
x = set()
match x:
case set(z):
@@ -1653,7 +1669,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_149(self):
- return
+ return # disabled
x = ""
match x:
case str(z):
@@ -1663,7 +1679,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_150(self):
- return
+ return # disabled
x = ()
match x:
case tuple(z):
@@ -1673,7 +1689,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_151(self):
- return
+ return # disabled
x = 0
match x,:
case y,:
@@ -1683,7 +1699,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, 0)
def test_patma_152(self):
- return
+ return # disabled
w = 0
x = 0
match w, x:
@@ -1696,7 +1712,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(v, 0)
def test_patma_153(self):
- return
+ return # disabled
x = 0
match w := x,:
case y as v,:
@@ -1708,7 +1724,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(v, y)
def test_patma_154(self):
- return
+ return # disabled
x = 0
y = None
match x:
@@ -1718,7 +1734,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_155(self):
- return
+ return # disabled
x = 0
y = None
match x:
@@ -1737,7 +1753,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_157(self):
- return
+ return # disabled
x = 0
y = None
match x:
@@ -1757,7 +1773,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_159(self):
- return
+ return # disabled
x = 0
match x:
case 0 if not x:
@@ -1768,7 +1784,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_160(self):
- return
+ return # disabled
x = 0
z = None
match x:
@@ -1791,7 +1807,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_162(self):
- return
+ return # disabled
x = 0
match x:
case 1 if x:
@@ -1802,7 +1818,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_163(self):
- return
+ return # disabled
x = 0
y = None
match x:
@@ -1825,7 +1841,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_165(self):
- return
+ return # disabled
x = 0
match x:
case 1 if x:
@@ -1836,7 +1852,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 1)
def test_patma_166(self):
- return
+ return # disabled
x = 0
match x:
case z if not z:
@@ -1848,7 +1864,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_167(self):
- return
+ return # disabled
x = 0
match x:
case z if not z:
@@ -1860,7 +1876,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_168(self):
- return
+ return # disabled
x = 0
match x:
case z if not x:
@@ -1872,7 +1888,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_169(self):
- return
+ return # disabled
x = 0
match x:
case z if not z:
@@ -1884,7 +1900,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, x)
def test_patma_170(self):
- return
+ return # disabled
x = 0
match x:
case _ if not x:
@@ -1895,7 +1911,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_171(self):
- return
+ return # disabled
x = 0
y = None
match x:
@@ -1907,7 +1923,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(y, None)
def test_patma_172(self):
- return
+ return # disabled
x = 0
z = None
match x:
@@ -1920,7 +1936,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(z, None)
def test_patma_173(self):
- return
+ return # disabled
x = 0
match x:
case _ if not x:
@@ -1973,7 +1989,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(http_error(401 | 403 | 404), None) # 407
def test_patma_176(self):
- return
+ return # disabled
def whereis(point):
match point:
case (0, 0):
@@ -1993,7 +2009,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(whereis(42), "Not a point")
def test_patma_177(self):
- return
+ return # disabled
def whereis(point):
match point:
case Point(0, 0):
@@ -2017,7 +2033,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(whereis(42), "Not a point")
def test_patma_178(self):
- return
+ return # disabled
def whereis(point):
match point:
case Point(1, var):
@@ -2026,7 +2042,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(whereis(Point(0, 0)), None)
def test_patma_179(self):
- return
+ return # disabled
def whereis(point):
match point:
case Point(1, y=var):
@@ -2035,7 +2051,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(whereis(Point(0, 0)), None)
def test_patma_180(self):
- return
+ return # disabled
def whereis(point):
match point:
case Point(x=1, y=var):
@@ -2044,7 +2060,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(whereis(Point(0, 0)), None)
def test_patma_181(self):
- return
+ return # disabled
def whereis(point):
match point:
case Point(y=var, x=1):
@@ -2053,7 +2069,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(whereis(Point(0, 0)), None)
def test_patma_182(self):
- return
+ return # disabled
def whereis(points):
match points:
case []:
@@ -2076,7 +2092,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(whereis([Point(0, 1), Point(0, 1), Point(0, 1)]), "Something else")
def test_patma_183(self):
- return
+ return # disabled
def whereis(point):
match point:
case Point(x, y) if x == y:
@@ -2091,7 +2107,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(whereis(Point("X", "x")), "Not on the diagonal")
def test_patma_184(self):
- return
+ return # disabled
class Seq(collections.abc.Sequence):
__getitem__ = None
def __len__(self):
@@ -2102,7 +2118,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_185(self):
- return
+ return # disabled
class Seq(collections.abc.Sequence):
__getitem__ = None
def __len__(self):
@@ -2113,7 +2129,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_186(self):
- return
+ return # disabled
class Seq(collections.abc.Sequence):
def __getitem__(self, i):
return i
@@ -2127,7 +2143,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_187(self):
- return
+ return # disabled
w = range(10)
match w:
case [x, y, *rest]:
@@ -2139,7 +2155,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(rest, list(range(2, 10)))
def test_patma_188(self):
- return
+ return # disabled
w = range(100)
match w:
case (x, y, *rest):
@@ -2151,7 +2167,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(rest, list(range(2, 100)))
def test_patma_189(self):
- return
+ return # disabled
w = range(1000)
match w:
case x, y, *rest:
@@ -2163,7 +2179,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(rest, list(range(2, 1000)))
def test_patma_190(self):
- return
+ return # disabled
w = range(1 << 10)
match w:
case [x, y, *_]:
@@ -2174,7 +2190,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_191(self):
- return
+ return # disabled
w = range(1 << 20)
match w:
case (x, y, *_):
@@ -2185,7 +2201,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_192(self):
- return
+ return # disabled
w = range(1 << 30)
match w:
case x, y, *_:
@@ -2196,7 +2212,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_193(self):
- return
+ return # disabled
x = {"bandwidth": 0, "latency": 1}
match x:
case {"bandwidth": b, "latency": l}:
@@ -2207,7 +2223,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_194(self):
- return
+ return # disabled
x = {"bandwidth": 0, "latency": 1, "key": "value"}
match x:
case {"latency": l, "bandwidth": b}:
@@ -2218,7 +2234,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_195(self):
- return
+ return # disabled
x = {"bandwidth": 0, "latency": 1, "key": "value"}
match x:
case {"bandwidth": b, "latency": l, **rest}:
@@ -2230,7 +2246,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_196(self):
- return
+ return # disabled
x = {"bandwidth": 0, "latency": 1}
match x:
case {"latency": l, "bandwidth": b, **rest}:
@@ -2242,7 +2258,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_197(self):
- return
+ return # disabled
w = [Point(-1, 0), Point(1, 2)]
match w:
case (Point(x1, y1), Point(x2, y2) as p2):
@@ -2308,7 +2324,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(f(3.0), None)
def test_patma_200(self):
- return
+ return # disabled
class Class:
__match_args__ = ("a", "b")
c = Class()
@@ -2322,7 +2338,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_201(self):
- return
+ return # disabled
class Class:
__match_args__ = ("a", "b")
c = Class()
@@ -2336,7 +2352,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_202(self):
- return
+ return # disabled
class Parent:
__match_args__ = "a", "b"
class Child(Parent):
@@ -2352,7 +2368,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_203(self):
- return
+ return # disabled
class Parent:
__match_args__ = ("a", "b")
class Child(Parent):
@@ -2407,7 +2423,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(f("1"), None)
def test_patma_207(self):
- return
+ return # disabled
def f(w):
match w:
case [1, 2] | [3, 4]:
@@ -2444,7 +2460,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(f((1, 2)), {})
def test_patma_210(self):
- return
+ return # disabled
def f(w):
match w:
case (x, y, z):
@@ -2462,7 +2478,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(f(bytearray(b"abc")), None)
def test_patma_211(self):
- return
+ return # disabled
def f(w):
match w:
case {"x": x, "y": "y", "z": z}:
@@ -2475,7 +2491,7 @@ class TestPatma(unittest.TestCase):
self.assertIs(f(({"x": "x", "y": "y"})), None)
def test_patma_212(self):
- return
+ return # disabled
def f(w):
match w:
case Point(int(xx), y="hello"):
@@ -2485,7 +2501,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(f(Point(42, "hello")), {"xx": 42})
def test_patma_213(self):
- return
+ return # disabled
def f(w):
match w:
case (p, q) as x:
@@ -2526,7 +2542,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(set(f()), {"abc"})
def test_patma_218(self):
- return
+ return # disabled
def f():
match ..., ...:
case a, b:
@@ -2534,7 +2550,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(set(f()), {"a", "b"})
def test_patma_219(self):
- return
+ return # disabled
def f():
match {"k": ..., "l": ...}:
case {"k": a, "l": b}:
@@ -2542,7 +2558,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(set(f()), {"a", "b"})
def test_patma_220(self):
- return
+ return # disabled
def f():
match Point(..., ...):
case Point(x, y=y):
@@ -2629,14 +2645,14 @@ class TestPatma(unittest.TestCase):
self.assertIs(f(3), None)
def test_patma_228(self):
- return
+ return # disabled
match():
case():
x = 0
self.assertEqual(x, 0)
def test_patma_229(self):
- return
+ return # disabled
x = 0
match(x):
case(x):
@@ -2721,7 +2737,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_238(self):
- return
+ return # disabled
x = ((0, 1), (2, 3))
match x:
case ((a as b, c as d) as e) as w, ((f as g, h) as i) as z:
@@ -2741,7 +2757,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, (2, 3))
def test_patma_239(self):
- return
+ return # disabled
x = collections.UserDict({0: 1, 2: 3})
match x:
case {2: 3}:
@@ -2750,7 +2766,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 0)
def test_patma_240(self):
- return
+ return # disabled
x = collections.UserDict({0: 1, 2: 3})
match x:
case {2: 3, **z}:
@@ -2760,7 +2776,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, {0: 1})
def test_patma_241(self):
- return
+ return # disabled
x = [[{0: 0}]]
match x:
case list([({-0-0j: int(real=0+0j, imag=0-0j) | (1) as z},)]):
@@ -2770,7 +2786,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_242(self):
- return
+ return # disabled
x = range(3)
match x:
case [y, *_, z]:
@@ -2781,7 +2797,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 2)
def test_patma_243(self):
- return
+ return # disabled
x = range(3)
match x:
case [_, *_, y]:
@@ -2791,7 +2807,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_244(self):
- return
+ return # disabled
x = range(3)
match x:
case [*_, y]:
@@ -2801,7 +2817,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_245(self):
- return
+ return # disabled
x = {"y": 1}
match x:
case {"y": (0 as y) | (1 as y)}:
@@ -2811,7 +2827,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(z, 0)
def test_patma_246(self):
- return
+ return # disabled
def f(x):
match x:
case ((a, b, c, d, e, f, g, h, i, 9) |
@@ -2836,7 +2852,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(f(range(10, 20)), alts[4])
def test_patma_247(self):
- return
+ return # disabled
def f(x):
match x:
case [y, (a, b, c, d, e, f, g, h, i, 9) |
@@ -2861,7 +2877,7 @@ class TestPatma(unittest.TestCase):
self.assertEqual(f((False, range(10, 20), True)), alts[4])
def test_patma_248(self):
- return
+ return # disabled
class C(dict):
@staticmethod
def get(key, default=None):
@@ -2874,6 +2890,21 @@ class TestPatma(unittest.TestCase):
self.assertEqual(y, 'bar')
+ def test_patma_249(self):
+ return # disabled
+ class C:
+ __attr = "eggs" # mangled to _C__attr
+ _Outer__attr = "bacon"
+ class Outer:
+ def f(self, x):
+ match x:
+ # looks up __attr, not _C__attr or _Outer__attr
+ case C(__attr=y):
+ return y
+ c = C()
+ setattr(c, "__attr", "spam") # setattr is needed because we're in a class scope
+ self.assertEqual(Outer().f(c), "spam")
+
class TestSyntaxErrors(unittest.TestCase):
@@ -2896,6 +2927,7 @@ class TestSyntaxErrors(unittest.TestCase):
""")
+ @disable # validation will be added when class patterns are added
def test_attribute_name_repeated_in_class_pattern(self):
self.assert_syntax_error("""
match ...:
@@ -2994,6 +3026,7 @@ class TestSyntaxErrors(unittest.TestCase):
pass
""")
+ @disable # will be implemented as part of sequence patterns
def test_multiple_starred_names_in_sequence_pattern_0(self):
self.assert_syntax_error("""
match ...:
@@ -3001,6 +3034,7 @@ class TestSyntaxErrors(unittest.TestCase):
pass
""")
+ @disable # will be implemented as part of sequence patterns
def test_multiple_starred_names_in_sequence_pattern_1(self):
self.assert_syntax_error("""
match ...:
@@ -3135,6 +3169,7 @@ class TestSyntaxErrors(unittest.TestCase):
pass
""")
+ @disable # validation will be added when class patterns are added
def test_mapping_pattern_duplicate_key(self):
self.assert_syntax_error("""
match ...:
@@ -3142,6 +3177,7 @@ class TestSyntaxErrors(unittest.TestCase):
pass
""")
+ @disable # validation will be added when class patterns are added
def test_mapping_pattern_duplicate_key_edge_case0(self):
self.assert_syntax_error("""
match ...:
@@ -3149,6 +3185,7 @@ class TestSyntaxErrors(unittest.TestCase):
pass
""")
+ @disable # validation will be added when class patterns are added
def test_mapping_pattern_duplicate_key_edge_case1(self):
self.assert_syntax_error("""
match ...:
@@ -3156,6 +3193,7 @@ class TestSyntaxErrors(unittest.TestCase):
pass
""")
+ @disable # validation will be added when class patterns are added
def test_mapping_pattern_duplicate_key_edge_case2(self):
self.assert_syntax_error("""
match ...:
@@ -3163,6 +3201,7 @@ class TestSyntaxErrors(unittest.TestCase):
pass
""")
+ @disable # validation will be added when class patterns are added
def test_mapping_pattern_duplicate_key_edge_case3(self):
self.assert_syntax_error("""
match ...:
@@ -3173,7 +3212,7 @@ class TestSyntaxErrors(unittest.TestCase):
class TestTypeErrors(unittest.TestCase):
def test_accepts_positional_subpatterns_0(self):
- return
+ return # disabled
class Class:
__match_args__ = ()
x = Class()
@@ -3186,7 +3225,7 @@ class TestTypeErrors(unittest.TestCase):
self.assertIs(z, None)
def test_accepts_positional_subpatterns_1(self):
- return
+ return # disabled
x = range(10)
y = None
with self.assertRaises(TypeError):
@@ -3197,7 +3236,7 @@ class TestTypeErrors(unittest.TestCase):
self.assertIs(y, None)
def test_got_multiple_subpatterns_for_attribute_0(self):
- return
+ return # disabled
class Class:
__match_args__ = ("a", "a")
a = None
@@ -3212,7 +3251,7 @@ class TestTypeErrors(unittest.TestCase):
self.assertIs(z, None)
def test_got_multiple_subpatterns_for_attribute_1(self):
- return
+ return # disabled
class Class:
__match_args__ = ("a",)
a = None
@@ -3227,7 +3266,7 @@ class TestTypeErrors(unittest.TestCase):
self.assertIs(z, None)
def test_match_args_elements_must_be_strings(self):
- return
+ return # disabled
class Class:
__match_args__ = (None,)
x = Class()
@@ -3240,7 +3279,7 @@ class TestTypeErrors(unittest.TestCase):
self.assertIs(z, None)
def test_match_args_must_be_a_tuple_0(self):
- return
+ return # disabled
class Class:
__match_args__ = None
x = Class()
@@ -3253,7 +3292,7 @@ class TestTypeErrors(unittest.TestCase):
self.assertIs(z, None)
def test_match_args_must_be_a_tuple_1(self):
- return
+ return # disabled
class Class:
__match_args__ = "XYZ"
x = Class()
@@ -3266,7 +3305,7 @@ class TestTypeErrors(unittest.TestCase):
self.assertIs(z, None)
def test_match_args_must_be_a_tuple_2(self):
- return
+ return # disabled
class Class:
__match_args__ = ["spam", "eggs"]
spam = 0
@@ -3285,7 +3324,7 @@ class TestTypeErrors(unittest.TestCase):
class TestValueErrors(unittest.TestCase):
def test_mapping_pattern_checks_duplicate_key_1(self):
- return
+ return # disabled
class Keys:
KEY = "a"
x = {"a": 0, "b": 1}
diff --git a/tests/run/trace_nogil.pyx b/tests/run/trace_nogil.pyx
index dee443e5b..175935ced 100644
--- a/tests/run/trace_nogil.pyx
+++ b/tests/run/trace_nogil.pyx
@@ -1,6 +1,6 @@
# cython: linetrace=True
-cdef void foo(int err) nogil except *:
+cdef void foo(int err) except * nogil:
with gil:
raise ValueError(err)
diff --git a/tests/run/tuple_constants.pyx b/tests/run/tuple_constants.pyx
index f60d5d818..fa5794cf7 100644
--- a/tests/run/tuple_constants.pyx
+++ b/tests/run/tuple_constants.pyx
@@ -36,7 +36,12 @@ def test_deduplicated_args():
# are generated often with the same argument names. Therefore it's worth ensuring that
# they are correctly deduplicated
import sys
- if not hasattr(sys, "pypy_version_info"): # test doesn't work on PyPy (which is probably fair enough)
+ check_identity_of_co_varnames = (
+ not hasattr(sys, "pypy_version_info") and # test doesn't work on PyPy (which is probably fair enough)
+ sys.version_info < (3, 11) # on Python 3.11 co_varnames returns a new, dynamically-calculated tuple
+ # each time it is run
+ )
+ if check_identity_of_co_varnames:
assert func1.__code__.co_varnames is func2.__code__.co_varnames
@cython.test_assert_path_exists("//TupleNode",
diff --git a/tests/run/type_inference.pyx b/tests/run/type_inference.pyx
index df77f6bd9..9a72022b2 100644
--- a/tests/run/type_inference.pyx
+++ b/tests/run/type_inference.pyx
@@ -242,7 +242,7 @@ def c_functions():
>>> c_functions()
"""
f = cfunc
- assert typeof(f) == 'int (*)(int)', typeof(f)
+ assert typeof(f) == 'int (*)(int) except? -1', typeof(f)
assert 2 == f(1)
def builtin_functions():
@@ -537,7 +537,7 @@ def safe_c_functions():
>>> safe_c_functions()
"""
f = cfunc
- assert typeof(f) == 'int (*)(int)', typeof(f)
+ assert typeof(f) == 'int (*)(int) except? -1', typeof(f)
assert 2 == f(1)
@infer_types(None)
diff --git a/tests/run/with_gil.pyx b/tests/run/with_gil.pyx
index 6fee3f192..2eed27eac 100644
--- a/tests/run/with_gil.pyx
+++ b/tests/run/with_gil.pyx
@@ -259,7 +259,7 @@ cpdef test_cpdef():
# Now test some cdef functions with different return types
-cdef void void_nogil_ignore_exception() nogil:
+cdef void void_nogil_ignore_exception() noexcept nogil:
with gil:
raise ExceptionWithMsg("This is swallowed")
@@ -267,7 +267,7 @@ cdef void void_nogil_ignore_exception() nogil:
with gil:
print "unreachable"
-cdef void void_nogil_nested_gil() nogil:
+cdef void void_nogil_nested_gil() noexcept nogil:
with gil:
with nogil:
with gil:
@@ -304,7 +304,7 @@ def test_nogil_void_funcs_with_nogil():
void_nogil_nested_gil()
-cdef PyObject *nogil_propagate_exception() nogil except NULL:
+cdef PyObject *nogil_propagate_exception() except NULL nogil:
with nogil:
with gil:
raise Exception("This exception propagates!")
diff --git a/tests/run/with_gil_automatic.pyx b/tests/run/with_gil_automatic.pyx
index 425dbbce7..954ed6d47 100644
--- a/tests/run/with_gil_automatic.pyx
+++ b/tests/run/with_gil_automatic.pyx
@@ -28,7 +28,7 @@ def test_print_in_nogil_section(x):
@cython.test_fail_if_path_exists(
"//GILStatNode//GILStatNode",
)
-cpdef int test_print_in_nogil_func(x) nogil except -1:
+cpdef int test_print_in_nogil_func(x) except -1 nogil:
"""
>>> _ = test_print_in_nogil_func(123)
--123--
@@ -61,7 +61,7 @@ def test_raise_in_nogil_section(x):
@cython.test_fail_if_path_exists(
"//GILStatNode//GILStatNode",
)
-cpdef int test_raise_in_nogil_func(x) nogil except -1:
+cpdef int test_raise_in_nogil_func(x) except -1 nogil:
"""
>>> test_raise_in_nogil_func(123)
Traceback (most recent call last):
@@ -128,7 +128,7 @@ def assert_in_nogil_section_string(int x):
"//AssertStatNode//GILStatNode",
"//AssertStatNode//GILStatNode//RaiseStatNode",
)
-cpdef int assert_in_nogil_func(int x) nogil except -1:
+cpdef int assert_in_nogil_func(int x) except -1 nogil:
"""
>>> _ = assert_in_nogil_func(123)
>>> assert_in_nogil_func(0)
diff --git a/tests/run/withnogil.pyx b/tests/run/withnogil.pyx
index 55b7896a7..a64779dfe 100644
--- a/tests/run/withnogil.pyx
+++ b/tests/run/withnogil.pyx
@@ -19,5 +19,5 @@ def g():
h()
return 1
-cdef int h() nogil except -1:
+cdef int h() except -1 nogil:
pass
diff --git a/tests/testsupport/cythonarrayutil.pxi b/tests/testsupport/cythonarrayutil.pxi
index 50d764acd..683dc4b71 100644
--- a/tests/testsupport/cythonarrayutil.pxi
+++ b/tests/testsupport/cythonarrayutil.pxi
@@ -2,7 +2,7 @@ from libc.stdlib cimport malloc, free
cimport cython
from cython.view cimport array
-cdef void callback(void *data):
+cdef void callback(void *data) noexcept:
print "callback called"
free(data)
diff --git a/tests/windows_bugs_39.txt b/tests/windows_bugs_39.txt
new file mode 100644
index 000000000..6b56b9d33
--- /dev/null
+++ b/tests/windows_bugs_39.txt
@@ -0,0 +1,3 @@
+# https://github.com/cython/cython/issues/3450
+TestInline
+scanner_trace