Diffstat (limited to 'Objects/object.c')
-rw-r--r--  Objects/object.c  |  56
1 file changed, 8 insertions(+), 48 deletions(-)
diff --git a/Objects/object.c b/Objects/object.c
index 854cc85b1c..c87a83f225 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -2092,25 +2092,13 @@ finally:
/* Trashcan support. */
-/* Add op to the _PyTrash_delete_later list. Called when the current
+#define _PyTrash_UNWIND_LEVEL 50
+
+/* Add op to the gcstate->trash_delete_later list. Called when the current
* call-stack depth gets large. op must be a currently untracked gc'ed
* object, with refcount 0. Py_DECREF must already have been called on it.
*/
-void
-_PyTrash_deposit_object(PyObject *op)
-{
- PyInterpreterState *interp = _PyInterpreterState_GET();
- struct _gc_runtime_state *gcstate = &interp->gc;
-
- _PyObject_ASSERT(op, _PyObject_IS_GC(op));
- _PyObject_ASSERT(op, !_PyObject_GC_IS_TRACKED(op));
- _PyObject_ASSERT(op, Py_REFCNT(op) == 0);
- _PyGCHead_SET_PREV(_Py_AS_GC(op), gcstate->trash_delete_later);
- gcstate->trash_delete_later = op;
-}
-
-/* The equivalent API, using per-thread state recursion info */
-void
+static void
_PyTrash_thread_deposit_object(PyObject *op)
{
PyThreadState *tstate = _PyThreadState_GET();
@@ -2121,37 +2109,9 @@ _PyTrash_thread_deposit_object(PyObject *op)
tstate->trash_delete_later = op;
}
-/* Deallocate all the objects in the _PyTrash_delete_later list. Called when
- * the call-stack unwinds again.
- */
-void
-_PyTrash_destroy_chain(void)
-{
- PyInterpreterState *interp = _PyInterpreterState_GET();
- struct _gc_runtime_state *gcstate = &interp->gc;
-
- while (gcstate->trash_delete_later) {
- PyObject *op = gcstate->trash_delete_later;
- destructor dealloc = Py_TYPE(op)->tp_dealloc;
-
- gcstate->trash_delete_later =
- (PyObject*) _PyGCHead_PREV(_Py_AS_GC(op));
-
- /* Call the deallocator directly. This used to try to
- * fool Py_DECREF into calling it indirectly, but
- * Py_DECREF was already called on this object, and in
- * assorted non-release builds calling Py_DECREF again ends
- * up distorting allocation statistics.
- */
- _PyObject_ASSERT(op, Py_REFCNT(op) == 0);
- ++gcstate->trash_delete_nesting;
- (*dealloc)(op);
- --gcstate->trash_delete_nesting;
- }
-}
-
-/* The equivalent API, using per-thread state recursion info */
-void
+/* Deallocate all the objects in the gcstate->trash_delete_later list.
+ * Called when the call-stack unwinds again. */
+static void
_PyTrash_thread_destroy_chain(void)
{
PyThreadState *tstate = _PyThreadState_GET();
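The deleted interpreter-state variants and the surviving per-thread ones implement the same mechanism: a LIFO list threaded through the dead objects themselves. Depositing stores the current list head in the object's GC header (the prev slot is free to reuse because the object is already untracked with refcount 0), and destroying the chain pops one object at a time and calls its deallocator. A minimal standalone sketch of that chain, using illustrative names (obj_t, trash_deposit, trash_destroy_chain) rather than the CPython API:

#include <stdlib.h>

/* Illustrative stand-ins: obj_t plays the role of a dead, untracked GC
 * object, and gc_prev plays the role of the GC header slot that
 * _PyGCHead_SET_PREV/_PyGCHead_PREV reuse for the chain. */
typedef struct obj {
    struct obj *gc_prev;
    int payload;
} obj_t;

/* Per-thread in CPython: tstate->trash_delete_later. */
static obj_t *trash_delete_later = NULL;

/* Deposit: push onto the LIFO chain.  The link lives inside the object,
 * so deferring a deallocation never allocates and cannot fail. */
static void trash_deposit(obj_t *op)
{
    op->gc_prev = trash_delete_later;
    trash_delete_later = op;
}

/* Destroy chain: pop and destroy one object at a time once the call
 * stack has unwound, turning deep recursion into a flat loop. */
static void trash_destroy_chain(void)
{
    while (trash_delete_later != NULL) {
        obj_t *op = trash_delete_later;
        trash_delete_later = op->gc_prev;
        free(op);   /* stands in for (*Py_TYPE(op)->tp_dealloc)(op) */
    }
}

int main(void)
{
    for (int i = 0; i < 3; i++) {
        obj_t *op = malloc(sizeof *op);
        op->payload = i;
        trash_deposit(op);    /* as if the refcount had just hit zero deep in a call stack */
    }
    trash_destroy_chain();    /* later, at a safe stack depth: frees 2, 1, 0 */
    return 0;
}
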
@@ -2192,7 +2152,7 @@ _PyTrash_thread_destroy_chain(void)
int
_PyTrash_begin(PyThreadState *tstate, PyObject *op)
{
- if (tstate->trash_delete_nesting >= PyTrash_UNWIND_LEVEL) {
+ if (tstate->trash_delete_nesting >= _PyTrash_UNWIND_LEVEL) {
/* Store the object (to be deallocated later) and jump past
* Py_TRASHCAN_END, skipping the body of the deallocator */
_PyTrash_thread_deposit_object(op);
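
Together with _PyTrash_end, this check gives every cooperating deallocator a guard pair: past _PyTrash_UNWIND_LEVEL nested calls the object is parked on the per-thread chain and the deallocator body is skipped, and once the stack unwinds back to the outermost level the chain is drained iteratively. Below is a self-contained sketch of that protocol for a toy recursive destructor; UNWIND_LEVEL, node_t, trash_begin, trash_end and node_dealloc are illustrative stand-ins, not the CPython functions.

#include <assert.h>
#include <stdlib.h>

#define UNWIND_LEVEL 50          /* plays the role of _PyTrash_UNWIND_LEVEL */

typedef struct node {
    struct node *child;          /* destroying this recurses into node_dealloc */
    struct node *later;          /* link for the deferred (trashcan) chain */
} node_t;

/* Per-thread in CPython: tstate->trash_delete_later / trash_delete_nesting. */
static node_t *delete_later = NULL;
static int nesting = 0;

static void node_dealloc(node_t *op);

/* Drain the chain.  Raising the nesting counter keeps a re-entrant
 * trash_end() from starting a second, nested drain loop. */
static void destroy_chain(void)
{
    assert(nesting == 0);
    ++nesting;
    while (delete_later != NULL) {
        node_t *op = delete_later;
        delete_later = op->later;
        node_dealloc(op);
    }
    --nesting;
}

/* Like _PyTrash_begin: past the depth limit, park the object and tell the
 * caller to skip its body; otherwise count one more nested deallocation. */
static int trash_begin(node_t *op)
{
    if (nesting >= UNWIND_LEVEL) {
        op->later = delete_later;
        delete_later = op;
        return 1;
    }
    ++nesting;
    return 0;
}

/* Like _PyTrash_end: once the outermost deallocation unwinds, destroy
 * everything that was parked in the meantime. */
static void trash_end(void)
{
    --nesting;
    if (delete_later != NULL && nesting <= 0) {
        destroy_chain();
    }
}

/* A toy tp_dealloc for a linked structure: without the guard, a chain of
 * N nodes would need N stack frames; with it, depth stays near UNWIND_LEVEL. */
static void node_dealloc(node_t *op)
{
    if (trash_begin(op)) {
        return;                  /* deferred; destroy_chain() frees it later */
    }
    if (op->child != NULL) {
        node_dealloc(op->child);
    }
    free(op);
    trash_end();
}

int main(void)
{
    node_t *head = NULL;
    for (int i = 0; i < 100000; i++) {   /* deep enough to overflow without the guard */
        node_t *n = malloc(sizeof *n);
        n->child = head;
        n->later = NULL;
        head = n;
    }
    node_dealloc(head);          /* C recursion depth stays bounded */
    return 0;
}

Without the guard, freeing the 100000-node chain in main would need one C stack frame per node; with it, recursion depth stays below UNWIND_LEVEL plus a small constant, which is exactly what the patched _PyTrash_begin check above buys real deallocators.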