author     simonmar <unknown>  2003-01-10 16:33:50 +0000
committer  simonmar <unknown>  2003-01-10 16:33:50 +0000
commit     c3fb6ff19bae7b0f0f0368244ca1a386b7f57ca6 (patch)
tree       3ea630b891704cd711360b94538429faf169e5ea /ghc/rts/Interpreter.c
parent     2d6705ca56516c75c2f18c811f38c6ba7be02329 (diff)
download   haskell-c3fb6ff19bae7b0f0f0368244ca1a386b7f57ca6.tar.gz
[project @ 2003-01-10 16:33:49 by simonmar]
Changes to the way stack checks are handled in GHCi, to fix a rare bug when a stack check fails in a BCO.

We now aggregate all stack use from case alternatives up to the enclosing function/thunk BCO, and do a single stack check at the beginning of that BCO. This simplifies the stack check failure code, because it doesn't have to cope with the case when a case alternative needs to restart.

We still employ the trick of doing a fixed stack check before every BCO, only inserting an actual stack check instruction in the BCO if it needs more stack than this fixed amount. The fixed stack check is now only done before running a function/thunk BCO.
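To illustrate the aggregation idea, here is a minimal sketch, not the actual byte-code generator: the names BCO, StackWords, aggregate_stack_use and FIXED_STACK_CHECK_THRESH are hypothetical and invented for this example. A BCO's stack requirement is its own use plus the worst case over its case-alternative BCOs, and an explicit STKCHECK is only needed when that aggregate exceeds the fixed amount checked before every function/thunk BCO.

#include <stddef.h>
#include <stdbool.h>

typedef size_t StackWords;

/* Hypothetical per-BCO stack accounting. */
typedef struct BCO {
    StackWords  own_use;   /* stack words this BCO uses itself            */
    size_t      n_alts;    /* number of case-alternative sub-BCOs         */
    struct BCO **alts;     /* the alternative BCOs themselves (may nest)  */
} BCO;

/* Aggregate stack use: this BCO's own words plus the worst case over
 * all of its case alternatives, recursively.                            */
static StackWords aggregate_stack_use(const BCO *bco)
{
    StackWords worst_alt = 0;
    for (size_t i = 0; i < bco->n_alts; i++) {
        StackWords alt = aggregate_stack_use(bco->alts[i]);
        if (alt > worst_alt) worst_alt = alt;
    }
    return bco->own_use + worst_alt;
}

/* Hypothetical stand-in for the fixed amount that is always checked
 * before running a function/thunk BCO.                                  */
#define FIXED_STACK_CHECK_THRESH 256

/* Only emit an explicit STKCHECK instruction when the aggregated
 * requirement exceeds the fixed check done at every function entry.     */
static bool needs_explicit_stkcheck(const BCO *bco)
{
    return aggregate_stack_use(bco) > FIXED_STACK_CHECK_THRESH;
}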
Diffstat (limited to 'ghc/rts/Interpreter.c')
-rw-r--r--  ghc/rts/Interpreter.c | 33
1 file changed, 15 insertions(+), 18 deletions(-)
diff --git a/ghc/rts/Interpreter.c b/ghc/rts/Interpreter.c
index 89a5e5928d..d39becb800 100644
--- a/ghc/rts/Interpreter.c
+++ b/ghc/rts/Interpreter.c
@@ -672,12 +672,8 @@ run_BCO_return:
Sp--; Sp[0] = (W_)&stg_enter_info;
RETURN_TO_SCHEDULER(ThreadInterpret, HeapOverflow);
}
-
- // "Standard" stack check
- if (Sp - (INTERP_STACK_CHECK_THRESH+1) < SpLim) {
- Sp--; Sp[0] = (W_)&stg_enter_info;
- RETURN_TO_SCHEDULER(ThreadInterpret, StackOverflow);
- }
+ // Stack checks aren't necessary at return points, the stack use
+ // is aggregated into the enclosing function entry point.
goto run_BCO;
run_BCO_return_unboxed:
@@ -685,11 +681,8 @@ run_BCO_return_unboxed:
if (doYouWantToGC()) {
RETURN_TO_SCHEDULER(ThreadInterpret, HeapOverflow);
}
-
- // "Standard" stack check
- if (Sp - (INTERP_STACK_CHECK_THRESH+1) < SpLim) {
- RETURN_TO_SCHEDULER(ThreadInterpret, StackOverflow);
- }
+ // Stack checks aren't necessary at return points, the stack use
+ // is aggregated into the enclosing function entry point.
goto run_BCO;
run_BCO_fun:
@@ -709,8 +702,8 @@ run_BCO_fun:
RETURN_TO_SCHEDULER(ThreadInterpret, HeapOverflow);
}
- // "Standard" stack check
- if (Sp - (INTERP_STACK_CHECK_THRESH+1) < SpLim) {
+ // Stack check
+ if (Sp - INTERP_STACK_CHECK_THRESH < SpLim) {
Sp -= 2;
Sp[1] = (W_)obj;
Sp[0] = (W_)&stg_apply_interp_info; // placeholder, really
@@ -766,15 +759,19 @@ run_BCO:
switch (BCO_NEXT) {
- case bci_STKCHECK:
- {
- // An explicit stack check; we hope these will be rare.
+ case bci_STKCHECK: {
+ // Explicit stack check at the beginning of a function
+ // *only* (stack checks in case alternatives are
+ // propagated to the enclosing function).
int stk_words_reqd = BCO_NEXT + 1;
if (Sp - stk_words_reqd < SpLim) {
- Sp--; Sp[0] = (W_)obj;
+ Sp -= 2;
+ Sp[1] = (W_)obj;
+ Sp[0] = (W_)&stg_apply_interp_info;
RETURN_TO_SCHEDULER(ThreadInterpret, StackOverflow);
+ } else {
+ goto nextInsn;
}
- goto nextInsn;
}
case bci_PUSH_L: {