Sp--; Sp[0] = (W_)&stg_enter_info;
RETURN_TO_SCHEDULER(ThreadInterpret, HeapOverflow);
}
-
- // "Standard" stack check
- if (Sp - (INTERP_STACK_CHECK_THRESH+1) < SpLim) {
- Sp--; Sp[0] = (W_)&stg_enter_info;
- RETURN_TO_SCHEDULER(ThreadInterpret, StackOverflow);
- }
+ // Stack checks aren't necessary at return points, the stack use
+ // is aggregated into the enclosing function entry point.
goto run_BCO;
run_BCO_return_unboxed:
if (doYouWantToGC()) {
RETURN_TO_SCHEDULER(ThreadInterpret, HeapOverflow);
}
-
- // "Standard" stack check
- if (Sp - (INTERP_STACK_CHECK_THRESH+1) < SpLim) {
- RETURN_TO_SCHEDULER(ThreadInterpret, StackOverflow);
- }
+ // Stack checks aren't necessary at return points, the stack use
+ // is aggregated into the enclosing function entry point.
goto run_BCO;
run_BCO_fun:
RETURN_TO_SCHEDULER(ThreadInterpret, HeapOverflow);
}
- // "Standard" stack check
- if (Sp - (INTERP_STACK_CHECK_THRESH+1) < SpLim) {
+ // Stack check
+ if (Sp - INTERP_STACK_CHECK_THRESH < SpLim) {
Sp -= 2;
Sp[1] = (W_)obj;
Sp[0] = (W_)&stg_apply_interp_info; // placeholder, really
switch (BCO_NEXT) {
- case bci_STKCHECK:
- {
- // An explicit stack check; we hope these will be rare.
+ case bci_STKCHECK: {
+ // Explicit stack check at the beginning of a function
+ // *only* (stack checks in case alternatives are
+ // propagated to the enclosing function).
int stk_words_reqd = BCO_NEXT + 1;
if (Sp - stk_words_reqd < SpLim) {
- Sp--; Sp[0] = (W_)obj;
+ Sp -= 2;
+ Sp[1] = (W_)obj;
+ Sp[0] = (W_)&stg_apply_interp_info;
RETURN_TO_SCHEDULER(ThreadInterpret, StackOverflow);
+ } else {
+ goto nextInsn;
}
- goto nextInsn;
}
case bci_PUSH_L: {
int o_itbl = BCO_NEXT;
void(*marshall_fn)(void*) = (void (*)(void*))BCO_LIT(o_itbl);
+#ifdef RTS_SUPPORTS_THREADS
+ // Threaded RTS:
+ // Arguments on the TSO stack are unsafe here, because garbage
+ // collection might move the TSO (and hence its stack) as soon as
+ // we call suspendThread below, invalidating any pointer into it.
+
+ W_ arguments[stk_offset];
+
+ memcpy(arguments, Sp, sizeof(W_) * stk_offset);
+#endif
+
// There are a bunch of non-ptr words on the stack (the
// ccall args, the ccall fun address and space for the
// result), which we need to cover with an info table
SAVE_STACK_POINTERS;
tok = suspendThread(&cap->r,rtsFalse);
- // Careful: suspendThread might have shifted the stack
+#ifndef RTS_SUPPORTS_THREADS
+ // Careful:
+ // suspendThread might have shifted the stack
// around (stack squeezing), so we have to grab the real
- // Sp out of the TSO to find the ccall args again:
- marshall_fn ( (void*)(cap->r.rCurrentTSO->sp + RET_DYN_SIZE
- + sizeofW(StgRetDyn)) );
+ // Sp out of the TSO to find the ccall args again.
+
+ marshall_fn ( (void*)(cap->r.rCurrentTSO->sp + RET_DYN_SIZE + sizeofW(StgRetDyn)) );
+#else
+ // Threaded RTS:
+ // We already made a stack-allocated copy of the arguments above.
+
+ marshall_fn ( arguments );
+#endif
// And restart the thread again, popping the RET_DYN frame.
cap = (Capability *)((void *)resumeThread(tok,rtsFalse) - sizeof(StgFunTable));
LOAD_STACK_POINTERS;
Sp += RET_DYN_SIZE + sizeofW(StgRetDyn);
+
+
+#ifdef RTS_SUPPORTS_THREADS
+ // Threaded RTS:
+ // Copy the "arguments", which might include a return value,
+ // back to the TSO stack. It would of course be enough to
+ // just copy the return value, but we don't know the offset.
+ memcpy(Sp, arguments, sizeof(W_) * stk_offset);
+#endif
+
goto nextInsn;
}