+\begin{code}
+-- | Given the arguments of a slow (unknown-arity) function call, return
+-- the RTS generic-apply entry label to jump to, together with the
+-- argument amodes to push: the prefix matched by 'matchSlowPattern',
+-- followed by the remaining arguments chunked up by 'slowArgs'.
+constructSlowCall :: [CAddrMode] -> (CAddrMode, [CAddrMode])
+ -- don't forget the zero case
+constructSlowCall [] = (CLbl stg_ap_0 CodePtrRep , [])
+constructSlowCall amodes =
+ -- traceSlowCall amodes $
+ (CLbl lbl CodePtrRep, these ++ slowArgs rest)
+ where (tag, these, rest) = matchSlowPattern amodes
+ lbl = mkRtsApplyEntryLabel tag
+
+-- | Entry label of the RTS apply code for a call with zero arguments
+-- (used by the empty case of 'constructSlowCall').
+stg_ap_0 = mkRtsApplyEntryLabel SLIT("0")
+
+-- | 'slowArgs' takes a list of function arguments and prepares them for
+-- pushing on the stack for "extra" arguments to a function which requires
+-- fewer arguments than we currently have.
+--
+-- Each chunk of arguments matched by 'matchSlowPattern' is preceded by
+-- the corresponding RTS apply *info* label (pushed with 'RetRep', i.e.
+-- as a return address), so the leftover arguments become a chain of
+-- pending-apply stack frames.
+slowArgs :: [CAddrMode] -> [CAddrMode]
+slowArgs [] = []
+slowArgs amodes = CLbl lbl RetRep : args ++ slowArgs rest
+ where (tag, args, rest) = matchSlowPattern amodes
+ lbl = mkRtsApplyInfoLabel tag
+
+-- | Split off the longest prefix of the argument list that matches one
+-- of the predefined RTS apply patterns (see 'findMatch'), returning the
+-- pattern's tag string, the matched arguments, and the leftovers.
+matchSlowPattern :: [CAddrMode] -> (LitString, [CAddrMode], [CAddrMode])
+matchSlowPattern amodes = (tag, these, rest)
+ where reps = map getAmodeRep amodes
+ (tag, n) = findMatch (map primRepToArgRep reps)
+ (these, rest) = splitAt n amodes
+
+-- | Match the front of an argument-rep list against the fixed set of
+-- RTS apply patterns, returning the pattern's tag string and the number
+-- of argument reps it consumes.  Tag letters: p = pointer (followable),
+-- v = void, n = one-word non-pointer, f = float, d = double, l = 64-bit
+-- (cf. 'primRepChar' in the DEBUG section below).  Longer patterns are
+-- tried first, so the order of the clauses matters.
+-- These cases were found to cover about 99% of all slow calls:
+findMatch (RepP: RepP: RepP: RepP: RepP: RepP: RepP: _) = (SLIT("ppppppp"), 7)
+findMatch (RepP: RepP: RepP: RepP: RepP: RepP: _) = (SLIT("pppppp"), 6)
+findMatch (RepP: RepP: RepP: RepP: RepP: _) = (SLIT("ppppp"), 5)
+findMatch (RepP: RepP: RepP: RepP: _) = (SLIT("pppp"), 4)
+findMatch (RepP: RepP: RepP: _) = (SLIT("ppp"), 3)
+findMatch (RepP: RepP: RepV: _) = (SLIT("ppv"), 3)
+findMatch (RepP: RepP: _) = (SLIT("pp"), 2)
+findMatch (RepP: RepV: _) = (SLIT("pv"), 2)
+findMatch (RepP: _) = (SLIT("p"), 1)
+findMatch (RepV: _) = (SLIT("v"), 1)
+findMatch (RepN: _) = (SLIT("n"), 1)
+findMatch (RepF: _) = (SLIT("f"), 1)
+findMatch (RepD: _) = (SLIT("d"), 1)
+findMatch (RepL: _) = (SLIT("l"), 1)
+-- The empty list (and any rep constructor not covered above) falls
+-- through to here; callers only reach this via non-empty amode lists.
+findMatch _ = panic "CgStackery.findMatch"
+
+#ifdef DEBUG
+-- Debug-only: map a PrimRep to the single tag character used by the
+-- apply patterns (p/v/f/d/n/l, matching 'findMatch').  NB: the guards
+-- are not exhaustive -- a rep satisfying none of them is a
+-- pattern-match failure, acceptable in DEBUG builds only.
+primRepChar p | isFollowableRep p = 'p'
+primRepChar VoidRep = 'v'
+primRepChar FloatRep = 'f'
+primRepChar DoubleRep = 'd'
+primRepChar p | getPrimRepSize p == 1 = 'n'
+primRepChar p | is64BitRep p = 'l'
+
+-- Debug-only: print the rep pattern of a slow call's arguments, then
+-- continue with the supplied continuation (see the commented-out call
+-- site in 'constructSlowCall').
+traceSlowCall amodes and_then
+ = trace ("call: " ++ map primRepChar (map getAmodeRep amodes)) and_then
+#endif