+#elif defined(x86_64_HOST_ARCH)
+ /*
+ stack at call:
+ argn
+ ...
+ arg7
+ return address
+ %rdi,%rsi,%rdx,%rcx,%r8,%r9 = arg1..arg6
+
+ if there are <6 integer args, then we can just push the
+ StablePtr into %edi and shuffle the other args up.
+
+ If there are >=6 integer args, then we have to flush one arg
+ to the stack, and arrange to adjust the stack ptr on return.
+ The stack will be rearranged to this:
+
+ argn
+ ...
+ arg7
+ return address *** <-- dummy arg in stub fn.
+ arg6
+ obscure_ccall_ret_code
+
+ This unfortunately means that the type of the stub function
+ must have a dummy argument for the original return address
+ pointer inserted just after the 6th integer argument.
+
+ Code for the simple case:
+
+ 0: 4d 89 c1 mov %r8,%r9
+ 3: 49 89 c8 mov %rcx,%r8
+ 6: 48 89 d1 mov %rdx,%rcx
+ 9: 48 89 f2 mov %rsi,%rdx
+ c: 48 89 fe mov %rdi,%rsi
+ f: 48 8b 3d 0a 00 00 00 mov 10(%rip),%rdi
+ 16: e9 00 00 00 00 jmpq stub_function
+ ...
+ 20: .quad 0 # aligned on 8-byte boundary
+
+
+ And the version for >=6 integer arguments:
+
+ 0: 41 51 push %r9
+ 2: 68 00 00 00 00 pushq $obscure_ccall_ret_code
+ 7: 4d 89 c1 mov %r8,%r9
+ a: 49 89 c8 mov %rcx,%r8
+ d: 48 89 d1 mov %rdx,%rcx
+ 10: 48 89 f2 mov %rsi,%rdx
+ 13: 48 89 fe mov %rdi,%rsi
+ 16: 48 8b 3d 0b 00 00 00 mov 11(%rip),%rdi
+ 1d: e9 00 00 00 00 jmpq stub_function
+ ...
+ 28: .quad 0 # aligned on 8-byte boundary
+ */
+
+ /* we assume the small code model (gcc -mcmodel=small) where
+ * all symbols are <2^32, so wptr should fit into 32 bits.
+ */
+ ASSERT(((long)wptr >> 32) == 0);
+
+ {
+ int i = 0;
+ char *c;
+
+ // determine whether we have 6 or more integer arguments,
+ // and therefore need to flush one to the stack.
+ for (c = typeString; *c != '\0'; c++) {
+ if (*c == 'i' || *c == 'l') i++;
+ if (i == 6) break;
+ }
+
+ if (i < 6) {
+ adjustor = mallocBytesRWX(40);
+
+ *(StgInt32 *)adjustor = 0x49c1894d;
+ *(StgInt32 *)(adjustor+4) = 0x8948c889;
+ *(StgInt32 *)(adjustor+8) = 0xf28948d1;
+ *(StgInt32 *)(adjustor+12) = 0x48fe8948;
+ *(StgInt32 *)(adjustor+16) = 0x000a3d8b;
+ *(StgInt32 *)(adjustor+20) = 0x00e90000;
+
+ *(StgInt32 *)(adjustor+23) =
+ (StgInt32)((StgInt64)wptr - (StgInt64)adjustor - 27);
+ *(StgInt64 *)(adjustor+32) = (StgInt64)hptr;
+ }
+ else
+ {
+ adjustor = mallocBytesRWX(48);
+
+ *(StgInt32 *)adjustor = 0x00685141;
+ *(StgInt32 *)(adjustor+4) = 0x4d000000;
+ *(StgInt32 *)(adjustor+8) = 0x8949c189;
+ *(StgInt32 *)(adjustor+12) = 0xd18948c8;
+ *(StgInt32 *)(adjustor+16) = 0x48f28948;
+ *(StgInt32 *)(adjustor+20) = 0x8b48fe89;
+ *(StgInt32 *)(adjustor+24) = 0x00000b3d;
+ *(StgInt32 *)(adjustor+28) = 0x0000e900;
+
+ *(StgInt32 *)(adjustor+3) =
+ (StgInt32)(StgInt64)obscure_ccall_ret_code;
+ *(StgInt32 *)(adjustor+30) =
+ (StgInt32)((StgInt64)wptr - (StgInt64)adjustor - 34);
+ *(StgInt64 *)(adjustor+40) = (StgInt64)hptr;
+ }
+ }