X-Git-Url: http://git.megacz.com/?a=blobdiff_plain;f=rts%2Fgmp%2Fmpn%2Fcray%2Fmulww.s;fp=rts%2Fgmp%2Fmpn%2Fcray%2Fmulww.s;h=890cdcf94df0e4284c8985ae532b9df5c9177a35;hb=0065d5ab628975892cea1ec7303f968c3338cbe1;hp=0000000000000000000000000000000000000000;hpb=28a464a75e14cece5db40f2765a29348273ff2d2;p=ghc-hetmet.git

diff --git a/rts/gmp/mpn/cray/mulww.s b/rts/gmp/mpn/cray/mulww.s
new file mode 100644
index 0000000..890cdcf
--- /dev/null
+++ b/rts/gmp/mpn/cray/mulww.s
@@ -0,0 +1,245 @@
+* Helper for mpn_mul_1, mpn_addmul_1, and mpn_submul_1 for Cray PVP.
+
+* Copyright (C) 1996, 2000 Free Software Foundation, Inc.
+* This file is generated from mulww.f in this same directory.
+
+* This file is part of the GNU MP Library.
+
+* The GNU MP Library is free software; you can redistribute it and/or
+* modify it under the terms of the GNU Lesser General Public License as
+* published by the Free Software Foundation; either version 2.1 of the
+* License, or (at your option) any later version.
+
+* The GNU MP Library is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+* Lesser General Public License for more details.
+
+* You should have received a copy of the GNU Lesser General Public
+* License along with the GNU MP Library; see the file COPYING.LIB.  If
+* not, write to the Free Software Foundation, Inc., 59 Temple Place -
+* Suite 330, Boston, MA 02111-1307, USA.
+
+          IDENT   GMPN_MULWW
+**********************************************
+*      Assemble with Cal Version 2.0         *
+*                                            *
+*      Generated by CFT77 6.0.4.19           *
+*           on 06/27/00 at 04:34:13          *
+*                                            *
+**********************************************
+* ALLOW UNDERSCORES IN IDENTIFIERS
+          EDIT    OFF
+          FORMAT  NEW
+@DATA     SECTION DATA,CM
+@DATA     =       W.*
+          CON     O'0000000000040000000000
+          CON     O'0435152404713723252514  ;GMPN_MUL 1
+          CON     O'0535270000000000000000  ;WW 1
+          CON     O'0000000000000001200012  ;trbk tbl 1
+          VWD     32/0,32/P.GMPN_MULWW  ;trbk tbl 1
+          CON     O'0014003000000000001416  ;trbk tbl 1
+          CON     O'0000000000000000000011  ;trbk tbl 1
+          CON     O'0000000000000000000215  ;trbk tbl 1
+          BSSZ    1               ;trbk tbl 1
+@CODE     SECTION CODE
+@CODE     =       P.*
+L3        =       P.*             ; 1
+          A0      A6              ;arg base 1
+          A5      6               ;num Darg 1
+          B03,A5  0,A0            ;load DAs 1
+          A0      A1+A2           ; 1
+          A5      1               ;num Ts 1
+          0,A0    T00,A5          ; 1
+          B02     A2              ;new base 1
+          B66     A3              ;stk top 1
+          B01     A6              ;arg base 1
+          A7      P.L4            ;ofrn rtn 1
+          B00     A7              ;return 1
+          A6      @DATA           ; 1
+          J       $STKOFEN        ;$STKOFEN 1
+GMPN_MULWW =      P.*             ; 1
+          A0      @DATA+3         ;(trbk) 1
+          B77     A0              ;(trbk) 1
+          A1      13              ;num Bs 1
+          A0      B66             ;stk top 1
+          A2      B66             ;stk tmp 1
+          A4      B67             ;stk limt 1
+          0,A0    B77,A1          ; 1
+          A7      782             ;stk size 1
+          A3      A2+A7           ; 1
+          A0      A4-A3           ; 1
+          JAM     L3              ;overflow 1
+          A0      A6              ;arg base 1
+          A5      6               ;num Darg 1
+          B03,A5  0,A0            ;load DAs 1
+          A0      A1+A2           ; 1
+          A5      1               ;num Ts 1
+          0,A0    T00,A5          ; 1
+          B02     A2              ;new base 1
+          B66     A3              ;new top 1
+          B01     A6              ;arg base 1
+L4        =       P.*             ;ofrn rtn 1
+          A7      B07             ;regs 14
+          S7      0,A7            ; 14
+          A6      B10             ;regs 9
+          S6      0,A6            ; 9
+          S5      1               ; 14
+          S4      <22             ; 9
+          S7      S7-S5           ; 14
+          S5      #S7             ; 14
+          T00     S6              ;regs 10
+          S6      S6>22           ; 10
+          S7      T00             ;regs 11
+          S7      S7>44           ; 11
+          S3      T00             ;regs 9
+          S3      S3&S4           ; 9
+          S6      S6&S4           ; 10
+          S7      S7&S4           ; 11
+          S3      S3<24           ; 9
+          S6      S6<24           ; 10
+          S7      S7<24           ; 11
+          S0      S5              ;regs 14
+          S4      S5              ;regs 14
+          S1      S6              ;regs 14
+          S2      S3              ;regs 14
+          S3      S7              ;regs 14
+          JSP     L5              ; 14
+L6        =       P.*             ; 14
+          S7      -S4             ; 14
+          A2      S7              ;regs 14
+          VL      A2              ;regs 14
+          A3      B06             ;s_bt_sp 14
+          A5      B05             ;s_bt_sp 14
+          A4      B04             ;s_bt_sp 14
+          A1      VL              ; 14
+          A2      S4              ;regs 14
+L7        =       P.*             ; 14
+          A0      A3              ;regs 15
+          VL      A1              ;regs 15
+          V7      ,A0,1           ; 15
+          B11     A5              ;s_bt_sp 15
+          A7      22              ; 17
+          B12     A4              ;s_bt_sp 17
+          V6      V7>A7           ; 17
+          B13     A3              ;s_bt_sp 17
+          S7      <22             ; 17
+          A3      B02             ;s_bt_sp 17
+          V5      S7&V6           ; 17
+          A6      24              ; 17
+          V4      V5A5            ; 18
+          V2      S1*FV1          ; 21
+          V3      S7&V5           ; 18
+          A0      14              ; 34
+          B77     A0              ;regs 34
+          A4      B77             ;regs 34
+          A0      A4+A3           ; 34
+          ,A0,1   V2              ;v_ld_str 34
+          V0      V3A7            ; 28
+          V2      S2*FV0          ; 22
+          V3      V6+V2           ; 22
+          S7      <20             ; 28
+          V1      S7&V3           ; 28
+          A4      270             ; 34
+          A0      A4+A3           ; 34
+          ,A0,1   V0              ;v_ld_str 34
+          A4      14              ; 34
+          A0      A4+A3           ; 34
+          V7      ,A0,1           ;v_ld_str 34
+          V6      V1A5            ; 32
+          V0      S1*FV4          ; 23
+          A5      654             ; 34
+          A0      A5+A3           ; 34
+          ,A0,1   V1              ;v_ld_str 34
+          V6      V7+V0           ; 23
+          A5      2               ; 32
+          V2      V6A6            ; 28
+          A5      654             ; 34
+          CPW                     ;cmr_vrsp 34
+          A0      A5+A3           ; 34
+          V1      ,A0,1           ;v_ld_str 34
+          A5      398             ; 34
+          A0      A5+A3           ; 34
+          V3      ,A0,1           ;v_ld_str 34
+          V6      V4+V1           ; 32
+          V2      V3>A6           ; 32
+          V5      V6+V2           ; 32
+          A6      B12             ;s_bt_sp 32
+          V4      V3
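
For reference, the entry points named in the file header have these limb-level semantics in GMP: mpn_mul_1 multiplies an n-limb operand by a single limb, stores the n low result limbs, and returns the carry-out (high) limb. The C sketch below only illustrates that operation; the names ref_mul_1 and limb_t and the use of unsigned __int128 are illustrative assumptions, not part of this file or of GMP's Cray port, which appears to build its double-word products from 22/24-bit pieces via the masked shifts and *F floating multiplies seen above.

    #include <stddef.h>
    #include <stdint.h>

    typedef uint64_t limb_t;   /* one 64-bit limb, matching a Cray word */

    /* Sketch of mpn_mul_1 semantics: {rp,n} = {sp,n} * v, return high limb. */
    static limb_t ref_mul_1(limb_t *rp, const limb_t *sp, size_t n, limb_t v)
    {
        limb_t carry = 0;
        for (size_t i = 0; i < n; i++) {
            /* full 64x64 -> 128-bit product plus the incoming carry */
            unsigned __int128 p = (unsigned __int128) sp[i] * v + carry;
            rp[i] = (limb_t) p;           /* low word of the product */
            carry = (limb_t) (p >> 64);   /* high word propagates */
        }
        return carry;
    }

mpn_addmul_1 and mpn_submul_1 follow the same loop shape, except that the low product word is added to or subtracted from the existing limb at rp[i] and the resulting carry or borrow is returned.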