;; thumb.md Machine description for ARM/Thumb processors ;; Copyright (C) 1996, 1997, 1998, 2002 Free Software Foundation, Inc. ;; The basis of this contribution was generated by ;; Richard Earnshaw, Advanced RISC Machines Ltd
;; This file is part of GNU CC.
;; GNU CC is free software; you can redistribute it and/or modify ;; it under the terms of the GNU General Public License as published by ;; the Free Software Foundation; either version 2, or (at your option) ;; any later version.
;; GNU CC is distributed in the hope that it will be useful, ;; but WITHOUT ANY WARRANTY; without even the implied warranty of ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;; GNU General Public License for more details.
;; You should have received a copy of the GNU General Public License ;; along with GNU CC; see the file COPYING. If not, write to ;; the Free Software Foundation, 59 Temple Place - Suite 330, ;; Boston, MA 02111-1307, USA.
;; Instruction attributes consulted by the branch-length logic below.
;; LENGTH of an instruction is 2 bytes (define_attr "length" "" (const_int 2))
;; CONDS is set to UNCHANGED when an insn does not affect the condition codes ;; Most insns change the condition codes (define_attr "conds" "changed,unchanged" (const_string "changed"))
;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a ;; distant label. (define_attr "far_jump" "yes,no" (const_string "no"))
;; Start with move insns
;; movsi expander: outside reload, a non-register destination forces the
;; source into a register so stores always have a register source.
(define_expand "movsi" [(set (match_operand:SI 0 "general_operand" "") (match_operand:SI 1 "general_operand" ""))] "" " if (! (reload_in_progress || reload_completed)) { if (GET_CODE (operands[0]) != REG) operands[1] = force_reg (SImode, operands[1]); } ")
;; SImode moves.  There are ten constraint alternatives and ten length
;; entries; alternatives 2 (J) and 3 (K) emit "#" and are rewritten by the
;; constant-load define_splits that follow, which is why their length is 4.
;; (The mangled copy of this file had lost the two "#" template lines,
;; leaving only eight templates for ten alternatives.)
(define_insn "*movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l,m,*r,*h")
	(match_operand:SI 1 "general_operand" "l,I,J,K,>,l,mi,l,*h,*r"))]
  "register_operand (operands[0], SImode)
   || register_operand (operands[1], SImode)"
  "@
   add\t%0, %1, #0
   mov\t%0, %1
   #
   #
   ldmia\t%1, {%0}
   stmia\t%0, {%1}
   ldr\t%0, %1
   str\t%1, %0
   mov\t%0, %1
   mov\t%0, %1"
  [(set_attr "length" "2,2,4,4,2,2,2,2,2,2")])
;; Split the load of a "shiftable" constant (an 8-bit value shifted left)
;; into a mov of the small value followed by a left shift.
(define_split [(set (match_operand:SI 0 "register_operand" "") (match_operand:SI 1 "const_int_operand" ""))] "thumb_shiftable_const (INTVAL (operands[1]))" [(set (match_dup 0) (match_dup 1)) (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))] " { HOST_WIDE_UINT val = INTVAL (operands[1]); HOST_WIDE_UINT mask = 0xff; int i; for (i = 0; i < 25; i++) if ((val & (mask << i)) == val) break;
if (i == 0) FAIL;
operands[1] = GEN_INT (val >> i); operands[2] = GEN_INT (i); }")
;; Split the load of a small negative constant (-255..-1) into a mov of the
;; positive value followed by a negate.
(define_split [(set (match_operand:SI 0 "register_operand" "") (match_operand:SI 1 "const_int_operand" ""))] "INTVAL (operands[1]) < 0 && INTVAL (operands[1]) > -256" [(set (match_dup 0) (match_dup 1)) (set (match_dup 0) (neg:SI (match_dup 0)))] " operands[1] = GEN_INT (- INTVAL (operands[1])); ")
;; movhi expander.  Outside reload: force a register source for memory
;; destinations and legitimize invalid HImode addresses by copying the
;; address into a register.  During reload: a large CONST_INT source (not
;; fitting constraint 'I') is loaded through the SImode move pattern on the
;; SImode subreg of the destination register.
;; (The mangled copy of this file had broken the two C comments below into
;; "/ ... /" fragments, leaving the reload-constant path inside an
;; unterminated comment; the delimiters are restored here.)
(define_expand "movhi"
  [(set (match_operand:HI 0 "general_operand" "")
	(match_operand:HI 1 "general_operand" ""))]
  ""
  "
{
  if (! (reload_in_progress || reload_completed))
    {
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (HImode, operands[1]);

      /* ??? We shouldn't really get invalid addresses here, but this can
	 happen if we are passed a SP (never OK for HImode/QImode) or virtual
	 register (rejected by GO_IF_LEGITIMATE_ADDRESS for HImode/QImode)
	 relative address.  */
      /* ??? This should perhaps be fixed elsewhere, for instance, in
	 fixup_stack_1, by checking for other kinds of invalid addresses,
	 e.g. a bare reference to a virtual register.  This may confuse the
	 alpha though, which must handle this case differently.  */
      if (GET_CODE (operands[0]) == MEM
	  && ! memory_address_p (GET_MODE (operands[0]),
				 XEXP (operands[0], 0)))
	{
	  rtx temp = copy_to_reg (XEXP (operands[0], 0));
	  operands[0] = change_address (operands[0], VOIDmode, temp);
	}
      if (GET_CODE (operands[1]) == MEM
	  && ! memory_address_p (GET_MODE (operands[1]),
				 XEXP (operands[1], 0)))
	{
	  rtx temp = copy_to_reg (XEXP (operands[1], 0));
	  operands[1] = change_address (operands[1], VOIDmode, temp);
	}
    }
  /* Handle loading a large integer during reload.  */
  else if (GET_CODE (operands[1]) == CONST_INT
	   && ! CONST_OK_FOR_LETTER_P (INTVAL (operands[1]), 'I'))
    {
      /* Writing a constant to memory needs a scratch, which should
	 be handled with SECONDARY_RELOADs.  */
      if (GET_CODE (operands[0]) != REG)
	abort ();

      operands[0] = gen_rtx (SUBREG, SImode, operands[0], 0);
      emit_insn (gen_movsi (operands[0], operands[1]));
      DONE;
    }
}")
;; HImode moves: register copy, ldrh/strh for memory, and hi/lo register
;; transfers.  Six alternatives, six templates.
(define_insn "*movhi_insn" [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l") (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))] "register_operand (operands[0], HImode) || register_operand (operands[1], HImode)" "@ add\t%0, %1, #0 ldrh\t%0, %1 strh\t%1, %0 mov\t%0, %1 mov\t%0, %1 mov\t%0, %1")
;; movqi expander.  Mirrors the movhi expander above: force register sources
;; for memory destinations, legitimize invalid QImode addresses, and during
;; reload route large constant loads through the SImode move pattern.
;; (The mangled copy of this file had broken the two C comments below into
;; "/ ... /" fragments, leaving the reload-constant path inside an
;; unterminated comment; the delimiters are restored here.)
(define_expand "movqi"
  [(set (match_operand:QI 0 "general_operand" "")
	(match_operand:QI 1 "general_operand" ""))]
  ""
  "
{
  if (! (reload_in_progress || reload_completed))
    {
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (QImode, operands[1]);

      /* ??? We shouldn't really get invalid addresses here, but this can
	 happen if we are passed a SP (never OK for HImode/QImode) or virtual
	 register (rejected by GO_IF_LEGITIMATE_ADDRESS for HImode/QImode)
	 relative address.  */
      /* ??? This should perhaps be fixed elsewhere, for instance, in
	 fixup_stack_1, by checking for other kinds of invalid addresses,
	 e.g. a bare reference to a virtual register.  This may confuse the
	 alpha though, which must handle this case differently.  */
      if (GET_CODE (operands[0]) == MEM
	  && ! memory_address_p (GET_MODE (operands[0]),
				 XEXP (operands[0], 0)))
	{
	  rtx temp = copy_to_reg (XEXP (operands[0], 0));
	  operands[0] = change_address (operands[0], VOIDmode, temp);
	}
      if (GET_CODE (operands[1]) == MEM
	  && ! memory_address_p (GET_MODE (operands[1]),
				 XEXP (operands[1], 0)))
	{
	  rtx temp = copy_to_reg (XEXP (operands[1], 0));
	  operands[1] = change_address (operands[1], VOIDmode, temp);
	}
    }
  /* Handle loading a large integer during reload.  */
  else if (GET_CODE (operands[1]) == CONST_INT
	   && ! CONST_OK_FOR_LETTER_P (INTVAL (operands[1]), 'I'))
    {
      /* Writing a constant to memory needs a scratch, which should
	 be handled with SECONDARY_RELOADs.  */
      if (GET_CODE (operands[0]) != REG)
	abort ();

      operands[0] = gen_rtx (SUBREG, SImode, operands[0], 0);
      emit_insn (gen_movsi (operands[0], operands[1]));
      DONE;
    }
}")
;; QImode moves: register copy, ldrb/strb for memory, and hi/lo register
;; transfers.  Six alternatives, six templates.
(define_insn "*movqi_insn" [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l") (match_operand:QI 1 "general_operand" "l,m,l,*h,*r,I"))] "register_operand (operands[0], QImode) || register_operand (operands[1], QImode)" "@ add\t%0, %1, #0 ldrb\t%0, %1 strb\t%1, %0 mov\t%0, %1 mov\t%0, %1 mov\t%0, %1")
;; movdi expander: outside reload, memory destinations take a register source.
(define_expand "movdi" [(set (match_operand:DI 0 "general_operand" "") (match_operand:DI 1 "general_operand" ""))] "" " if (! (reload_in_progress || reload_completed)) { if (GET_CODE (operands[0]) != REG) operands[1] = force_reg (DImode, operands[1]); } ")
;; DImode move: the output code orders the two word-moves so a partially
;; overlapping source register pair is not clobbered before it is read.
;;; ??? This should have alternatives for constants. ;;; ??? This was originally identical to the movdf_insn pattern. ;;; ??? The 'i' constraint looks funny, but it should always be replaced by ;;; thumb_reorg with a memory reference. (define_insn "*movdi_insn" [(set (match_operand:DI 0 "general_operand" "=l,l,l,l,>,l,m,*r") (match_operand:DI 1 "general_operand" "l,I,J,>,l,mi,l,r"))] "register_operand (operands[0], DImode) || register_operand (operands[1], DImode)" " { switch (which_alternative) { case 0: if (REGNO (operands[1]) == REGNO (operands[0]) + 1) return "add\t%0, %1, #0;add\t%H0, %H1, #0"; return "add\t%H0, %H1, #0;add\t%0, %1, #0"; case 1: return "mov\t%Q0, %1;mov\t%R0, #0"; case 2: operands[1] = GEN_INT (- INTVAL (operands[1])); return "mov\t%Q0, %1;neg\t%Q0, %Q0;asr\t%R0, %Q0, #31"; case 3: return "ldmia\t%1, {%0, %H0}"; case 4: return "stmia\t%0, {%1, %H1}"; case 5: return thumb_load_double_from_address (operands); case 6: operands[2] = gen_rtx (MEM, SImode, plus_constant (XEXP (operands[0], 0), 4)); output_asm_insn ("str\t%1, %0;str\t%H1, %2", operands); return ""; case 7: if (REGNO (operands[1]) == REGNO (operands[0]) + 1) return "mov\t%0, %1;mov\t%H0, %H1"; return "mov\t%H0, %H1;mov\t%0, %1"; } }"[(set_attr "length" "4,4,6,2,2,6,4,4")])
;; movdf expander: same register-source policy as movdi.
(define_expand "movdf" [(set (match_operand:DF 0 "general_operand" "") (match_operand:DF 1 "general_operand" ""))] "" " if (! (reload_in_progress || reload_completed)) { if (GET_CODE (operands[0]) != REG) operands[1] = force_reg (DFmode, operands[1]); } ")
;; DFmode move: same word-ordering trick as *movdi_insn for overlapping pairs.
;;; ??? This should have alternatives for constants. ;;; ??? This was originally identical to the movdi_insn pattern. ;;; ??? The 'F' constraint looks funny, but it should always be replaced by ;;; thumb_reorg with a memory reference. (define_insn "*movdf_insn" [(set (match_operand:DF 0 "general_operand" "=l,l,>,l,m,*r") (match_operand:DF 1 "general_operand" "l,>,l,mF,l,r"))] "register_operand (operands[0], DFmode) || register_operand (operands[1], DFmode)" " switch (which_alternative) { case 0: if (REGNO (operands[1]) == REGNO (operands[0]) + 1) return "add\t%0, %1, #0;add\t%H0, %H1, #0"; return "add\t%H0, %H1, #0;add\t%0, %1, #0"; case 1: return "ldmia\t%1, {%0, %H0}"; case 2: return "stmia\t%0, {%1, %H1}"; case 3: return thumb_load_double_from_address (operands); case 4: operands[2] = gen_rtx (MEM, SImode, plus_constant (XEXP (operands[0], 0), 4)); output_asm_insn ("str\t%1, %0;str\t%H1, %2", operands); return ""; case 5: if (REGNO (operands[1]) == REGNO (operands[0]) + 1) return "mov\t%0, %1;mov\t%H0, %H1"; return "mov\t%H0, %H1;mov\t%0, %1"; } "[(set_attr "length" "4,2,2,6,4,4")])
;; movsf expander: same register-source policy; single-word SFmode moves.
(define_expand "movsf" [(set (match_operand:SF 0 "general_operand" "") (match_operand:SF 1 "general_operand" ""))] "" " if (! (reload_in_progress || reload_completed)) { if (GET_CODE (operands[0]) != REG) operands[1] = force_reg (SFmode, operands[1]); } ")
;;; ??? This should have alternatives for constants. (define_insn "*movsf_insn" [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l,m,*r,*h") (match_operand:SF 1 "general_operand" "l,>,l,mF,l,*h,*r"))] "register_operand (operands[0], SFmode) || register_operand (operands[1], SFmode)" "@ add\t%0, %1, #0 ldmia\t%1, {%0} stmia\t%0, {%1} ldr\t%0, %1 str\t%1, %0 mov\t%0, %1 mov\t%0, %1")
;; Widening move insns
;; Register-to-register zero extension is expanded as a left/right logical
;; shift pair; only memory sources fall through to the insn pattern below.
(define_expand "zero_extendhisi2" [(set (match_operand:SI 0 "s_register_operand" "") (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))] "" " if (GET_CODE (operands[1]) != MEM) { rtx temp = gen_reg_rtx (SImode);
operands[1] = force_reg (HImode, operands[1]);
operands[1] = gen_lowpart (SImode, operands[1]);
emit_insn (gen_ashlsi3 (temp, operands[1], GEN_INT (16)));
emit_insn (gen_lshrsi3 (operands[0], temp, GEN_INT (16)));
DONE;
}
")
;; HImode zero extension from memory: a single ldrh.
(define_insn "*zero_extendhisi2_insn" [(set (match_operand:SI 0 "s_register_operand" "=l") (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))] "" "ldrh\t%0, %1")
;; QImode zero extension: same shift-pair expansion (24-bit shifts).
(define_expand "zero_extendqisi2" [(set (match_operand:SI 0 "s_register_operand" "") (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))] "" " if (GET_CODE (operands[1]) != MEM) { rtx temp = gen_reg_rtx (SImode);
operands[1] = force_reg (QImode, operands[1]);
operands[1] = gen_lowpart (SImode, operands[1]);
emit_insn (gen_ashlsi3 (temp, operands[1], GEN_INT (24)));
emit_insn (gen_lshrsi3 (operands[0], temp, GEN_INT (24)));
DONE;
}
")
;; QImode zero extension from memory: a single ldrb.
(define_insn "*zero_extendqisi2_insn" [(set (match_operand:SI 0 "s_register_operand" "=l") (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))] "" "ldrb\t%0, %1")
;; Sign extension from a register uses an ashl/ashr pair; the memory case
;; needs a scratch register (see the insn below), hence the clobber.
(define_expand "extendhisi2" [(parallel [(set (match_operand:SI 0 "s_register_operand" "") (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" ""))) (clobber (match_scratch:SI 2 ""))])] "" " if (GET_CODE (operands[1]) != MEM) { rtx temp = gen_reg_rtx (SImode);
operands[1] = force_reg (HImode, operands[1]);
operands[1] = gen_lowpart (SImode, operands[1]);
emit_insn (gen_ashlsi3 (temp, operands[1], GEN_INT (16)));
emit_insn (gen_ashrsi3 (operands[0], temp, GEN_INT (16)));
DONE;
}
")
;; ldrsh only accepts a register offset, so when the address has a constant
;; offset it is first moved into the scratch register (operand 2).
(define_insn "extendhisi2_insn" [(set (match_operand:SI 0 "s_register_operand" "=l") (sign_extend:SI (match_operand:HI 1 "memory_operand" "m"))) (clobber (match_scratch:SI 2 "=&l"))] "" " { rtx ops[4]; /* This code used to try to use 'V', and fix the address only if it was offsettable, but this fails for e.g. REG+48 because 48 is outside the range of QImode offsets, and offsettable_address_p does a QImode address check. */
if (GET_CODE (XEXP (operands[1], 0)) == PLUS) { ops[1] = XEXP (XEXP (operands[1], 0), 0); ops[2] = XEXP (XEXP (operands[1], 0), 1); } else { ops[1] = XEXP (operands[1], 0); ops[2] = const0_rtx; } if (GET_CODE (ops[2]) == REG) return "ldrsh\t%0, %1";
ops[0] = operands[0]; ops[3] = operands[2]; output_asm_insn ("mov\t%3, %2;ldrsh\t%0, [%1, %3]", ops); return ""; }" [(set_attr "length" "4")])
;; QImode sign extension: shift-pair expansion for register sources.
(define_expand "extendqisi2" [(set (match_operand:SI 0 "s_register_operand" "") (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))] "" " if (GET_CODE (operands[1]) != MEM) { rtx temp = gen_reg_rtx (SImode);
operands[1] = force_reg (QImode, operands[1]);
operands[1] = gen_lowpart (SImode, operands[1]);
emit_insn (gen_ashlsi3 (temp, operands[1], GEN_INT (24)));
emit_insn (gen_ashrsi3 (operands[0], temp, GEN_INT (24)));
DONE;
}
")
;; QImode sign extension from memory.  ldrsb only takes a register offset,
;; so a constant offset is materialized in the destination register first;
;; when the destination is itself part of the address, fall back to ldrb
;; followed by an lsl/asr #24 pair.
;; NOTE(review): the middle alternatives use escaped \" quoting while the
;; rest of this file does not -- an artifact of how this copy was extracted;
;; verify against the pristine thumb.md.
;; NOTE(review): the final else arm emits "mov %0, %2; ldrsb %0, [%1, %0]"
;; even though ops[1] is not a register on that path -- confirm the intended
;; operand order against the original source.
(define_insn "extendqisi2_insn" [(set (match_operand:SI 0 "s_register_operand" "=l,l") (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))] "" " { rtx ops[3];
if (which_alternative == 0) return "ldrsb\t%0, %1"; ops[0] = operands[0]; if (GET_CODE (XEXP (operands[1], 0)) == PLUS) { ops[1] = XEXP (XEXP (operands[1], 0), 0); ops[2] = XEXP (XEXP (operands[1], 0), 1);
if (GET_CODE (ops[1]) == REG && GET_CODE (ops[2]) == REG)
output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
else if (GET_CODE (ops[1]) == REG)
{
if (REGNO (ops[1]) == REGNO (operands[0]))
output_asm_insn (\"ldrb\\t%0, [%1, %2]\;lsl\\t%0, %0, #24\;asr\\t%0, %0, #24\", ops);
else
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
}
else
{
if (REGNO (ops[2]) == REGNO (operands[0]))
output_asm_insn (\"ldrb\\t%0, [%2, %1]\;lsl\\t%0, %0, #24\;asr\\t%0, %0, #24\", ops);
else
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
}
}
else if (REGNO (operands[0]) == REGNO (XEXP (operands[1], 0))) { output_asm_insn ("ldrb\t%0, [%0, #0];lsl\t%0, %0, #24;asr\t%0, %0, #24", ops); } else { ops[1] = XEXP (operands[1], 0); ops[2] = const0_rtx; output_asm_insn ("mov\t%0, %2;ldrsb\t%0, [%1, %0]", ops); } return ""; }" [(set_attr "length" "2,6")])
;; We don't really have extzv, but defining this using shifts helps ;; to reduce register pressure later on.
;; Extract <width=op2> bits at position <op3>: shift left to drop high bits,
;; then logical shift right; the left shift is skipped when the field is
;; already at the top of the word (lshift == 0).
(define_expand "extzv" [(set (match_dup 4) (ashift:SI (match_operand:SI 1 "register_operand" "") (match_operand:SI 2 "const_int_operand" ""))) (set (match_operand:SI 0 "register_operand" "") (lshiftrt:SI (match_dup 4) (match_operand:SI 3 "const_int_operand" "")))] "" " { HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]); HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]); operands[3] = GEN_INT (rshift); if (lshift == 0) { emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3])); DONE; } operands[2] = GEN_INT (lshift); operands[4] = gen_reg_rtx (SImode); } ")
;; Block-move insns
;; Only word-aligned (op3 == 4) copies of at most 48 bytes with a constant
;; length are expanded inline; everything else FAILs to the generic code.
(define_expand "movstrqi" [(match_operand:BLK 0 "general_operand" "") (match_operand:BLK 1 "general_operand" "") (match_operand:SI 2 "" "") (match_operand:SI 3 "const_int_operand" "")] "" " if (INTVAL (operands[3]) != 4 || GET_CODE (operands[2]) != CONST_INT || INTVAL (operands[2]) > 48) FAIL;
thumb_expand_movstrqi (operands); DONE; ")
;; Copy 12 bytes via three scratch registers (ldmia/stmia with writeback).
(define_insn "movmem12b" [(set (mem:SI (match_operand:SI 0 "register_operand" "+&l")) (mem:SI (match_operand:SI 1 "register_operand" "+&l"))) (set (mem:SI (plus:SI (match_dup 0) (const_int 4))) (mem:SI (plus:SI (match_dup 1) (const_int 4)))) (set (mem:SI (plus:SI (match_dup 0) (const_int 8))) (mem:SI (plus:SI (match_dup 1) (const_int 8)))) (set (match_dup 0) (plus:SI (match_dup 0) (const_int 12))) (set (match_dup 1) (plus:SI (match_dup 1) (const_int 12))) (clobber (match_scratch:SI 2 "=&l")) (clobber (match_scratch:SI 3 "=&l")) (clobber (match_scratch:SI 4 "=&l"))] "" "* return output_move_mem_multiple (3, operands);" [(set_attr "length" "4")])
;; Copy 8 bytes via two scratch registers.
(define_insn "movmem8b" [(set (mem:SI (match_operand:SI 0 "register_operand" "+&l")) (mem:SI (match_operand:SI 1 "register_operand" "+&l"))) (set (mem:SI (plus:SI (match_dup 0) (const_int 4))) (mem:SI (plus:SI (match_dup 1) (const_int 4)))) (set (match_dup 0) (plus:SI (match_dup 0) (const_int 8))) (set (match_dup 1) (plus:SI (match_dup 1) (const_int 8))) (clobber (match_scratch:SI 2 "=&l")) (clobber (match_scratch:SI 3 "=&l"))] "" "* return output_move_mem_multiple (2, operands);" [(set_attr "length" "4")])
;; Arithmetic insns
;; 64-bit add: low-word add followed by add-with-carry on the high word.
(define_insn "adddi3" [(set (match_operand:DI 0 "s_register_operand" "=l") (plus:DI (match_operand:DI 1 "s_register_operand" "%0") (match_operand:DI 2 "s_register_operand" "l")))] "" "add\t%Q0, %Q0, %Q2;adc\t%R0, %R0, %R2" [(set_attr "conds" "changed") (set_attr "length" "8")])
;; register group 'k' is a single register group containing only the stack ;; register. Trying to reload it will always fail catastrophically, ;; so never allow those alternatives to match if reloading is needed. (define_insn "addsi3" [(set (match_operand:SI 0 "s_register_operand" "=l,l,l,*r,*h,l,!k") (plus:SI (match_operand:SI 1 "s_register_operand" "%0,0,l,*0,*0,!k,!k") (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*h,r,!M,!O")))] "" " static char *asms[] = { "add\t%0, %0, %2", "sub\t%0, %0, #%n2", "add\t%0, %1, %2", "add\t%0, %0, %2", "add\t%0, %0, %2", "add\t%0, %1, %2", "add\t%0, %1, %2" }; if (which_alternative == 2 && GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) < 0) return "sub\t%0, %1, #%n2"; return asms[which_alternative]; ")
; reloading and elimination of the frame pointer can sometimes cause this ; optimization to be missed. (define_peephole [(set (match_operand:SI 0 "register_operand" "=l") (match_operand:SI 1 "const_int_operand" "M")) (set (match_dup 0) (plus:SI (match_dup 0) (match_operand:SI 2 "register_operand" "k")))] "REGNO (operands[2]) == STACK_POINTER_REGNUM && (HOST_WIDE_UINT) (INTVAL (operands[1])) < 1024 && (INTVAL (operands[1]) & 3) == 0" "add\t%0, %2, %1")
;; 64-bit subtract: sub then subtract-with-carry on the high word.
(define_insn "subdi3" [(set (match_operand:DI 0 "s_register_operand" "=l") (minus:DI (match_operand:DI 1 "s_register_operand" "0") (match_operand:DI 2 "s_register_operand" "l")))] "" "sub\t%Q0, %Q0, %Q2;sbc\t%R0, %R0, %R2" [(set_attr "conds" "changed") (set_attr "length" "8")])
(define_insn "subsi3" [(set (match_operand:SI 0 "s_register_operand" "=l") (minus:SI (match_operand:SI 1 "s_register_operand" "l") (match_operand:SI 2 "s_register_operand" "l")))] "" "sub\t%0, %1, %2")
;; We must ensure that one input matches the output, and that the other input ;; does not match the output. Using 0 satisfies the first, and using & ;; satisfies the second. Unfortunately, this fails when operands 1 and 2 ;; are the same, because reload will make operand 0 match operand 1 without ;; realizing that this conflicts with operand 2. We fix this by adding another ;; alternative to match this case, and then `reload' it ourselves. This ;; alternative must come first. (define_insn "mulsi3" [(set (match_operand:SI 0 "s_register_operand" "=&l,&l,&l") (mult:SI (match_operand:SI 1 "s_register_operand" "%l,h,0") (match_operand:SI 2 "s_register_operand" "l,l,l")))] "" " { if (which_alternative < 2) return "mov\t%0, %1;mul\t%0, %0, %2"; else return "mul\t%0, %0, %2"; }" [(set_attr "length" "4,4,2")])
(define_insn "negsi2" [(set (match_operand:SI 0 "s_register_operand" "=l") (neg:SI (match_operand:SI 1 "s_register_operand" "l")))] "" "neg\t%0, %1")
;; Logical insns
;; andsi3 expander: an AND with a constant is rewritten where possible as
;; BIC (complement fits in 8 bits), a zero-extract (mask of low i bits), or
;; an lshr/ashl pair (mask of high bits); otherwise the constant is forced
;; into a register.
(define_expand "andsi3" [(set (match_operand:SI 0 "s_register_operand" "") (and:SI (match_operand:SI 1 "s_register_operand" "") (match_operand:SI 2 "nonmemory_operand" "")))] "" " if (GET_CODE (operands[2]) != CONST_INT) operands[2] = force_reg (SImode, operands[2]); else { int i; if (((HOST_WIDE_UINT) ~ INTVAL (operands[2])) < 256) { operands[2] = force_reg (SImode, GEN_INT (~INTVAL (operands[2]))); emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1])); DONE; }
for (i = 9; i <= 31; i++)
if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
{
emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
const0_rtx));
DONE;
}
else if ((((HOST_WIDE_INT) 1) << i) - 1 == ~ INTVAL (operands[2]))
{
rtx shift = GEN_INT (i);
rtx reg = gen_reg_rtx (SImode);
emit_insn (gen_lshrsi3 (reg, operands[1], shift));
emit_insn (gen_ashlsi3 (operands[0], reg, shift));
DONE;
}
operands[2] = force_reg (SImode, operands[2]);
}
")
;; Two-operand logical insns: destination must match the first input.
(define_insn "*andsi3_insn" [(set (match_operand:SI 0 "s_register_operand" "=l") (and:SI (match_operand:SI 1 "s_register_operand" "%0") (match_operand:SI 2 "s_register_operand" "l")))] "" "and\t%0, %0, %2")
;; bic: AND with the complement of operand 1; destination matches operand 2.
(define_insn "bicsi3" [(set (match_operand:SI 0 "s_register_operand" "=l") (and:SI (not:SI (match_operand:SI 1 "s_register_operand" "l")) (match_operand:SI 2 "s_register_operand" "0")))] "" "bic\t%0, %0, %1")
(define_insn "iorsi3" [(set (match_operand:SI 0 "s_register_operand" "=l") (ior:SI (match_operand:SI 1 "s_register_operand" "%0") (match_operand:SI 2 "s_register_operand" "l")))] "" "orr\t%0, %0, %2")
(define_insn "xorsi3" [(set (match_operand:SI 0 "s_register_operand" "=l") (xor:SI (match_operand:SI 1 "s_register_operand" "%0") (match_operand:SI 2 "s_register_operand" "l")))] "" "eor\t%0, %0, %2")
(define_insn "one_cmplsi2" [(set (match_operand:SI 0 "s_register_operand" "=l") (not:SI (match_operand:SI 1 "s_register_operand" "l")))] "" "mvn\t%0, %1")
;; Shift and rotation insns
;; Immediate shift counts use constraint N; register counts require the
;; destination to match operand 1 (two-operand form).
(define_insn "ashlsi3" [(set (match_operand:SI 0 "s_register_operand" "=l,l") (ashift:SI (match_operand:SI 1 "s_register_operand" "l,0") (match_operand:SI 2 "nonmemory_operand" "N,l")))] "" "@ lsl\t%0, %1, %2 lsl\t%0, %0, %2")
(define_insn "ashrsi3" [(set (match_operand:SI 0 "s_register_operand" "=l,l") (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "l,0") (match_operand:SI 2 "nonmemory_operand" "N,l")))] "" "@ asr\t%0, %1, %2 asr\t%0, %0, %2")
(define_insn "lshrsi3" [(set (match_operand:SI 0 "s_register_operand" "=l,l") (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "l,0") (match_operand:SI 2 "nonmemory_operand" "N,l")))] "" "@ lsr\t%0, %1, %2 lsr\t%0, %0, %2")
;; Rotate right: register count only, two-operand form.
(define_insn "rotrsi3" [(set (match_operand:SI 0 "s_register_operand" "=l") (rotatert:SI (match_operand:SI 1 "s_register_operand" "0") (match_operand:SI 2 "s_register_operand" "l")))] "" "ror\t%0, %0, %2")
;; Comparison insns
;; cmpsi expander: constants in [0,255] compare directly; constants in
;; [-255,-1] are negated and compared with cmn; anything else goes through
;; a register.
(define_expand "cmpsi" [(set (cc0) (compare (match_operand:SI 0 "s_register_operand" "") (match_operand:SI 1 "nonmemory_operand" "")))] "" " if (GET_CODE (operands[1]) != REG && GET_CODE (operands[1]) != SUBREG) { if (GET_CODE (operands[1]) != CONST_INT || (HOST_WIDE_UINT) (INTVAL (operands[1])) >= 256) { if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < -255 || INTVAL (operands[1]) > 0) operands[1] = force_reg (SImode, operands[1]); else { operands[1] = force_reg (SImode, GEN_INT (- INTVAL (operands[1]))); emit_insn (gen_cmnsi (operands[0], operands[1])); DONE; } } } ")
(define_insn "*cmpsi_insn" [(set (cc0) (compare (match_operand:SI 0 "s_register_operand" "l,*r,*h") (match_operand:SI 1 "thumb_cmp_operand" "lI,*h,*r")))] "" "@ cmp\t%0, %1 cmp\t%0, %1 cmp\t%0, %1")
;; Test against zero is just a compare with #0.
(define_insn "tstsi" [(set (cc0) (match_operand:SI 0 "s_register_operand" "l"))] "" "cmp\t%0, #0")
;; Compare-negated, used by the cmpsi expander for small negative constants.
(define_insn "cmnsi" [(set (cc0) (compare (match_operand:SI 0 "s_register_operand" "l") (neg:SI (match_operand:SI 1 "s_register_operand" "l"))))] "" "cmn\t%0, %1")
;; Jump insns
;; Unconditional jump: a 2-byte "b" when the target is within +/-2K,
;; otherwise a 4-byte BL (flagged as a far jump via the attribute).
(define_insn "jump" [(set (pc) (label_ref (match_operand 0 "" "")))] "" "* if (get_attr_length (insn) == 2) return "b\t%l0"; return "bl\t%l0\t%@ far jump"; "[(set (attr "far_jump") (if_then_else (eq_attr "length" "4") (const_string "yes") (const_string "no"))) (set (attr "length") (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -2048)) (le (minus (match_dup 0) (pc)) (const_int 2044))) (const_int 2) (const_int 4)))])
;; One conditional-branch expander per cc0 comparison; all resolve to the
;; cond_branch / cond_branch_reversed insns below.
(define_expand "beq" [(set (pc) (if_then_else (eq (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "bne" [(set (pc) (if_then_else (ne (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "bge" [(set (pc) (if_then_else (ge (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "ble" [(set (pc) (if_then_else (le (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "bgt" [(set (pc) (if_then_else (gt (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "blt" [(set (pc) (if_then_else (lt (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "bgeu" [(set (pc) (if_then_else (geu (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "bleu" [(set (pc) (if_then_else (leu (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "bgtu" [(set (pc) (if_then_else (gtu (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
(define_expand "bltu" [(set (pc) (if_then_else (ltu (cc0) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "")
;; Conditional branch: 2 bytes within +/-256, else an inverted short branch
;; around a "b" (4 bytes) or a BL far jump (6 bytes).
(define_insn "cond_branch" [(set (pc) (if_then_else (match_operator 1 "comparison_operator" [(cc0) (const_int 0)]) (label_ref (match_operand 0 "" "")) (pc)))] "" " switch (get_attr_length (insn)) { case 2: return "b%d1\t%l0\t%@cond_branch"; case 4: return "b%D1\t.LCB%=;b\t%l0\t%@long jump\n.LCB%=:"; default: return "b%D1\t.LCB%=;bl\t%l0\t%@far jump\n.LCB%=:"; } "[(set (attr "far_jump") (if_then_else (eq_attr "length" "6") (const_string "yes") (const_string "no"))) (set (attr "length") (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -252)) (le (minus (match_dup 0) (pc)) (const_int 254))) (const_int 2) (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -2044)) (le (minus (match_dup 0) (pc)) (const_int 2044))) (const_int 4) (const_int 6))))])
;; Same as cond_branch with the branch sense inverted.
(define_insn "cond_branch_reversed" [(set (pc) (if_then_else (match_operator 1 "comparison_operator" [(cc0) (const_int 0)]) (pc) (label_ref (match_operand 0 "" ""))))] "" " switch (get_attr_length (insn)) { case 2: return "b%D1\t%l0\t%@cond_branch_reversed"; case 4: return "b%d1\t.LCBR%=;b\t%l0\t%@long jump\n.LCBR%=:"; default: return "b%d1\t.LCBR%=;bl\t%l0\t%@far jump\n.LCBR%=:"; } return ""; "[(set (attr "far_jump") (if_then_else (eq_attr "length" "6") (const_string "yes") (const_string "no"))) (set (attr "length") (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -252)) (le (minus (match_dup 0) (pc)) (const_int 254))) (const_int 2) (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -2044)) (le (minus (match_dup 0) (pc)) (const_int 2044))) (const_int 4) (const_int 6))))])
(define_insn "indirect_jump" [(set (pc) (match_operand:SI 0 "s_register_operand" "l*r"))] "" "mov\tpc, %0")
(define_insn "tablejump" [(set (pc) (match_operand:SI 0 "s_register_operand" "l*r")) (use (label_ref (match_operand 1 "" "")))] "" "mov\tpc, %0")
;; Call insns
;; call expander: with -mlong-calls, a non-register target is forced into
;; a register so the indirect form is used.
(define_expand "call" [(call (match_operand:SI 0 "memory_operand" "") (match_operand 1 "" ""))] "" " { if (TARGET_LONG_CALLS && GET_CODE (XEXP (operands[0], 0)) != REG) XEXP (operands[0], 0) = force_reg (Pmode, XEXP (operands[0], 0)); }")
;; Indirect call goes through the _call_via_<reg> helper stubs.
(define_insn "call_indirect" [(call (mem:SI (match_operand:SI 0 "s_register_operand" "lr")) (match_operand 1 "" ""))] "" "bl\t_call_via_%0" [(set_attr "length" "4")])
(define_expand "call_value" [(set (match_operand 0 "" "") (call (match_operand 1 "memory_operand" "") (match_operand 2 "" "")))] "" " { if (TARGET_LONG_CALLS && GET_CODE (XEXP (operands[1], 0)) != REG) XEXP (operands[1], 0) = force_reg (Pmode, XEXP (operands[1], 0)); }")
(define_insn "call_value_indirect" [(set (match_operand 0 "" "=l") (call (mem:SI (match_operand:SI 1 "s_register_operand" "lr")) (match_operand 2 "" "")))] "" "bl\t_call_via_%1" [(set_attr "length" "4")]) ;; See comment for call_indirect pattern
;; Direct calls to a SYMBOL_REF, disabled under -mlong-calls.
(define_insn "*call_insn" [(call (mem:SI (match_operand:SI 0 "" "i")) (match_operand:SI 1 "" ""))] "! TARGET_LONG_CALLS && GET_CODE (operands[0]) == SYMBOL_REF" "bl\t%a0" [(set_attr "length" "4")])
(define_insn "*call_value_insn" [(set (match_operand 0 "s_register_operand" "=l") (call (mem:SI (match_operand 1 "" "i")) (match_operand 2 "" "")))] "! TARGET_LONG_CALLS && GET_CODE (operands[1]) == SYMBOL_REF" "bl\t%a1" [(set_attr "length" "4")])
;; Untyped call not required, since all funcs return in r0
;; Miscellaneous patterns
;; Thumb has no architectural nop; "mov r8, r8" is the conventional stand-in.
(define_insn "nop" [(clobber (const_int 0))] "" "mov\tr8, r8")
;; Zero-length scheduling barrier.
(define_insn "blockage" [(unspec_volatile [(const_int 0)] 0)] "" "" [(set_attr "length" "0")])
(define_expand "prologue" [(const_int 0)] "" " thumb_expand_prologue (); DONE; ")
(define_expand "epilogue" [(unspec_volatile [(const_int 0)] 1)] "" " thumb_expand_epilogue (); ")
;; length 42 is a worst-case upper bound for the emitted epilogue sequence.
(define_insn "epilogue_insns" [(unspec_volatile [(const_int 0)] 1)] "" " return thumb_unexpanded_epilogue (); " [(set_attr "length" "42")])
;; Special patterns for dealing with the constant pool
;; Emit a 4-byte constant-pool entry; floats go through assemble_real,
;; everything else through assemble_integer.
(define_insn "consttable_4" [(unspec_volatile [(match_operand 0 "" "")] 2)] "" "* { switch (GET_MODE_CLASS (GET_MODE (operands[0]))) { case MODE_FLOAT: { int i; union real_extract u; for (i = 0; i < sizeof (REAL_VALUE_TYPE) / sizeof (HOST_WIDE_INT); i++) u.i[i] = XWINT(operands[0], 2 + i); assemble_real (u.d, GET_MODE (operands[0])); break; } default: assemble_integer (operands[0], 4, 1); break; } return ""; }" [(set_attr "length" "4")])
;; Emit an 8-byte constant-pool entry (same scheme as consttable_4).
(define_insn "consttable_8" [(unspec_volatile [(match_operand 0 "" "")] 3)] "" "* { switch (GET_MODE_CLASS (GET_MODE (operands[0]))) { case MODE_FLOAT: { int i; union real_extract u; for (i = 0; i < sizeof (REAL_VALUE_TYPE) / sizeof (HOST_WIDE_INT); i++) u.i[i] = XWINT(operands[0], 2 + i); assemble_real (u.d, GET_MODE (operands[0])); break; } default: assemble_integer (operands[0], 8, 1); break; } return ""; }" [(set_attr "length" "8")])
(define_insn "consttable_end" [(unspec_volatile [(const_int 0)] 4)] "" "* /* Nothing to do (currently). */ return ""; ")
;; Align the constant pool to a 32-bit boundary.
(define_insn "align_4" [(unspec_volatile [(const_int 0)] 5)] "" "* assemble_align (32); return ""; ")