
;; Compare-and-branch expander for integer modes.  If both operands are
;; in memory, force the first into a register, then let
;; ix86_expand_branch emit the actual compare/jump sequence.
(define_expand "cbranch<mode>4"
  [(set (reg:CC FLAGS_REG)
	(compare:CC (match_operand:SDWIM 1 "nonimmediate_operand" "")
		    (match_operand:SDWIM 2 "<general_operand>" "")))
   (set (pc) (if_then_else
	       (match_operator 0 "ordered_comparison_operator"
		[(reg:CC FLAGS_REG) (const_int 0)])
	       (label_ref (match_operand 3 "" ""))
	       (pc)))]
  ""
{
  if (MEM_P (operands[1]) && MEM_P (operands[2]))
    operands[1] = force_reg (<MODE>mode, operands[1]);
  ix86_expand_branch (GET_CODE (operands[0]),
		      operands[1], operands[2], operands[3]);
  DONE;
})




;; Compare-and-set expander for integer modes.  Mirrors cbranch<mode>4:
;; avoid mem/mem compares, then hand off to ix86_expand_setcc.
(define_expand "cstore<mode>4"
  [(set (reg:CC FLAGS_REG)
	(compare:CC (match_operand:SWIM 2 "nonimmediate_operand" "")
		    (match_operand:SWIM 3 "<general_operand>" "")))
   (set (match_operand:QI 0 "register_operand" "")
	(match_operator 1 "ordered_comparison_operator"
	 [(reg:CC FLAGS_REG) (const_int 0)]))]
  ""
{
  if (MEM_P (operands[2]) && MEM_P (operands[3]))
    operands[2] = force_reg (<MODE>mode, operands[2]);
  ix86_expand_setcc (operands[0], GET_CODE (operands[1]),
		     operands[2], operands[3]);
  DONE;
})




;; Compare-and-branch for XFmode (80-bit extended float); requires the
;; x87 FPU.  Both operands must already be non-memory.
(define_expand "cbranchxf4"
  [(set (reg:CC FLAGS_REG)
	(compare:CC (match_operand:XF 1 "nonmemory_operand" "")
		    (match_operand:XF 2 "nonmemory_operand" "")))
   (set (pc) (if_then_else
	       (match_operator 0 "ix86_fp_comparison_operator"
		[(reg:CC FLAGS_REG) (const_int 0)])
	       (label_ref (match_operand 3 "" ""))
	       (pc)))]
  "TARGET_80387"
{
  ix86_expand_branch (GET_CODE (operands[0]),
		      operands[1], operands[2], operands[3]);
  DONE;
})




;; Compare-and-set for XFmode; x87 only, same structure as cbranchxf4
;; but producing a QImode condition result via ix86_expand_setcc.
(define_expand "cstorexf4"
  [(set (reg:CC FLAGS_REG)
	(compare:CC (match_operand:XF 2 "nonmemory_operand" "")
		    (match_operand:XF 3 "nonmemory_operand" "")))
   (set (match_operand:QI 0 "register_operand" "")
	(match_operator 1 "ix86_fp_comparison_operator"
	 [(reg:CC FLAGS_REG) (const_int 0)]))]
  "TARGET_80387"
{
  ix86_expand_setcc (operands[0], GET_CODE (operands[1]),
		     operands[2], operands[3]);
  DONE;
})




;; Branch on an already-computed flags value: operand 1 is the flags
;; register, operand 2 must be const0.  No new compare is emitted here.
(define_expand "cbranchcc4"
  [(set (pc) (if_then_else
	       (match_operator 0 "comparison_operator"
		[(match_operand 1 "flags_reg_operand" "")
		 (match_operand 2 "const0_operand" "")])
	       (label_ref (match_operand 3 "" ""))
	       (pc)))]
  ""
{
  ix86_expand_branch (GET_CODE (operands[0]),
		      operands[1], operands[2], operands[3]);
  DONE;
})




;; Setcc on an already-computed flags value (flags reg vs. const0).
(define_expand "cstorecc4"
  [(set (match_operand:QI 0 "register_operand" "")
	(match_operator 1 "comparison_operator"
	 [(match_operand 2 "flags_reg_operand" "")
	  (match_operand 3 "const0_operand" "")]))]
  ""
{
  ix86_expand_setcc (operands[0], GET_CODE (operands[1]),
		     operands[2], operands[3]);
  DONE;
})




;; Double-word push.  Never emitted as a single instruction; the "#"
;; template forces this to be split (see the define_split below).
(define_insn "*push<mode>2"
  [(set (match_operand:DWI 0 "push_operand" "=<")
	(match_operand:DWI 1 "general_no_elim_operand" "riF*m"))]
  ""
  "#")




;; After reload, split a non-SSE TImode push into word-sized moves.
(define_split
  [(set (match_operand:TI 0 "push_operand" "")
	(match_operand:TI 1 "general_operand" ""))]
  "TARGET_64BIT && reload_completed
   && !SSE_REG_P (operands[1])"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")

;; 64-bit DImode push.  Alternative 0 is a plain pushq; alternative 1
;; (an immediate that needs further handling) stays as "#" and is split.
(define_insn "*pushdi2_rex64"
  [(set (match_operand:DI 0 "push_operand" "=<,!<")
	(match_operand:DI 1 "general_no_elim_operand" "re*m,n"))]
  "TARGET_64BIT"
  "@
   push{q}\t%1
   #"
  [(set_attr "type" "push,multi")
   (set_attr "mode" "DI")])




;; Pushing a 64-bit immediate that is neither symbolic nor representable
;; as a sign-extended 32-bit value cannot be done directly.  First
;; choice: move it through a scratch register.
(define_peephole2
  [(match_scratch:DI 2 "r")
   (set (match_operand:DI 0 "push_operand" "")
	(match_operand:DI 1 "immediate_operand" ""))]
  "TARGET_64BIT && !symbolic_operand (operands[1], DImode)
   && !x86_64_immediate_operand (operands[1], DImode)"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (match_dup 2))])

;; Second choice when no scratch register is free: push the low half of
;; the constant, then store the high half into the just-pushed slot.
(define_peephole2
  [(set (match_operand:DI 0 "push_operand" "")
	(match_operand:DI 1 "immediate_operand" ""))]
  "TARGET_64BIT && !symbolic_operand (operands[1], DImode)
   && !x86_64_immediate_operand (operands[1], DImode) && 1"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
{
  split_double_mode (DImode, &operands[1], 1, &operands[2], &operands[3]);

  operands[1] = gen_lowpart (DImode, operands[2]);
  operands[2] = gen_rtx_MEM (SImode, gen_rtx_PLUS (DImode, stack_pointer_rtx,
						   GEN_INT (4)));
})

;; Same transformation as the peephole above, applied as a split when
;; peephole2 does not run (or after the epilogue is laid out).
(define_split
  [(set (match_operand:DI 0 "push_operand" "")
	(match_operand:DI 1 "immediate_operand" ""))]
  "TARGET_64BIT && ((optimize > 0 && flag_peephole2)
		    ? epilogue_completed : reload_completed)
   && !symbolic_operand (operands[1], DImode)
   && !x86_64_immediate_operand (operands[1], DImode)"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
{
  split_double_mode (DImode, &operands[1], 1, &operands[2], &operands[3]);

  operands[1] = gen_lowpart (DImode, operands[2]);
  operands[2] = gen_rtx_MEM (SImode, gen_rtx_PLUS (DImode, stack_pointer_rtx,
						   GEN_INT (4)));
})

;; On 32-bit targets a DImode push (not from an MMX/SSE register) is
;; split into two word moves after reload.
(define_split
  [(set (match_operand:DI 0 "push_operand" "")
	(match_operand:DI 1 "general_operand" ""))]
  "!TARGET_64BIT && reload_completed
   && !(MMX_REG_P (operands[1]) || SSE_REG_P (operands[1]))"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")

;; Plain 32-bit push.
(define_insn "*pushsi2"
  [(set (match_operand:SI 0 "push_operand" "=<")
	(match_operand:SI 1 "general_no_elim_operand" "ri*m"))]
  "!TARGET_64BIT"
  "push{l}\t%1"
  [(set_attr "type" "push")
   (set_attr "mode" "SI")])




;; On 64-bit targets, narrow-mode pushes are emitted as a full-width
;; pushq of the sign/zero-widened operand (%q1).
(define_insn "*push<mode>2_rex64"
  [(set (match_operand:SWI124 0 "push_operand" "=X")
	(match_operand:SWI124 1 "nonmemory_no_elim_operand" "r<i>"))]
  "TARGET_64BIT"
  "push{q}\t%q1"
  [(set_attr "type" "push")
   (set_attr "mode" "DI")])




;; On 32-bit targets, HImode/QImode pushes use a 32-bit pushl of the
;; widened operand (%k1).
(define_insn "*push<mode>2"
  [(set (match_operand:SWI12 0 "push_operand" "=X")
	(match_operand:SWI12 1 "nonmemory_no_elim_operand" "rn"))]
  "!TARGET_64BIT"
  "push{l}\t%k1"
  [(set_attr "type" "push")
   (set_attr "mode" "SI")])




;; Word-mode push used in the prologue.  The (clobber (mem:BLK ...))
;; acts as a memory barrier so the push is not moved across other
;; memory accesses by the scheduler.
(define_insn "*push<mode>2_prologue"
  [(set (match_operand:P 0 "push_operand" "=<")
	(match_operand:P 1 "general_no_elim_operand" "r<i>*m"))
   (clobber (mem:BLK (scratch)))]
  ""
  "push{<imodesuffix>}\t%1"
  [(set_attr "type" "push")
   (set_attr "mode" "<MODE>")])




;; Word-mode pop into a register or memory.
(define_insn "*pop<mode>1"
  [(set (match_operand:P 0 "nonimmediate_operand" "=r*m")
	(match_operand:P 1 "pop_operand" ">"))]
  ""
  "pop{<imodesuffix>}\t%0"
  [(set_attr "type" "pop")
   (set_attr "mode" "<MODE>")])




;; Epilogue variant of the pop; the block clobber keeps it from being
;; scheduled across other memory accesses.
(define_insn "*pop<mode>1_epilogue"
  [(set (match_operand:P 0 "nonimmediate_operand" "=r*m")
	(match_operand:P 1 "pop_operand" ">"))
   (clobber (mem:BLK (scratch)))]
  ""
  "pop{<imodesuffix>}\t%0"
  [(set_attr "type" "pop")
   (set_attr "mode" "<MODE>")])




;; OImode (256-bit) move expander; available only with AVX.
(define_expand "movoi"
  [(set (match_operand:OI 0 "nonimmediate_operand" "")
	(match_operand:OI 1 "general_operand" ""))]
  "TARGET_AVX"
  "ix86_expand_move (OImode, operands); DONE;")




;; TImode (128-bit) move expander.  64-bit targets use the integer move
;; path; 32-bit SSE targets treat TImode as a vector move, with pushes
;; handled specially.
(define_expand "movti"
  [(set (match_operand:TI 0 "nonimmediate_operand" "")
	(match_operand:TI 1 "nonimmediate_operand" ""))]
  "TARGET_64BIT || TARGET_SSE"
{
  if (TARGET_64BIT)
    ix86_expand_move (TImode, operands);
  else if (push_operand (operands[0], TImode))
    ix86_expand_push (TImode, operands[1]);
  else
    ix86_expand_vector_move (TImode, operands);
  DONE;
})




;; Complex-DImode move expander: pushes go through
;; emit_move_complex_push, everything else moves the real and imaginary
;; parts separately.
(define_expand "movcdi"
  [(set (match_operand:CDI 0 "nonimmediate_operand" "")
	(match_operand:CDI 1 "general_operand" ""))]
  ""
{
  if (push_operand (operands[0], CDImode))
    emit_move_complex_push (CDImode, operands[0], operands[1]);
  else
    emit_move_complex_parts (operands[0], operands[1]);
  DONE;
})




;; Generic integer move expander (QI/HI/SI/DI).
(define_expand "mov<mode>"
  [(set (match_operand:SWI1248x 0 "nonimmediate_operand" "")
	(match_operand:SWI1248x 1 "general_operand" ""))]
  ""
  "ix86_expand_move (<MODE>mode, operands); DONE;")




;; Load zero via xor of the 32-bit subregister (%k0); clobbers flags,
;; so only valid after reload when the clobber is known to be safe.
(define_insn "*mov<mode>_xor"
  [(set (match_operand:SWI48 0 "register_operand" "=r")
	(match_operand:SWI48 1 "const0_operand" ""))
   (clobber (reg:CC FLAGS_REG))]
  "reload_completed"
  "xor{l}\t%k0, %k0"
  [(set_attr "type" "alu1")
   (set_attr "mode" "SI")
   (set_attr "length_immediate" "0")])




;; Load -1 via "or $-1, reg" (short immediate encoding); clobbers
;; flags, hence restricted to after reload.
(define_insn "*mov<mode>_or"
  [(set (match_operand:SWI48 0 "register_operand" "=r")
	(match_operand:SWI48 1 "const_int_operand" ""))
   (clobber (reg:CC FLAGS_REG))]
  "reload_completed
   && operands[1] == constm1_rtx"
  "or{<imodesuffix>}\t{%1, %0|%0, %1}"
  [(set_attr "type" "alu1")
   (set_attr "mode" "<MODE>")
   (set_attr "length_immediate" "1")])




;; 256-bit OImode moves under AVX.  Alternative 0 clears a register
;; with vxorps; alternatives 1/2 copy reg<->mem, choosing vmovdqu for
;; misaligned operands and vmovdqa otherwise.
(define_insn "*movoi_internal_avx"
[(set(match_operand:OI 0 "nonimmediate_operand" "=x,x,m")(match_operand:OI 1 "vector_move_operand" "C,xm,x"))]
"TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
  switch (which_alternative)
    {
    case 0:
      return "vxorps\t%0, %0, %0";
    case 1:
    case 2:
      /* Unaligned operands need the unaligned move form.  */
      if (misaligned_operand (operands[0], OImode)
	  || misaligned_operand (operands[1], OImode))
	return "vmovdqu\t{%1, %0|%0, %1}";
      else
	return "vmovdqa\t{%1, %0|%0, %1}";
    default:
      gcc_unreachable ();
    }
}
  [(set_attr "type" "sselog1,ssemov,ssemov")
   (set_attr "prefix" "vex")
   (set_attr "mode" "OI")]
)




;; TImode moves on 64-bit targets.  Alternatives 0/1 are GPR/memory
;; moves emitted as "#" and split later; alternative 2 clears an SSE
;; register; 3/4 are SSE reg<->mem copies picking aligned vs. unaligned
;; forms.  The mode attribute selects V4SF encodings when optimizing
;; for size (shorter) or for typeless stores.
(define_insn "*movti_internal_rex64"
[(set(match_operand:TI 0 "nonimmediate_operand" "=!r,o,x,x,xm")(match_operand:TI 1 "general_operand" "riFo,riF,C,xm,x"))]
  "TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
  switch (which_alternative)
    {
    case 0:
    case 1:
      /* GPR/memory alternatives are always split.  */
      return "#";
    case 2:
      if (get_attr_mode (insn) == MODE_V4SF)
	return "%vxorps\t%0, %d0";
      else
	return "%vpxor\t%0, %d0";
    case 3:
    case 4:
      /* TDmode values are passed as TImode on the stack.  Moving them
	 to stack may result in unaligned memory access.  */
      if (misaligned_operand (operands[0], TImode)
	  || misaligned_operand (operands[1], TImode))
	{
	  if (get_attr_mode (insn) == MODE_V4SF)
	    return "%vmovups\t{%1, %0|%0, %1}";
	 else
	   return "%vmovdqu\t{%1, %0|%0, %1}";
	}
      else
	{
	  if (get_attr_mode (insn) == MODE_V4SF)
	    return "%vmovaps\t{%1, %0|%0, %1}";
	 else
	   return "%vmovdqa\t{%1, %0|%0, %1}";
	}
    default:
      gcc_unreachable ();
    }
}
  [(set_attr "type" "*,*,sselog1,ssemov,ssemov")
   (set_attr "prefix" "*,*,maybe_vex,maybe_vex,maybe_vex")
   (set (attr "mode")
   	(cond [(eq_attr "alternative" "2,3")
		 (if_then_else
		   (ne (symbol_ref "optimize_function_for_size_p (cfun)")
		       (const_int 0))
		   (const_string "V4SF")
		   (const_string "TI"))
	       (eq_attr "alternative" "4")
		 (if_then_else
		   (ior (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
			    (const_int 0))
			(ne (symbol_ref "optimize_function_for_size_p (cfun)")
			    (const_int 0)))
		   (const_string "V4SF")
		   (const_string "TI"))]
	       (const_string "DI")))]
)




;; After reload, split a TImode move not involving SSE registers into
;; word-sized moves.
(define_split
  [(set (match_operand:TI 0 "nonimmediate_operand" "")
	(match_operand:TI 1 "general_operand" ""))]
  "reload_completed
   && !SSE_REG_P (operands[0]) && !SSE_REG_P (operands[1])"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")
;; TImode moves on 32-bit SSE targets (all through SSE registers).
;; Alternative 0 clears a register; 1/2 copy reg<->mem with aligned or
;; unaligned forms.  Without SSE2, or when optimizing for size, the
;; V4SF encodings are used.
(define_insn "*movti_internal_sse"
[(set(match_operand:TI 0 "nonimmediate_operand" "=x,x,m")(match_operand:TI 1 "vector_move_operand" "C,xm,x"))]
"TARGET_SSE && !TARGET_64BIT
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
  switch (which_alternative)
    {
    case 0:
      if (get_attr_mode (insn) == MODE_V4SF)
	return "%vxorps\t%0, %d0";
      else
	return "%vpxor\t%0, %d0";
    case 1:
    case 2:
      /* TDmode values are passed as TImode on the stack.  Moving them
	 to stack may result in unaligned memory access.  */
      if (misaligned_operand (operands[0], TImode)
	  || misaligned_operand (operands[1], TImode))
	{
	  if (get_attr_mode (insn) == MODE_V4SF)
	    return "%vmovups\t{%1, %0|%0, %1}";
	 else
	   return "%vmovdqu\t{%1, %0|%0, %1}";
	}
      else
	{
	  if (get_attr_mode (insn) == MODE_V4SF)
	    return "%vmovaps\t{%1, %0|%0, %1}";
	 else
	   return "%vmovdqa\t{%1, %0|%0, %1}";
	}
    default:
      gcc_unreachable ();
    }
}
  [(set_attr "type" "sselog1,ssemov,ssemov")
   (set_attr "prefix" "maybe_vex")
   (set (attr "mode")
	(cond [(ior (eq (symbol_ref "TARGET_SSE2") (const_int 0))
		    (ne (symbol_ref "optimize_function_for_size_p (cfun)")
			(const_int 0)))
		 (const_string "V4SF")
	       (and (eq_attr "alternative" "2")
		    (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
			(const_int 0)))
		 (const_string "V4SF")]
	      (const_string "TI")))]

)




;; DImode moves on 64-bit targets.  Nineteen alternatives cover GPR,
;; immediate, memory, MMX (*y) and SSE (*x) registers; the type
;; attribute below classifies each alternative, and the C body selects
;; the template from that classification.  Alternative 2 (full 64-bit
;; immediate) uses movabs; moves between GPRs and MMX/SSE use movd
;; with a REX prefix.
(define_insn "*movdi_internal_rex64"
[(set(match_operand:DI 0 "nonimmediate_operand" "=r,r  ,r,m ,!m,*y,*y,?r ,m ,?*Ym,?*y,*x,*x,?r ,m,?*Yi,*x,?*x,?*Ym")(match_operand:DI 1 "general_operand" "Z ,rem,i,re,n ,C ,*y,*Ym,*y,r   ,m  ,C ,*x,*Yi,*x,r  ,m ,*Ym,*x"))]
"TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
  switch (get_attr_type (insn))
    {
    case TYPE_SSECVT:
      /* MMX <-> SSE register transfer.  */
      if (SSE_REG_P (operands[0]))
	return "movq2dq\t{%1, %0|%0, %1}";
      else
	return "movdq2q\t{%1, %0|%0, %1}";

    case TYPE_SSEMOV:
      if (TARGET_AVX)
	{
	  if (get_attr_mode (insn) == MODE_TI)
	    return "vmovdqa\t{%1, %0|%0, %1}";
	  else
	    return "vmovq\t{%1, %0|%0, %1}";
	}

      if (get_attr_mode (insn) == MODE_TI)
	return "movdqa\t{%1, %0|%0, %1}";
      /* FALLTHRU */

    case TYPE_MMXMOV:
      /* Moves from and into integer register is done using movd
	 opcode with REX prefix.  */
      if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
	return "movd\t{%1, %0|%0, %1}";
      return "movq\t{%1, %0|%0, %1}";

    case TYPE_SSELOG1:
      return "%vpxor\t%0, %d0";

    case TYPE_MMX:
      return "pxor\t%0, %0";

    case TYPE_MULTI:
      return "#";

    case TYPE_LEA:
      return "lea{q}\t{%a1, %0|%0, %a1}";

    default:
      gcc_assert (!flag_pic || LEGITIMATE_PIC_OPERAND_P (operands[1]));
      if (get_attr_mode (insn) == MODE_SI)
	return "mov{l}\t{%k1, %k0|%k0, %k1}";
      else if (which_alternative == 2)
	return "movabs{q}\t{%1, %0|%0, %1}";
      else
	return "mov{q}\t{%1, %0|%0, %1}";
    }
}
  [(set (attr "type")
     (cond [(eq_attr "alternative" "5")
	      (const_string "mmx")
	    (eq_attr "alternative" "6,7,8,9,10")
	      (const_string "mmxmov")
	    (eq_attr "alternative" "11")
	      (const_string "sselog1")
	    (eq_attr "alternative" "12,13,14,15,16")
	      (const_string "ssemov")
	    (eq_attr "alternative" "17,18")
	      (const_string "ssecvt")
	    (eq_attr "alternative" "4")
	      (const_string "multi")
 	    (match_operand:DI 1 "pic_32bit_operand" "")
	      (const_string "lea")
	   ]
	   (const_string "imov")))
   (set (attr "modrm")
     (if_then_else
       (and (eq_attr "alternative" "2") (eq_attr "type" "imov"))
	 (const_string "0")
	 (const_string "*")))
   (set (attr "length_immediate")
     (if_then_else
       (and (eq_attr "alternative" "2") (eq_attr "type" "imov"))
	 (const_string "8")
	 (const_string "*")))
   (set_attr "prefix_rex" "*,*,*,*,*,*,*,1,*,1,*,*,*,*,*,*,*,*,*")
   (set_attr "prefix_data16" "*,*,*,*,*,*,*,*,*,*,*,*,*,*,*,1,*,*,*")
   (set (attr "prefix")
     (if_then_else (eq_attr "alternative" "11,12,13,14,15,16")
       (const_string "maybe_vex")
       (const_string "orig")))
   (set_attr "mode" "SI,DI,DI,DI,SI,DI,DI,DI,DI,DI,DI,TI,TI,DI,DI,DI,DI,DI,DI")]

)




;; Storing a 64-bit immediate that is neither symbolic nor a valid
;; sign-extended 32-bit value into memory.  First choice: route it
;; through a scratch register.
(define_peephole2
  [(match_scratch:DI 2 "r")
   (set (match_operand:DI 0 "memory_operand" "")
	(match_operand:DI 1 "immediate_operand" ""))]
  "TARGET_64BIT && !symbolic_operand (operands[1], DImode)
   && !x86_64_immediate_operand (operands[1], DImode)"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (match_dup 2))])

;; Fallback when no scratch register is free: store the two 32-bit
;; halves separately.
(define_peephole2
  [(set (match_operand:DI 0 "memory_operand" "")
	(match_operand:DI 1 "immediate_operand" ""))]
  "TARGET_64BIT && !symbolic_operand (operands[1], DImode)
   && !x86_64_immediate_operand (operands[1], DImode) && 1"
  [(set (match_dup 2) (match_dup 3))
   (set (match_dup 4) (match_dup 5))]
  "split_double_mode (DImode, &operands[0], 2, &operands[2], &operands[4]);")

;; Same half-by-half store as a split when peephole2 does not run.
(define_split
  [(set (match_operand:DI 0 "memory_operand" "")
	(match_operand:DI 1 "immediate_operand" ""))]
  "TARGET_64BIT && ((optimize > 0 && flag_peephole2)
		    ? epilogue_completed : reload_completed)
   && !symbolic_operand (operands[1], DImode)
   && !x86_64_immediate_operand (operands[1], DImode)"
  [(set (match_dup 2) (match_dup 3))
   (set (match_dup 4) (match_dup 5))]
  "split_double_mode (DImode, &operands[0], 2, &operands[2], &operands[4]);")
;; DImode moves on 32-bit targets.  GPR/memory alternatives (0/1) are
;; emitted as "#" and split into two SImode moves; the remaining
;; alternatives move through MMX (*y) or SSE (*Y2/*x) registers.
(define_insn "*movdi_internal"
[(set(match_operand:DI 0 "nonimmediate_operand" "=r  ,o  ,*y,m*y,*y,*Y2,m  ,*Y2,*Y2,*x,m ,*x,*x")(match_operand:DI 1 "general_operand" "riFo,riF,C ,*y ,m ,C  ,*Y2,*Y2,m  ,C ,*x,*x,m "))]
  "!TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
  "@
   #
   #
   pxor\t%0, %0
   movq\t{%1, %0|%0, %1}
   movq\t{%1, %0|%0, %1}
   %vpxor\t%0, %d0
   %vmovq\t{%1, %0|%0, %1}
   %vmovdqa\t{%1, %0|%0, %1}
   %vmovq\t{%1, %0|%0, %1}
   xorps\t%0, %0
   movlps\t{%1, %0|%0, %1}
   movaps\t{%1, %0|%0, %1}
   movlps\t{%1, %0|%0, %1}"
  [(set_attr "type" "*,*,mmx,mmxmov,mmxmov,sselog1,ssemov,ssemov,ssemov,sselog1,ssemov,ssemov,ssemov")
   (set (attr "prefix")
     (if_then_else (eq_attr "alternative" "5,6,7,8")
       (const_string "vex")
       (const_string "orig")))
   (set_attr "mode" "DI,DI,DI,DI,DI,TI,DI,TI,DI,V4SF,V2SF,V4SF,V2SF")]
)




;; On 32-bit targets, split DImode moves that involve neither MMX nor
;; SSE registers into two SImode moves after reload.
(define_split
  [(set (match_operand:DI 0 "nonimmediate_operand" "")
	(match_operand:DI 1 "general_operand" ""))]
  "!TARGET_64BIT && reload_completed
   && !(MMX_REG_P (operands[0]) || SSE_REG_P (operands[0]))
   && !(MMX_REG_P (operands[1]) || SSE_REG_P (operands[1]))"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")
;; SImode moves.  Covers GPR/memory moves plus transfers through MMX
;; (*y) and SSE (*x) registers; the type attribute classifies each
;; alternative and the C body picks the template accordingly.
(define_insn "*movsi_internal"
[(set(match_operand:SI 0 "nonimmediate_operand" "=r,m ,*y,*y,?rm,?*y,*x,*x,?r ,m ,?*Yi,*x")(match_operand:SI 1 "general_operand" "g ,ri,C ,*y,*y ,rm ,C ,*x,*Yi,*x,r   ,m "))]
  "!(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
  switch (get_attr_type (insn))
    {
    case TYPE_SSELOG1:
      /* Clearing an SSE register.  */
      if (get_attr_mode (insn) == MODE_TI)
        return "%vpxor\t%0, %d0";
      return "%vxorps\t%0, %d0";

    case TYPE_SSEMOV:
      switch (get_attr_mode (insn))
	{
	case MODE_TI:
	  return "%vmovdqa\t{%1, %0|%0, %1}";
	case MODE_V4SF:
	  return "%vmovaps\t{%1, %0|%0, %1}";
	case MODE_SI:
          return "%vmovd\t{%1, %0|%0, %1}";
	case MODE_SF:
          return "%vmovss\t{%1, %0|%0, %1}";
	default:
	  gcc_unreachable ();
	}

    case TYPE_MMX:
      return "pxor\t%0, %0";

    case TYPE_MMXMOV:
      if (get_attr_mode (insn) == MODE_DI)
	return "movq\t{%1, %0|%0, %1}";
      return "movd\t{%1, %0|%0, %1}";

    case TYPE_LEA:
      return "lea{l}\t{%a1, %0|%0, %a1}";

    default:
      gcc_assert (!flag_pic || LEGITIMATE_PIC_OPERAND_P (operands[1]));
      return "mov{l}\t{%1, %0|%0, %1}";
    }
}
  [(set (attr "type")
     (cond [(eq_attr "alternative" "2")
	      (const_string "mmx")
	    (eq_attr "alternative" "3,4,5")
	      (const_string "mmxmov")
	    (eq_attr "alternative" "6")
	      (const_string "sselog1")
	    (eq_attr "alternative" "7,8,9,10,11")
	      (const_string "ssemov")
 	    (match_operand:DI 1 "pic_32bit_operand" "")
	      (const_string "lea")
	   ]
	   (const_string "imov")))
   (set (attr "prefix")
     (if_then_else (eq_attr "alternative" "0,1,2,3,4,5")
       (const_string "orig")
       (const_string "maybe_vex")))
   (set (attr "prefix_data16")
     (if_then_else (and (eq_attr "type" "ssemov") (eq_attr "mode" "SI"))
       (const_string "1")
       (const_string "*")))
   (set (attr "mode")
     (cond [(eq_attr "alternative" "2,3")
	      (const_string "DI")
	    (eq_attr "alternative" "6,7")
	      (if_then_else
	        (eq (symbol_ref "TARGET_SSE2") (const_int 0))
	        (const_string "V4SF")
	        (const_string "TI"))
	    (and (eq_attr "alternative" "8,9,10,11")
	         (eq (symbol_ref "TARGET_SSE2") (const_int 0)))
	      (const_string "SF")
	   ]
	   (const_string "SI")))]
)




;; HImode moves.  Depending on the type/mode attributes below, this is
;; emitted as movzwl (imovx), a full 32-bit movl of the wider
;; subregisters, or a plain 16-bit movw.
(define_insn "*movhi_internal"
[(set(match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m")(match_operand:HI 1 "general_operand" "r,rn,rm,rn"))]
  "!(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
  switch (get_attr_type (insn))
    {
    case TYPE_IMOVX:
      /* movzwl is faster than movw on p2 due to partial word stalls,
	 though not as fast as an aligned movl.  */
      return "movz{wl|x}\t{%1, %k0|%k0, %1}";
    default:
      if (get_attr_mode (insn) == MODE_SI)
        return "mov{l}\t{%k1, %k0|%k0, %k1}";
      else
        return "mov{w}\t{%1, %0|%0, %1}";
    }
}
  [(set (attr "type")
     (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
		(const_int 0))
	      (const_string "imov")
	    (and (eq_attr "alternative" "0")
		 (ior (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
			  (const_int 0))
		      (eq (symbol_ref "TARGET_HIMODE_MATH")
			  (const_int 0))))
	      (const_string "imov")
	    (and (eq_attr "alternative" "1,2")
		 (match_operand:HI 1 "aligned_operand" ""))
	      (const_string "imov")
	    (and (ne (symbol_ref "TARGET_MOVX")
		     (const_int 0))
		 (eq_attr "alternative" "0,2"))
	      (const_string "imovx")
	   ]
	   (const_string "imov")))
    (set (attr "mode")
      (cond [(eq_attr "type" "imovx")
	       (const_string "SI")
	     (and (eq_attr "alternative" "1,2")
		  (match_operand:HI 1 "aligned_operand" ""))
	       (const_string "SI")
	     (and (eq_attr "alternative" "0")
		  (ior (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
			   (const_int 0))
		       (eq (symbol_ref "TARGET_HIMODE_MATH")
			   (const_int 0))))
	       (const_string "SI")
	    ]
	    (const_string "HI")))]
)




;; QImode moves.  Depending on the type/mode attributes below, this is
;; emitted as movzbl (imovx), a full 32-bit movl of the wider
;; subregisters, or a plain byte movb.  The mode attribute widens byte
;; moves to SI to avoid partial-register stalls where profitable.
(define_insn "*movqi_internal"
[(set(match_operand:QI 0 "nonimmediate_operand" "=q,q ,q ,r,r ,?r,m")(match_operand:QI 1 "general_operand" " q,qn,qm,q,rn,qm,qn"))]
  "!(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
  switch (get_attr_type (insn))
    {
    case TYPE_IMOVX:
      /* movzbl needs a byte-addressable source register or memory.  */
      gcc_assert (ANY_QI_REG_P (operands[1]) || MEM_P (operands[1]));
      return "movz{bl|x}\t{%1, %k0|%k0, %1}";
    default:
      if (get_attr_mode (insn) == MODE_SI)
        return "mov{l}\t{%k1, %k0|%k0, %k1}";
      else
        return "mov{b}\t{%1, %0|%0, %1}";
    }
}
  [(set (attr "type")
     (cond [(and (eq_attr "alternative" "5")
		 (not (match_operand:QI 1 "aligned_operand" "")))
	      (const_string "imovx")
	    (ne (symbol_ref "optimize_function_for_size_p (cfun)")
		(const_int 0))
	      (const_string "imov")
	    (and (eq_attr "alternative" "3")
		 (ior (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
			  (const_int 0))
		      (eq (symbol_ref "TARGET_QIMODE_MATH")
			  (const_int 0))))
	      (const_string "imov")
	    (eq_attr "alternative" "3,5")
	      (const_string "imovx")
	    (and (ne (symbol_ref "TARGET_MOVX")
		     (const_int 0))
		 (eq_attr "alternative" "2"))
	      (const_string "imovx")
	   ]
	   (const_string "imov")))
   (set (attr "mode")
      (cond [(eq_attr "alternative" "3,4,5")
	       (const_string "SI")
	     (eq_attr "alternative" "6")
	       (const_string "QI")
	     (eq_attr "type" "imovx")
	       (const_string "SI")
	     (and (eq_attr "type" "imov")
		  (and (eq_attr "alternative" "0,1")
		       (and (ne (symbol_ref "TARGET_PARTIAL_REG_DEPENDENCY")
				(const_int 0))
			    (and (eq (symbol_ref "optimize_function_for_size_p (cfun)")
				     (const_int 0))
				 (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
				     (const_int 0))))))
	       (const_string "SI")
	     ;; Avoid partial register stalls when not using QImode arithmetic
	     (and (eq_attr "type" "imov")
		  (and (eq_attr "alternative" "0,1")
		       (and (ne (symbol_ref "TARGET_PARTIAL_REG_STALL")
				(const_int 0))
			    (eq (symbol_ref "TARGET_QIMODE_MATH")
				(const_int 0)))))
	       (const_string "SI")
	   ]
	   (const_string "QI")))]
)




;; Store to an absolute 64-bit address.  Alternative 0 is the
;; movabs form (rax source, 64-bit direct address); alternative 1 is a
;; normal mov through a register-addressed memory operand.
(define_insn "*movabs<mode>_1"
  [(set (mem:SWI1248x (match_operand:DI 0 "x86_64_movabs_operand" "i,r"))
	(match_operand:SWI1248x 1 "nonmemory_operand" "a,er"))]
  "TARGET_64BIT && ix86_check_movabs (insn, 0)"
  "@
   movabs{<imodesuffix>}\t{%1, %P0|%P0, %1}
   mov{<imodesuffix>}\t{%1, %a0|%a0, %1}"
  [(set_attr "type" "imov")
   (set_attr "modrm" "0,*")
   (set_attr "length_address" "8,0")
   (set_attr "length_immediate" "0,*")
   (set_attr "memory" "store")
   (set_attr "mode" "<MODE>")])




;; Load from an absolute 64-bit address; mirror image of *movabs<mode>_1.
(define_insn "*movabs<mode>_2"
  [(set (match_operand:SWI1248x 0 "register_operand" "=a,r")
	(mem:SWI1248x (match_operand:DI 1 "x86_64_movabs_operand" "i,r")))]
  "TARGET_64BIT && ix86_check_movabs (insn, 1)"
  "@
   movabs{<imodesuffix>}\t{%P1, %0|%0, %P1}
   mov{<imodesuffix>}\t{%a1, %0|%0, %a1}"
  [(set_attr "type" "imov")
   (set_attr "modrm" "0,*")
   (set_attr "length_address" "8,0")
   (set_attr "length_immediate" "0")
   (set_attr "memory" "load")
   (set_attr "mode" "<MODE>")])




;; Exchange two word-mode registers with xchg.
(define_insn "*swap<mode>"
  [(set (match_operand:SWI48 0 "register_operand" "+r")
	(match_operand:SWI48 1 "register_operand" "+r"))
   (set (match_dup 1)
	(match_dup 0))]
  ""
  "xchg{<imodesuffix>}\t%1, %0"
  [(set_attr "type" "imov")
   (set_attr "mode" "<MODE>")
   (set_attr "pent_pair" "np")
   (set_attr "athlon_decode" "vector")
   (set_attr "amdfam10_decode" "double")
   (set_attr "bdver1_decode" "double")])




;; Exchange two narrow (QI/HI) registers using the full 32-bit xchgl of
;; the widened registers (%k0/%k1); only when partial-register stalls
;; are not a concern or we optimize for size.
(define_insn "*swap<mode>_1"
  [(set (match_operand:SWI12 0 "register_operand" "+r")
	(match_operand:SWI12 1 "register_operand" "+r"))
   (set (match_dup 1)
	(match_dup 0))]
  "!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun)"
  "xchg{l}\t%k1, %k0"
  [(set_attr "type" "imov")
   (set_attr "mode" "SI")
   (set_attr "pent_pair" "np")
   (set_attr "athlon_decode" "vector")
   (set_attr "amdfam10_decode" "double")
   (set_attr "bdver1_decode" "double")])




;; Narrow-mode exchange in the native width, used on targets with
;; partial-register stalls where widening would hurt.
(define_insn "*swap<mode>_2"
  [(set (match_operand:SWI12 0 "register_operand" "+<r>")
	(match_operand:SWI12 1 "register_operand" "+<r>"))
   (set (match_dup 1)
	(match_dup 0))]
  "TARGET_PARTIAL_REG_STALL"
  "xchg{<imodesuffix>}\t%1, %0"
  [(set_attr "type" "imov")
   (set_attr "mode" "<MODE>")
   (set_attr "pent_pair" "np")
   (set_attr "athlon_decode" "vector")])




;; Strict-low-part move expander for QI/HI.  FAILs (falling back to a
;; full-register move) when partial-register stalls make it a bad idea,
;; and avoids mem->mem by forcing the source into a register.
(define_expand "movstrict<mode>"
  [(set (strict_low_part (match_operand:SWI12 0 "nonimmediate_operand" ""))
	(match_operand:SWI12 1 "general_operand" ""))]
  ""
{
  if (TARGET_PARTIAL_REG_STALL && optimize_function_for_speed_p (cfun))
    FAIL;
  /* Don't generate memory->memory moves, go through a register */
  if (MEM_P (operands[0]) && MEM_P (operands[1]))
    operands[1] = force_reg (<MODE>mode, operands[1]);
})




;; Strict-low-part narrow move; writes only the low QI/HI part of the
;; destination register.
(define_insn "*movstrict<mode>_1"
  [(set (strict_low_part
	  (match_operand:SWI12 0 "nonimmediate_operand" "+<r>m,<r>"))
	(match_operand:SWI12 1 "general_operand" "<r>n,m"))]
  "(!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
  "mov{<imodesuffix>}\t{%1, %0|%0, %1}"
  [(set_attr "type" "imov")
   (set_attr "mode" "<MODE>")])




;; Zero only the low QI/HI part of a register with a narrow xor;
;; clobbers flags, so restricted to after reload.
(define_insn "*movstrict<mode>_xor"
  [(set (strict_low_part (match_operand:SWI12 0 "register_operand" "+<r>"))
	(match_operand:SWI12 1 "const0_operand" ""))
   (clobber (reg:CC FLAGS_REG))]
  "reload_completed"
  "xor{<imodesuffix>}\t%0, %0"
  [(set_attr "type" "alu1")
   (set_attr "mode" "<MODE>")
   (set_attr "length_immediate" "0")])




;; Sign-extend the high byte of a QImode-addressable register (%ah etc.)
;; into an HI/SI register via movsbl.
(define_insn "*mov<mode>_extv_1"
  [(set (match_operand:SWI24 0 "register_operand" "=R")
	(sign_extract:SWI24 (match_operand 1 "ext_register_operand" "Q")
			    (const_int 8)
			    (const_int 8)))]
  ""
  "movs{bl|x}\t{%h1, %k0|%k0, %h1}"
  [(set_attr "type" "imovx")
   (set_attr "mode" "SI")])




;; 64-bit: move the high byte of a QI-addressable register into a
;; QImode register, either sign-extended via movsbl (imovx) or as a
;; plain byte move, chosen by the type attribute below.
(define_insn "*movqi_extv_1_rex64"
[(set(match_operand:QI 0 "register_operand" "=Q,?R")(sign_extract:QI(match_operand 1 "ext_register_operand" "Q,Q")(const_int 8)(const_int 8)))]
"TARGET_64BIT"
{
  switch (get_attr_type (insn))
    {
    case TYPE_IMOVX:
      return "movs{bl|x}\t{%h1, %k0|%k0, %h1}";
    default:
      return "mov{b}\t{%h1, %0|%0, %h1}";
    }
}
  [(set (attr "type")
     (if_then_else (and (match_operand:QI 0 "register_operand" "")
			(ior (not (match_operand:QI 0 "q_regs_operand" ""))
			     (ne (symbol_ref "TARGET_MOVX")
				 (const_int 0))))
	(const_string "imovx")
	(const_string "imov")))
   (set (attr "mode")
     (if_then_else (eq_attr "type" "imovx")
	(const_string "SI")
	(const_string "QI")))]

)




;; 32-bit counterpart of *movqi_extv_1_rex64; the destination may also
;; be memory (Qm alternative).
(define_insn "*movqi_extv_1"
[(set(match_operand:QI 0 "nonimmediate_operand" "=Qm,?r")(sign_extract:QI(match_operand 1 "ext_register_operand" "Q,Q")(const_int 8)(const_int 8)))]
"!TARGET_64BIT"
{
  switch (get_attr_type (insn))
    {
    case TYPE_IMOVX:
      return "movs{bl|x}\t{%h1, %k0|%k0, %h1}";
    default:
      return "mov{b}\t{%h1, %0|%0, %h1}";
    }
}
  [(set (attr "type")
     (if_then_else (and (match_operand:QI 0 "register_operand" "")
			(ior (not (match_operand:QI 0 "q_regs_operand" ""))
			     (ne (symbol_ref "TARGET_MOVX")
				 (const_int 0))))
	(const_string "imovx")
	(const_string "imov")))
   (set (attr "mode")
     (if_then_else (eq_attr "type" "imovx")
	(const_string "SI")
	(const_string "QI")))]
)




;; Zero-extend the high byte of a QI-addressable register into an SI/DI
;; register via movzbl.
(define_insn "*mov<mode>_extzv_1"
  [(set (match_operand:SWI48 0 "register_operand" "=R")
	(zero_extract:SWI48 (match_operand 1 "ext_register_operand" "Q")
			    (const_int 8)
			    (const_int 8)))]
  ""
  "movz{bl|x}\t{%h1, %k0|%k0, %h1}"
  [(set_attr "type" "imovx")
   (set_attr "mode" "SI")])




;; 64-bit: extract the high byte (as a QImode subreg of a
;; zero_extract) into a QImode register, via movzbl (imovx) or a plain
;; byte move, chosen by the type attribute below.
(define_insn "*movqi_extzv_2_rex64"
[(set(match_operand:QI 0 "register_operand" "=Q,?R")(subreg:QI(zero_extract:SI(match_operand 1 "ext_register_operand" "Q,Q")(const_int 8)(const_int 8)) 0))]
"TARGET_64BIT"
{
  switch (get_attr_type (insn))
    {
    case TYPE_IMOVX:
      return "movz{bl|x}\t{%h1, %k0|%k0, %h1}";
    default:
      return "mov{b}\t{%h1, %0|%0, %h1}";
    }
}
  [(set (attr "type")
     (if_then_else (ior (not (match_operand:QI 0 "q_regs_operand" ""))
			(ne (symbol_ref "TARGET_MOVX")
			    (const_int 0)))
	(const_string "imovx")
	(const_string "imov")))
   (set (attr "mode")
     (if_then_else (eq_attr "type" "imovx")
	(const_string "SI")
	(const_string "QI")))]

)




;; 32-bit counterpart of *movqi_extzv_2_rex64; the destination may also
;; be memory (Qm alternative).
(define_insn "*movqi_extzv_2"
[(set(match_operand:QI 0 "nonimmediate_operand" "=Qm,?R")(subreg:QI(zero_extract:SI(match_operand 1 "ext_register_operand" "Q,Q")(const_int 8)(const_int 8)) 0))]
"!TARGET_64BIT"
{
  switch (get_attr_type (insn))
    {
    case TYPE_IMOVX:
      return "movz{bl|x}\t{%h1, %k0|%k0, %h1}";
    default:
      return "mov{b}\t{%h1, %0|%0, %h1}";
    }
}
  [(set (attr "type")
     (if_then_else (and (match_operand:QI 0 "register_operand" "")
			(ior (not (match_operand:QI 0 "q_regs_operand" ""))
			     (ne (symbol_ref "TARGET_MOVX")
				 (const_int 0))))
	(const_string "imovx")
	(const_string "imov")))
   (set (attr "mode")
     (if_then_else (eq_attr "type" "imovx")
	(const_string "SI")
	(const_string "QI")))]

)




;; Named expander for storing into the high byte of a QI-addressable
;; register; the pattern alone is sufficient, no preparation code.
(define_expand "mov<mode>_insv_1"
  [(set (zero_extract:SWI48 (match_operand 0 "ext_register_operand" "")
			    (const_int 8)
			    (const_int 8))
	(match_operand:SWI48 1 "nonmemory_operand" ""))])




;; 64-bit: write the low byte of operand 1 into the high byte of
;; operand 0 (%h0) with a byte move.
(define_insn "*mov<mode>_insv_1_rex64"
  [(set (zero_extract:SWI48x (match_operand 0 "ext_register_operand" "+Q")
			     (const_int 8)
			     (const_int 8))
	(match_operand:SWI48x 1 "nonmemory_operand" "Qn"))]
  "TARGET_64BIT"
  "mov{b}\t{%b1, %h0|%h0, %b1}"
  [(set_attr "type" "imov")
   (set_attr "mode" "QI")])




;; 32-bit counterpart: store into the high byte of a QI-addressable
;; register; the source may also be memory (Qmn constraint).
(define_insn "*movsi_insv_1"
  [(set (zero_extract:SI (match_operand 0 "ext_register_operand" "+Q")
			 (const_int 8)
			 (const_int 8))
	(match_operand:SI 1 "general_operand" "Qmn"))]
  "!TARGET_64BIT"
  "mov{b}\t{%b1, %h0|%h0, %b1}"
  [(set_attr "type" "imov")
   (set_attr "mode" "QI")])




;; Copy the high byte of operand 1 (its value shifted right by 8) into
;; the high byte of operand 0: mov %h1, %h0.
(define_insn "*movqi_insv_2"
  [(set (zero_extract:SI (match_operand 0 "ext_register_operand" "+Q")
			 (const_int 8)
			 (const_int 8))
	(lshiftrt:SI (match_operand:SI 1 "register_operand" "Q")
		     (const_int 8)))]
  ""
  "mov{b}\t{%h1, %h0|%h0, %h1}"
  [(set_attr "type" "imov")
   (set_attr "mode" "QI")])




;; TFmode push.  Always split before reg-stack; reaching the output
;; routine is a bug, hence the gcc_unreachable body.
(define_insn "*pushtf"
  [(set (match_operand:TF 0 "push_operand" "=<,<,<")
	(match_operand:TF 1 "general_no_elim_operand" "x,Fo,*r"))]
  "TARGET_SSE2"
{
  /* This insn should be already split before reg-stack.  */
  gcc_unreachable ();
}
  [(set_attr "type" "multi")
   (set_attr "unit" "sse,*,*")
   (set_attr "mode" "TF,SI,SI")])




;; Split a TFmode push from an SSE register into an explicit stack
;; adjustment followed by a store.
(define_split
  [(set (match_operand:TF 0 "push_operand" "")
	(match_operand:TF 1 "sse_reg_operand" ""))]
  "TARGET_SSE2 && reload_completed"
  [(set (reg:P SP_REG) (plus:P (reg:P SP_REG) (const_int -16)))
   (set (mem:TF (reg:P SP_REG)) (match_dup 1))])

;; Any other TFmode push is split into word-sized moves.
(define_split
  [(set (match_operand:TF 0 "push_operand" "")
	(match_operand:TF 1 "general_operand" ""))]
  "TARGET_SSE2 && reload_completed
   && !SSE_REG_P (operands[1])"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")
;; XFmode push (speed-optimized variant).  Always split before
;; reg-stack; reaching the output routine is a bug.
(define_insn "*pushxf"
  [(set (match_operand:XF 0 "push_operand" "=<,<")
	(match_operand:XF 1 "general_no_elim_operand" "f,ro"))]
  "optimize_function_for_speed_p (cfun)"
{
  /* This insn should be already split before reg-stack.  */
  gcc_unreachable ();
}
  [(set_attr "type" "multi")
   (set_attr "unit" "i387,*")
   (set_attr "mode" "XF,SI")])




;; Size-optimized XF push variant; like *pushxf but with constraints
;; that avoid plain integer-register alternatives.
(define_insn "*pushxf_nointeger"
  [(set (match_operand:XF 0 "push_operand" "=X,X,X")
	(match_operand:XF 1 "general_no_elim_operand" "f,Fo,*r"))]
  "optimize_function_for_size_p (cfun)"
{
  /* This insn should be already split before reg-stack.  */
  gcc_unreachable ();
}
  [(set_attr "type" "multi")
   (set_attr "unit" "i387,*,*")
   (set_attr "mode" "XF,SI,SI")])




;; Split an XF push from an x87 register into stack-pointer adjustment
;; (by the full XFmode size) plus a store through the stack pointer.
(define_split
  [(set (match_operand:XF 0 "push_operand" "")
	(match_operand:XF 1 "fp_register_operand" ""))]
  "reload_completed"
  [(set (reg:P SP_REG) (plus:P (reg:P SP_REG) (match_dup 2)))
   (set (mem:XF (reg:P SP_REG)) (match_dup 1))]
  "operands[2] = GEN_INT (-GET_MODE_SIZE (XFmode));")

;; Any other XF push is decomposed into word-sized moves.
(define_split
  [(set (match_operand:XF 0 "push_operand" "")
	(match_operand:XF 1 "general_operand" ""))]
  "reload_completed
   && !FP_REG_P (operands[1])"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")
;; Push of a DFmode value when integer DFmode moves are usable.
;; Always split before reg-stack, hence the gcc_unreachable body.
(define_insn "*pushdf"
  [(set (match_operand:DF 0 "push_operand" "=<,<,<")
	(match_operand:DF 1 "general_no_elim_operand" "f,rFo,Y2"))]
  "TARGET_64BIT || TARGET_INTEGER_DFMODE_MOVES"
{
  /* This insn should be already split before reg-stack.  */
  gcc_unreachable ();
}
  [(set_attr "type" "multi")
   (set_attr "unit" "i387,*,*")
   (set_attr "mode" "DF,SI,DF")])




;; DF push variant for targets/settings where integer DFmode moves are
;; not used.  Always split before reg-stack.
(define_insn "*pushdf_nointeger"
  [(set (match_operand:DF 0 "push_operand" "=<,<,<,<")
	(match_operand:DF 1 "general_no_elim_operand" "f,Fo,*r,Y2"))]
  "!(TARGET_64BIT || TARGET_INTEGER_DFMODE_MOVES)"
{
  /* This insn should be already split before reg-stack.  */
  gcc_unreachable ();
}
  [(set_attr "type" "multi")
   (set_attr "unit" "i387,*,*,*")
   (set_attr "mode" "DF,SI,SI,DF")])




;; Split a DF push from any FP register (x87 or SSE) into an 8-byte
;; stack adjustment followed by a store through the stack pointer.
(define_split
  [(set (match_operand:DF 0 "push_operand" "")
	(match_operand:DF 1 "any_fp_register_operand" ""))]
  "reload_completed"
  [(set (reg:P SP_REG) (plus:P (reg:P SP_REG) (const_int -8)))
   (set (mem:DF (reg:P SP_REG)) (match_dup 1))])

;; Any other DF push is decomposed into word-sized moves.
(define_split
  [(set (match_operand:DF 0 "push_operand" "")
	(match_operand:DF 1 "general_operand" ""))]
  "reload_completed
   && !ANY_FP_REG_P (operands[1])"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")
;; 64-bit SF push.  Only the integer-register alternative is emitted
;; directly (a 64-bit push of the zero-extended value); the FP/SSE
;; alternatives are split before reg-stack.
(define_insn "*pushsf_rex64"
  [(set (match_operand:SF 0 "push_operand" "=X,X,X")
	(match_operand:SF 1 "nonmemory_no_elim_operand" "f,rF,x"))]
  "TARGET_64BIT"
{
  /* Anything else should be already split before reg-stack.  */
  gcc_assert (which_alternative == 1);
  return "push{q}\t%q1";
}
  [(set_attr "type" "multi,push,multi")
   (set_attr "unit" "i387,*,*")
   (set_attr "mode" "SF,DI,SF")])




;; 32-bit SF push.  Only the integer alternative is emitted directly;
;; the FP/SSE alternatives are split before reg-stack.
(define_insn "*pushsf"
  [(set (match_operand:SF 0 "push_operand" "=<,<,<")
	(match_operand:SF 1 "general_no_elim_operand" "f,rFm,x"))]
  "!TARGET_64BIT"
{
  /* Anything else should be already split before reg-stack.  */
  gcc_assert (which_alternative == 1);
  return "push{l}\t%1";
}
  [(set_attr "type" "multi,push,multi")
   (set_attr "unit" "i387,*,*")
   (set_attr "mode" "SF,SI,SF")])




;; If an SF push reloads from memory that holds a known constant,
;; push the constant directly instead.
(define_split
  [(set (match_operand:SF 0 "push_operand" "")
	(match_operand:SF 1 "memory_operand" ""))]
  "reload_completed
   && MEM_P (operands[1])
   && (operands[2] = find_constant_src (insn))"
  [(set (match_dup 0) (match_dup 2))])

;; Split an SF push from an FP register into stack adjustment plus
;; store.  NOTE(review): <MODE> here resolves via the P iterator used
;; in the replacement pattern, i.e. the adjustment is one word, not
;; GET_MODE_SIZE (SFmode) — confirm against the P mode iterator.
(define_split
  [(set (match_operand:SF 0 "push_operand" "")
	(match_operand:SF 1 "any_fp_register_operand" ""))]
  "reload_completed"
  [(set (reg:P SP_REG) (plus:P (reg:P SP_REG) (match_dup 2)))
   (set (mem:SF (reg:P SP_REG)) (match_dup 1))]
  "operands[2] = GEN_INT (-GET_MODE_SIZE (<MODE>mode));")
;; TFmode move expander; all the work is done by ix86_expand_move.
(define_expand "movtf"
  [(set (match_operand:TF 0 "nonimmediate_operand" "")
	(match_operand:TF 1 "nonimmediate_operand" ""))]
  "TARGET_SSE2"
{
  ix86_expand_move (TFmode, operands);
  DONE;
})




;; Move expander for the x87/SSE scalar float modes (X87MODEF).
(define_expand "mov<mode>"
  [(set (match_operand:X87MODEF 0 "nonimmediate_operand" "")
	(match_operand:X87MODEF 1 "general_operand" ""))]
  ""
  "ix86_expand_move (<MODE>mode, operands); DONE;")




;; General TFmode move.  Alternatives 0-1 are SSE register/memory
;; moves, alternative 2 zeroes an SSE register, and alternatives 3-4
;; (integer register <-> memory) emit "#" so the TF split below breaks
;; them into word-sized moves.
(define_insn "*movtf_internal"
[(set(match_operand:TF 0 "nonimmediate_operand" "=x,m,x,?r,?o")(match_operand:TF 1 "general_operand" "xm,x,C,roF,Fr"))]
"TARGET_SSE2
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
  switch (which_alternative)
    {
    case 0:
    case 1:
      /* Full-register SSE move; mnemonic follows the mode attribute.  */
      if (get_attr_mode (insn) == MODE_V4SF)
	return "%vmovaps\t{%1, %0|%0, %1}";
      else
	return "%vmovdqa\t{%1, %0|%0, %1}";
    case 2:
      /* Zero the destination register.  */
      if (get_attr_mode (insn) == MODE_V4SF)
	return "%vxorps\t%0, %d0";
      else
	return "%vpxor\t%0, %d0";
    case 3:
    case 4:
	return "#";
    default:
      gcc_unreachable ();
    }
}
  [(set_attr "type" "ssemov,ssemov,sselog1,*,*")
   (set_attr "prefix" "maybe_vex,maybe_vex,maybe_vex,*,*")
   ;; Prefer the single-precision (V4SF) forms when optimizing for size
   ;; or when typeless stores are cheap; they encode shorter.
   (set (attr "mode")
        (cond [(eq_attr "alternative" "0,2")
		 (if_then_else
		   (ne (symbol_ref "optimize_function_for_size_p (cfun)")
		       (const_int 0))
		   (const_string "V4SF")
		   (const_string "TI"))
	       (eq_attr "alternative" "1")
		 (if_then_else
		   (ior (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
			    (const_int 0))
			(ne (symbol_ref "optimize_function_for_size_p (cfun)")
			    (const_int 0)))
		   (const_string "V4SF")
		   (const_string "TI"))]
	       (const_string "DI")))]
)




;; Break a TF move that does not involve an SSE register into
;; word-sized moves.
(define_split
  [(set (match_operand:TF 0 "nonimmediate_operand" "")
	(match_operand:TF 1 "general_operand" ""))]
  "reload_completed
   && !(SSE_REG_P (operands[0]) || SSE_REG_P (operands[1]))"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")
;; General XFmode move, speed-optimized variant.  Alternatives 0-1 are
;; x87 register/memory moves, alternative 2 loads an x87 constant
;; (fldz/fld1/...), and alternatives 3-4 emit "#" for the XF split
;; below.  The condition rejects arbitrary CONST_DOUBLE sources unless
;; they can go straight to memory or we are already in/after reload.
(define_insn "*movxf_internal"
[(set(match_operand:XF 0 "nonimmediate_operand" "=f,m,f,r,o")(match_operand:XF 1 "general_operand" "fm,f,G,roF,Fr"))]
"optimize_function_for_speed_p (cfun)
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))
   && (reload_in_progress || reload_completed
       || GET_CODE (operands[1]) != CONST_DOUBLE
       || memory_operand (operands[0], XFmode))"
{
  switch (which_alternative)
    {
    case 0:
    case 1:
      return output_387_reg_move (insn, operands);

    case 2:
      return standard_80387_constant_opcode (operands[1]);

    case 3: case 4:
      return "#";

    default:
      gcc_unreachable ();
    }
}
  [(set_attr "type" "fmov,fmov,fmov,multi,multi")
   (set_attr "mode" "XF,XF,XF,SI,SI")]
)




;; Size-optimized XFmode move.  Same alternatives as *movxf_internal,
;; but the condition additionally accepts CONST_DOUBLE sources that are
;; standard x87 constants (loadable with a single opcode).
(define_insn "*movxf_internal_nointeger"
[(set(match_operand:XF 0 "nonimmediate_operand" "=f,m,f,*r,o")(match_operand:XF 1 "general_operand" "fm,f,G,*roF,F*r"))]
"optimize_function_for_size_p (cfun)
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))
   && (reload_in_progress || reload_completed
       || standard_80387_constant_p (operands[1])
       || GET_CODE (operands[1]) != CONST_DOUBLE
       || memory_operand (operands[0], XFmode))"
{
  switch (which_alternative)
    {
    case 0:
    case 1:
      return output_387_reg_move (insn, operands);

    case 2:
      return standard_80387_constant_opcode (operands[1]);

    case 3: case 4:
      return "#";
    default:
      gcc_unreachable ();
    }
}
  [(set_attr "type" "fmov,fmov,fmov,multi,multi")
  (set_attr "mode" "XF,XF,XF,SI,SI")]
)




;; Break an XF move that touches no x87 register (directly or via a
;; SUBREG) into word-sized moves.
(define_split
  [(set (match_operand:XF 0 "nonimmediate_operand" "")
	(match_operand:XF 1 "general_operand" ""))]
  "reload_completed
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))
   && ! (FP_REG_P (operands[0]) ||
	 (GET_CODE (operands[0]) == SUBREG
	  && FP_REG_P (SUBREG_REG (operands[0]))))
   && ! (FP_REG_P (operands[1]) ||
	 (GET_CODE (operands[1]) == SUBREG
	  && FP_REG_P (SUBREG_REG (operands[1]))))"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")
;; General DFmode move, 64-bit targets.  Alternatives:
;;   0-1  x87 register <-> register/memory
;;   2    x87 constant load
;;   3-4  integer register <-> memory via "#" (split elsewhere)
;;   5    zero an SSE register
;;   6-8  SSE register/memory moves
;;   9-10 SSE register <-> integer register (movd/movq)
;; The chosen mnemonic depends on the computed "mode" attribute below.
(define_insn "*movdf_internal_rex64"
[(set(match_operand:DF 0 "nonimmediate_operand" "=f,m,f,r  ,m ,Y2*x,Y2*x,Y2*x,m   ,Yi,r ")(match_operand:DF 1 "general_operand" "fm,f,G,rmF,Fr,C   ,Y2*x,m   ,Y2*x,r ,Yi"))]
  "TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))
   && (reload_in_progress || reload_completed
       || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
       || (!(TARGET_SSE2 && TARGET_SSE_MATH)
           && optimize_function_for_size_p (cfun)
	   && standard_80387_constant_p (operands[1]))
       || GET_CODE (operands[1]) != CONST_DOUBLE
       || memory_operand (operands[0], DFmode))"
{
  switch (which_alternative)
    {
    case 0:
    case 1:
      return output_387_reg_move (insn, operands);

    case 2:
      return standard_80387_constant_opcode (operands[1]);

    case 3:
    case 4:
      return "#";

    case 5:
      /* Zeroing; opcode choice follows the mode attribute.  */
      switch (get_attr_mode (insn))
	{
	case MODE_V4SF:
	  return "%vxorps\t%0, %d0";
	case MODE_V2DF:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "%vxorps\t%0, %d0";
	  else
	    return "%vxorpd\t%0, %d0";
	case MODE_TI:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "%vxorps\t%0, %d0";
	  else
	    return "%vpxor\t%0, %d0";
	default:
	  gcc_unreachable ();
	}
    case 6:
    case 7:
    case 8:
      /* SSE moves; opcode choice follows the mode attribute.  */
      switch (get_attr_mode (insn))
	{
	case MODE_V4SF:
	  return "%vmovaps\t{%1, %0|%0, %1}";
	case MODE_V2DF:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "%vmovaps\t{%1, %0|%0, %1}";
	  else
	    return "%vmovapd\t{%1, %0|%0, %1}";
	case MODE_TI:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "%vmovaps\t{%1, %0|%0, %1}";
	  else
	    return "%vmovdqa\t{%1, %0|%0, %1}";
	case MODE_DI:
	  return "%vmovq\t{%1, %0|%0, %1}";
	case MODE_DF:
	  if (TARGET_AVX)
	    {
	      if (REG_P (operands[0]) && REG_P (operands[1]))
		return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
	      else
		return "vmovsd\t{%1, %0|%0, %1}";
	    }
	  else
	    return "movsd\t{%1, %0|%0, %1}";
	case MODE_V1DF:
	  return "%vmovlpd\t{%1, %d0|%d0, %1}";
	case MODE_V2SF:
	  return "%vmovlps\t{%1, %d0|%d0, %1}";
	default:
	  gcc_unreachable ();
	}

    case 9:
    case 10:
    return "%vmovd\t{%1, %0|%0, %1}";

    default:
      gcc_unreachable();
    }
}
  [(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov,ssemov,ssemov")
   (set (attr "prefix")
     (if_then_else (eq_attr "alternative" "0,1,2,3,4")
       (const_string "orig")
       (const_string "maybe_vex")))
   (set (attr "prefix_data16")
     (if_then_else (eq_attr "mode" "V1DF")
       (const_string "1")
       (const_string "*")))
   (set (attr "mode")
        (cond [(eq_attr "alternative" "0,1,2")
		 (const_string "DF")
	       (eq_attr "alternative" "3,4,9,10")
		 (const_string "DI")

	       /* For SSE1, we have many fewer alternatives.  */
	       (eq (symbol_ref "TARGET_SSE2") (const_int 0))
		 (cond [(eq_attr "alternative" "5,6")
			  (const_string "V4SF")
		       ]
		   (const_string "V2SF"))

	       /* xorps is one byte shorter.  */
	       (eq_attr "alternative" "5")
		 (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
			    (const_int 0))
			  (const_string "V4SF")
			(ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
			    (const_int 0))
			  (const_string "TI")
		       ]
		       (const_string "V2DF"))

	       /* For architectures resolving dependencies on
		  whole SSE registers use APD move to break dependency
		  chains, otherwise use short move to avoid extra work.

		  movaps encodes one byte shorter.  */
	       (eq_attr "alternative" "6")
		 (cond
		   [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
		        (const_int 0))
		      (const_string "V4SF")
		    (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
		        (const_int 0))
		      (const_string "V2DF")
		   ]
		   (const_string "DF"))
	       /* For architectures resolving dependencies on register
		  parts we may avoid extra work to zero out upper part
		  of register.  */
	       (eq_attr "alternative" "7")
		 (if_then_else
		   (ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
		       (const_int 0))
		   (const_string "V1DF")
		   (const_string "DF"))
	      ]
	      (const_string "DF")))]

)




;; General DFmode move, 32-bit targets with integer DFmode moves
;; enabled and optimizing for speed.  Alternatives:
;;   0-1  x87 register <-> register/memory
;;   2    x87 constant load
;;   3-4  integer register <-> memory via "#" (split elsewhere)
;;   5    zero an SSE register
;;   6-8  SSE register/memory moves
(define_insn "*movdf_internal"
[(set(match_operand:DF 0 "nonimmediate_operand" "=f,m,f,r  ,o ,Y2*x,Y2*x,Y2*x,m   ")(match_operand:DF 1 "general_operand" "fm,f,G,roF,Fr,C   ,Y2*x,m   ,Y2*x"))]
  "!(MEM_P (operands[0]) && MEM_P (operands[1]))
   && optimize_function_for_speed_p (cfun)
   && TARGET_INTEGER_DFMODE_MOVES
   && (reload_in_progress || reload_completed
       || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
       || (!(TARGET_SSE2 && TARGET_SSE_MATH)
           && optimize_function_for_size_p (cfun)
	   && standard_80387_constant_p (operands[1]))
       || GET_CODE (operands[1]) != CONST_DOUBLE
       || memory_operand (operands[0], DFmode))"
{
  switch (which_alternative)
    {
    case 0:
    case 1:
      return output_387_reg_move (insn, operands);

    case 2:
      return standard_80387_constant_opcode (operands[1]);

    case 3:
    case 4:
      return "#";

    case 5:
      /* Zeroing; opcode choice follows the mode attribute.  */
      switch (get_attr_mode (insn))
	{
	case MODE_V4SF:
	  return "xorps\t%0, %0";
	case MODE_V2DF:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "xorps\t%0, %0";
	  else
	    return "xorpd\t%0, %0";
	case MODE_TI:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "xorps\t%0, %0";
	  else
	    return "pxor\t%0, %0";
	default:
	  gcc_unreachable ();
	}
    case 6:
    case 7:
    case 8:
      /* SSE moves; opcode choice follows the mode attribute.  */
      switch (get_attr_mode (insn))
	{
	case MODE_V4SF:
	  return "movaps\t{%1, %0|%0, %1}";
	case MODE_V2DF:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "movaps\t{%1, %0|%0, %1}";
	  else
	    return "movapd\t{%1, %0|%0, %1}";
	case MODE_TI:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "movaps\t{%1, %0|%0, %1}";
	  else
	    return "movdqa\t{%1, %0|%0, %1}";
	case MODE_DI:
	  return "movq\t{%1, %0|%0, %1}";
	case MODE_DF:
	  return "movsd\t{%1, %0|%0, %1}";
	case MODE_V1DF:
	  return "movlpd\t{%1, %0|%0, %1}";
	case MODE_V2SF:
	  return "movlps\t{%1, %0|%0, %1}";
	default:
	  gcc_unreachable ();
	}

    default:
      gcc_unreachable();
    }
}
  [(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov")
   (set (attr "prefix_data16")
     (if_then_else (eq_attr "mode" "V1DF")
       (const_string "1")
       (const_string "*")))
   (set (attr "mode")
        (cond [(eq_attr "alternative" "0,1,2")
		 (const_string "DF")
	       (eq_attr "alternative" "3,4")
		 (const_string "SI")

	       /* For SSE1, we have many fewer alternatives.  */
	       (eq (symbol_ref "TARGET_SSE2") (const_int 0))
		 (cond [(eq_attr "alternative" "5,6")
			  (const_string "V4SF")
		       ]
		   (const_string "V2SF"))

	       /* xorps is one byte shorter.  */
	       (eq_attr "alternative" "5")
		 (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
			    (const_int 0))
			  (const_string "V4SF")
			(ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
			    (const_int 0))
			  (const_string "TI")
		       ]
		       (const_string "V2DF"))

	       /* For architectures resolving dependencies on
		  whole SSE registers use APD move to break dependency
		  chains, otherwise use short move to avoid extra work.

		  movaps encodes one byte shorter.  */
	       (eq_attr "alternative" "6")
		 (cond
		   [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
		        (const_int 0))
		      (const_string "V4SF")
		    (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
		        (const_int 0))
		      (const_string "V2DF")
		   ]
		   (const_string "DF"))
	       /* For architectures resolving dependencies on register
		  parts we may avoid extra work to zero out upper part
		  of register.  */
	       (eq_attr "alternative" "7")
		 (if_then_else
		   (ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
		       (const_int 0))
		   (const_string "V1DF")
		   (const_string "DF"))
	      ]
	      (const_string "DF")))]
)




;; General DFmode move, 32-bit targets when integer DFmode moves are
;; disabled or we optimize for size.  Same alternative layout as
;; *movdf_internal, but with AVX-aware (%v-prefixed) SSE templates and
;; a stricter insn condition about CONST_DOUBLE sources and memory
;; destinations (TARGET_MEMORY_MISMATCH_STALL).
(define_insn "*movdf_internal_nointeger"
[(set(match_operand:DF 0 "nonimmediate_operand" "=f,m,f,*r  ,o  ,Y2*x,Y2*x,Y2*x ,m  ")(match_operand:DF 1 "general_operand" "fm,f,G,*roF,*Fr,C   ,Y2*x,mY2*x,Y2*x"))]
  "!(MEM_P (operands[0]) && MEM_P (operands[1]))
   && ((optimize_function_for_size_p (cfun)
       || !TARGET_INTEGER_DFMODE_MOVES) && !TARGET_64BIT)
   && (reload_in_progress || reload_completed
       || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
       || (!(TARGET_SSE2 && TARGET_SSE_MATH)
           && optimize_function_for_size_p (cfun)
           && !memory_operand (operands[0], DFmode)
	   && standard_80387_constant_p (operands[1]))
       || GET_CODE (operands[1]) != CONST_DOUBLE
       || ((optimize_function_for_size_p (cfun)
            || !TARGET_MEMORY_MISMATCH_STALL
	    || reload_in_progress || reload_completed)
 	   && memory_operand (operands[0], DFmode)))"
{
  switch (which_alternative)
    {
    case 0:
    case 1:
      return output_387_reg_move (insn, operands);

    case 2:
      return standard_80387_constant_opcode (operands[1]);

    case 3:
    case 4:
      return "#";

    case 5:
      /* Zeroing; opcode choice follows the mode attribute.  */
      switch (get_attr_mode (insn))
	{
	case MODE_V4SF:
	  return "%vxorps\t%0, %d0";
	case MODE_V2DF:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "%vxorps\t%0, %d0";
	  else
	    return "%vxorpd\t%0, %d0";
	case MODE_TI:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "%vxorps\t%0, %d0";
	  else
	    return "%vpxor\t%0, %d0";
	default:
	  gcc_unreachable ();
	}
    case 6:
    case 7:
    case 8:
      /* SSE moves; opcode choice follows the mode attribute.  */
      switch (get_attr_mode (insn))
	{
	case MODE_V4SF:
	  return "%vmovaps\t{%1, %0|%0, %1}";
	case MODE_V2DF:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "%vmovaps\t{%1, %0|%0, %1}";
	  else
	    return "%vmovapd\t{%1, %0|%0, %1}";
	case MODE_TI:
	  if (TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL)
	    return "%vmovaps\t{%1, %0|%0, %1}";
	  else
	    return "%vmovdqa\t{%1, %0|%0, %1}";
	case MODE_DI:
	  return "%vmovq\t{%1, %0|%0, %1}";
	case MODE_DF:
	  if (TARGET_AVX)
	    {
	      if (REG_P (operands[0]) && REG_P (operands[1]))
		return "vmovsd\t{%1, %0, %0|%0, %0, %1}";
	      else
		return "vmovsd\t{%1, %0|%0, %1}";
	    }
	  else
	    return "movsd\t{%1, %0|%0, %1}";
	case MODE_V1DF:
	  if (TARGET_AVX)
	    {
	      if (REG_P (operands[0]))
		return "vmovlpd\t{%1, %0, %0|%0, %0, %1}";
	      else
		return "vmovlpd\t{%1, %0|%0, %1}";
	    }
	  else
	    return "movlpd\t{%1, %0|%0, %1}";
	case MODE_V2SF:
	  if (TARGET_AVX)
	    {
	      if (REG_P (operands[0]))
		return "vmovlps\t{%1, %0, %0|%0, %0, %1}";
	      else
		return "vmovlps\t{%1, %0|%0, %1}";
	    }
	  else
	    return "movlps\t{%1, %0|%0, %1}";
	default:
	  gcc_unreachable ();
	}

    default:
      gcc_unreachable ();
    }
}
  [(set_attr "type" "fmov,fmov,fmov,multi,multi,sselog1,ssemov,ssemov,ssemov")
   (set (attr "prefix")
     (if_then_else (eq_attr "alternative" "0,1,2,3,4")
       (const_string "orig")
       (const_string "maybe_vex")))
   (set (attr "prefix_data16")
     (if_then_else (eq_attr "mode" "V1DF")
       (const_string "1")
       (const_string "*")))
   (set (attr "mode")
        (cond [(eq_attr "alternative" "0,1,2")
		 (const_string "DF")
	       (eq_attr "alternative" "3,4")
		 (const_string "SI")

	       /* For SSE1, we have many fewer alternatives.  */
	       (eq (symbol_ref "TARGET_SSE2") (const_int 0))
		 (cond [(eq_attr "alternative" "5,6")
			  (const_string "V4SF")
		       ]
		   (const_string "V2SF"))

	       /* xorps is one byte shorter.  */
	       (eq_attr "alternative" "5")
		 (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
			    (const_int 0))
			  (const_string "V4SF")
			(ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
			    (const_int 0))
			  (const_string "TI")
		       ]
		       (const_string "V2DF"))

	       /* For architectures resolving dependencies on
		  whole SSE registers use APD move to break dependency
		  chains, otherwise use short move to avoid extra work.

		  movaps encodes one byte shorter.  */
	       (eq_attr "alternative" "6")
		 (cond
		   [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
		        (const_int 0))
		      (const_string "V4SF")
		    (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
		        (const_int 0))
		      (const_string "V2DF")
		   ]
		   (const_string "DF"))
	       /* For architectures resolving dependencies on register
		  parts we may avoid extra work to zero out upper part
		  of register.  */
	       (eq_attr "alternative" "7")
		 (if_then_else
		   (ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
		       (const_int 0))
		   (const_string "V1DF")
		   (const_string "DF"))
	      ]
	      (const_string "DF")))]
)




;; Break a DF move that touches no FP register (x87 or SSE, directly
;; or via a SUBREG) into word-sized moves.
(define_split
  [(set (match_operand:DF 0 "nonimmediate_operand" "")
	(match_operand:DF 1 "general_operand" ""))]
  "reload_completed
   && !(MEM_P (operands[0]) && MEM_P (operands[1]))
   && ! (ANY_FP_REG_P (operands[0]) ||
	 (GET_CODE (operands[0]) == SUBREG
	  && ANY_FP_REG_P (SUBREG_REG (operands[0]))))
   && ! (ANY_FP_REG_P (operands[1]) ||
	 (GET_CODE (operands[1]) == SUBREG
	  && ANY_FP_REG_P (SUBREG_REG (operands[1]))))"
  [(const_int 0)]
  "ix86_split_long_move (operands); DONE;")
;; General SFmode move.  Alternatives:
;;   0-1   x87 register <-> register/memory
;;   2     x87 constant load
;;   3-4   integer register <-> memory (plain mov{l})
;;   5     zero an SSE register
;;   6-8   SSE register/memory moves
;;   9-11  MMX register <-> memory / MMX <-> MMX
;;   12-13 SSE register <-> integer register (movd)
;;   14-15 MMX register <-> integer register (movd)
(define_insn "*movsf_internal"
[(set(match_operand:SF 0 "nonimmediate_operand" "=f,m,f,r  ,m ,x,x,x ,m,!*y,!m,!*y,?Yi,?r,!*Ym,!r")(match_operand:SF 1 "general_operand" "fm,f,G,rmF,Fr,C,x,xm,x,m  ,*y,*y ,r  ,Yi,r   ,*Ym"))]
  "!(MEM_P (operands[0]) && MEM_P (operands[1]))
   && (reload_in_progress || reload_completed
       || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
       || (!TARGET_SSE_MATH && optimize_function_for_size_p (cfun)
	   && standard_80387_constant_p (operands[1]))
       || GET_CODE (operands[1]) != CONST_DOUBLE
       || memory_operand (operands[0], SFmode))"
{
  switch (which_alternative)
    {
    case 0:
    case 1:
      return output_387_reg_move (insn, operands);

    case 2:
      return standard_80387_constant_opcode (operands[1]);

    case 3:
    case 4:
      return "mov{l}\t{%1, %0|%0, %1}";
    case 5:
      /* Zero the destination register.  */
      if (get_attr_mode (insn) == MODE_TI)
	return "%vpxor\t%0, %d0";
      else
	return "%vxorps\t%0, %d0";
    case 6:
      if (get_attr_mode (insn) == MODE_V4SF)
	return "%vmovaps\t{%1, %0|%0, %1}";
      else
	return "%vmovss\t{%1, %d0|%d0, %1}";
    case 7:
      if (TARGET_AVX)
	return REG_P (operands[1]) ? "vmovss\t{%1, %0, %0|%0, %0, %1}"
				   : "vmovss\t{%1, %0|%0, %1}";
      else
	return "movss\t{%1, %0|%0, %1}";
    case 8:
      return "%vmovss\t{%1, %0|%0, %1}";

    case 9: case 10: case 14: case 15:
      return "movd\t{%1, %0|%0, %1}";
    case 12: case 13:
      return "%vmovd\t{%1, %0|%0, %1}";

    case 11:
      return "movq\t{%1, %0|%0, %1}";

    default:
      gcc_unreachable ();
    }
}
  [(set_attr "type" "fmov,fmov,fmov,imov,imov,sselog1,ssemov,ssemov,ssemov,mmxmov,mmxmov,mmxmov,ssemov,ssemov,mmxmov,mmxmov")
   (set (attr "prefix")
     (if_then_else (eq_attr "alternative" "5,6,7,8,12,13")
       (const_string "maybe_vex")
       (const_string "orig")))
   (set (attr "mode")
        (cond [(eq_attr "alternative" "3,4,9,10")
		 (const_string "SI")
	       (eq_attr "alternative" "5")
		 (if_then_else
		   (and (and (ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
			    	 (const_int 0))
			     (ne (symbol_ref "TARGET_SSE2")
				 (const_int 0)))
			(eq (symbol_ref "optimize_function_for_size_p (cfun)")
			    (const_int 0)))
		   (const_string "TI")
		   (const_string "V4SF"))
	       /* For architectures resolving dependencies on
		  whole SSE registers use APS move to break dependency
		  chains, otherwise use short move to avoid extra work.

		  Do the same for architectures resolving dependencies on
		  the parts.  While in DF mode it is better to always handle
		  just register parts, the SF mode is different due to lack
		  of instructions to load just part of the register.  It is
		  better to maintain the whole registers in single format
		  to avoid problems on using packed logical operations.  */
	       (eq_attr "alternative" "6")
		 (if_then_else
		   (ior (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
			    (const_int 0))
			(ne (symbol_ref "TARGET_SSE_SPLIT_REGS")
			    (const_int 0)))
		   (const_string "V4SF")
		   (const_string "SF"))
	       (eq_attr "alternative" "11")
		 (const_string "DI")]
	       (const_string "SF")))]
)




;; If an FP-mode register load reads memory holding a known constant,
;; load the constant directly — but only when the target register class
;; can materialize it cheaply (standard SSE / x87 constant); MMX never
;; qualifies.
(define_split 
[(set(match_operand 0 "register_operand" "")(match_operand 1 "memory_operand" ""))]
  "reload_completed
   && MEM_P (operands[1])
   && (GET_MODE (operands[0]) == TFmode
       || GET_MODE (operands[0]) == XFmode
       || GET_MODE (operands[0]) == DFmode
       || GET_MODE (operands[0]) == SFmode)
   && (operands[2] = find_constant_src (insn))"
[(set(match_dup 0)(match_dup 2))]
  {
  rtx c = operands[2];
  rtx r = operands[0];

  /* Look through a paradoxical/lowpart SUBREG to the real register.  */
  if (GET_CODE (r) == SUBREG)
    r = SUBREG_REG (r);

  if (SSE_REG_P (r))
    {
      if (!standard_sse_constant_p (c))
	FAIL;
    }
  else if (FP_REG_P (r))
    {
      if (!standard_80387_constant_p (c))
	FAIL;
    }
  else if (MMX_REG_P (r))
    FAIL;
}
)
;; Same transformation when the load is wrapped in a float_extend.
(define_split 
[(set(match_operand 0 "register_operand" "")(float_extend(match_operand 1 "memory_operand" "")))]
  "reload_completed
   && MEM_P (operands[1])
   && (GET_MODE (operands[0]) == TFmode
       || GET_MODE (operands[0]) == XFmode
       || GET_MODE (operands[0]) == DFmode
       || GET_MODE (operands[0]) == SFmode)
   && (operands[2] = find_constant_src (insn))"
[(set(match_dup 0)(match_dup 2))]
  {
  rtx c = operands[2];
  rtx r = operands[0];

  /* Look through a paradoxical/lowpart SUBREG to the real register.  */
  if (GET_CODE (r) == SUBREG)
    r = SUBREG_REG (r);

  if (SSE_REG_P (r))
    {
      if (!standard_sse_constant_p (c))
	FAIL;
    }
  else if (FP_REG_P (r))
    {
      if (!standard_80387_constant_p (c))
	FAIL;
    }
  else if (MMX_REG_P (r))
    FAIL;
}
)
;; Materialize -0.0 / -1.0 in an x87 register as a load of +0.0 / +1.0
;; followed by a negation; standard_80387_constant_p returns 8 and 9
;; for the negative forms of those constants.
(define_split
  [(set (match_operand:X87MODEF 0 "register_operand" "")
	(match_operand:X87MODEF 1 "immediate_operand" ""))]
  "reload_completed && FP_REGNO_P (REGNO (operands[0]))
   && (standard_80387_constant_p (operands[1]) == 8
       || standard_80387_constant_p (operands[1]) == 9)"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 0) (neg:X87MODEF (match_dup 0)))]
{
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
  if (real_isnegzero (&r))
    operands[1] = CONST0_RTX (<MODE>mode);
  else
    operands[1] = CONST1_RTX (<MODE>mode);
})
;; Exchange two x87 registers (fxch); one of them must be the stack
;; top, and the template names the other one.
(define_insn "swapxf"
  [(set (match_operand:XF 0 "register_operand" "+f")
	(match_operand:XF 1 "register_operand" "+f"))
   (set (match_dup 1)
	(match_dup 0))]
  "TARGET_80387"
{
  if (STACK_TOP_P (operands[0]))
    return "fxch\t%1";
  else
    return "fxch\t%0";
}
  [(set_attr "type" "fxch")
   (set_attr "mode" "XF")])




;; SF/DF variant of the x87 register exchange.
(define_insn "*swap<mode>"
  [(set (match_operand:MODEF 0 "fp_register_operand" "+f")
	(match_operand:MODEF 1 "fp_register_operand" "+f"))
   (set (match_dup 1)
	(match_dup 0))]
  "TARGET_80387 || reload_completed"
{
  if (STACK_TOP_P (operands[0]))
    return "fxch\t%1";
  else
    return "fxch\t%0";
}
  [(set_attr "type" "fxch")
   (set_attr "mode" "<MODE>")])




;; DImode setcc: split into a QImode setcc of the low byte followed by
;; a zero extension to the full register.
(define_insn_and_split "*setcc_di_1"
  [(set (match_operand:DI 0 "register_operand" "=q")
	(match_operator:DI 1 "ix86_comparison_operator"
	  [(reg FLAGS_REG) (const_int 0)]))]
  "TARGET_64BIT && !TARGET_PARTIAL_REG_STALL"
  "#"
  "&& reload_completed"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (zero_extend:DI (match_dup 2)))]
{
  PUT_MODE (operands[1], QImode);
  operands[2] = gen_lowpart (QImode, operands[0]);
})
;; SImode setcc when zero extension is done with AND: QImode setcc
;; followed by a flag-clobbering zero extension.
(define_insn_and_split "*setcc_si_1_and"
  [(set (match_operand:SI 0 "register_operand" "=q")
	(match_operator:SI 1 "ix86_comparison_operator"
	  [(reg FLAGS_REG) (const_int 0)]))
   (clobber (reg:CC FLAGS_REG))]
  "!TARGET_PARTIAL_REG_STALL
   && TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun)"
  "#"
  "&& reload_completed"
  [(set (match_dup 2) (match_dup 1))
   (parallel [(set (match_dup 0) (zero_extend:SI (match_dup 2)))
	      (clobber (reg:CC FLAGS_REG))])]
{
  PUT_MODE (operands[1], QImode);
  operands[2] = gen_lowpart (QImode, operands[0]);
})
;; SImode setcc when zero extension is done with movzbl: QImode setcc
;; followed by a plain zero extension (no flag clobber).
(define_insn_and_split "*setcc_si_1_movzbl"
  [(set (match_operand:SI 0 "register_operand" "=q")
	(match_operator:SI 1 "ix86_comparison_operator"
	  [(reg FLAGS_REG) (const_int 0)]))]
  "!TARGET_PARTIAL_REG_STALL
   && (!TARGET_ZERO_EXTEND_WITH_AND || optimize_function_for_size_p (cfun))"
  "#"
  "&& reload_completed"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (zero_extend:SI (match_dup 2)))]
{
  PUT_MODE (operands[1], QImode);
  operands[2] = gen_lowpart (QImode, operands[0]);
})
;; Plain QImode setcc into a byte register or memory.
(define_insn "*setcc_qi"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=qm")
	(match_operator:QI 1 "ix86_comparison_operator"
	  [(reg FLAGS_REG) (const_int 0)]))]
  ""
  "set%C1\t%0"
  [(set_attr "type" "setcc")
   (set_attr "mode" "QI")])




;; QImode setcc into the low part of a wider register (strict_low_part).
(define_insn "*setcc_qi_slp"
  [(set (strict_low_part
	  (match_operand:QI 0 "nonimmediate_operand" "+qm"))
	(match_operator:QI 1 "ix86_comparison_operator"
	  [(reg FLAGS_REG) (const_int 0)]))]
  ""
  "set%C1\t%0"
  [(set_attr "type" "setcc")
   (set_attr "mode" "QI")])




;; (ne (comparison) 0) is just the comparison itself; drop the
;; redundant NE wrapper and give the operator QImode.
(define_split
  [(set (match_operand:QI 0 "nonimmediate_operand" "")
	(ne:QI (match_operator 1 "ix86_comparison_operator"
		 [(reg FLAGS_REG) (const_int 0)])
	       (const_int 0)))]
  ""
  [(set (match_dup 0) (match_dup 1))]
  "PUT_MODE (operands[1], QImode);")

;; Same simplification for the strict_low_part destination form.
(define_split
  [(set (strict_low_part
	  (match_operand:QI 0 "nonimmediate_operand" ""))
	(ne:QI (match_operator 1 "ix86_comparison_operator"
		 [(reg FLAGS_REG) (const_int 0)])
	       (const_int 0)))]
  ""
  [(set (match_dup 0) (match_dup 1))]
  "PUT_MODE (operands[1], QImode);")
;; (eq (comparison) 0) is the reversed comparison.  Work on a copy of
;; the operator so that FAILing leaves the original insn intact, and
;; FAIL when the flags mode cannot represent the reversed condition.
(define_split 
[(set(match_operand:QI 0 "nonimmediate_operand" "")(eq:QI(match_operator 1 "ix86_comparison_operator" [(reg FLAGS_REG)(const_int 0)])(const_int 0)))]
  ""
[(set(match_dup 0)(match_dup 1))]
  {
  rtx new_op1 = copy_rtx (operands[1]);
  operands[1] = new_op1;
  PUT_MODE (new_op1, QImode);
  PUT_CODE (new_op1, ix86_reverse_condition (GET_CODE (new_op1),
					     GET_MODE (XEXP (new_op1, 0))));

  /* Make sure that (a) the CCmode we have for the flags is strong
     enough for the reversed compare or (b) we have a valid FP compare.  */
  if (! ix86_comparison_operator (new_op1, VOIDmode))
    FAIL;
}
)
;; Same reversal for the strict_low_part destination form.
(define_split 
[(set(strict_low_part(match_operand:QI 0 "nonimmediate_operand" ""))(eq:QI(match_operator 1 "ix86_comparison_operator" [(reg FLAGS_REG)(const_int 0)])(const_int 0)))]
  ""
[(set(match_dup 0)(match_dup 1))]
  {
  rtx new_op1 = copy_rtx (operands[1]);
  operands[1] = new_op1;
  PUT_MODE (new_op1, QImode);
  PUT_CODE (new_op1, ix86_reverse_condition (GET_CODE (new_op1),
					     GET_MODE (XEXP (new_op1, 0))));

  /* Make sure that (a) the CCmode we have for the flags is strong
     enough for the reversed compare or (b) we have a valid FP compare.  */
  if (! ix86_comparison_operator (new_op1, VOIDmode))
    FAIL;
}
)
;; AVX scalar FP compare producing an all-ones/all-zeros mask (vcmpss/
;; vcmpsd); three-operand form, so the destination need not match an
;; input.
(define_insn "*avx_setcc<mode>"
  [(set (match_operand:MODEF 0 "register_operand" "=x")
	(match_operator:MODEF 1 "avx_comparison_float_operator"
	  [(match_operand:MODEF 2 "register_operand" "x")
	   (match_operand:MODEF 3 "nonimmediate_operand" "xm")]))]
  "TARGET_AVX"
  "vcmp%D1s<ssemodefsuffix>\t{%3, %2, %0|%0, %2, %3}"
  [(set_attr "type" "ssecmp")
   (set_attr "prefix" "vex")
   (set_attr "length_immediate" "1")
   (set_attr "mode" "<MODE>")])




;; SSE scalar FP compare producing a mask (cmpss/cmpsd); two-operand
;; form, destination must match operand 2.
(define_insn "*sse_setcc<mode>"
  [(set (match_operand:MODEF 0 "register_operand" "=x")
	(match_operator:MODEF 1 "sse_comparison_operator"
	  [(match_operand:MODEF 2 "register_operand" "0")
	   (match_operand:MODEF 3 "nonimmediate_operand" "xm")]))]
  "SSE_FLOAT_MODE_P (<MODE>mode)"
  "cmp%D1s<ssemodefsuffix>\t{%3, %0|%0, %3}"
  [(set_attr "type" "ssecmp")
   (set_attr "length_immediate" "1")
   (set_attr "mode" "<MODE>")])




;; Conditional jump taken when the condition holds.  Length is 2 for a
;; short (rel8) jump, 6 for the near (rel32) form.
(define_insn "*jcc_1"
  [(set (pc)
	(if_then_else (match_operator 1 "ix86_comparison_operator"
			[(reg FLAGS_REG) (const_int 0)])
		      (label_ref (match_operand 0 "" ""))
		      (pc)))]
  ""
  "%+j%C1\t%l0"
  [(set_attr "type" "ibr")
   (set_attr "modrm" "0")
   (set (attr "length")
	(if_then_else (and (ge (minus (match_dup 0) (pc))
			       (const_int -126))
			   (lt (minus (match_dup 0) (pc))
			       (const_int 128)))
	  (const_int 2)
	  (const_int 6)))])




;; Inverted conditional jump (branch arms swapped); emitted with the
;; reversed condition code (%c1 instead of %C1).
(define_insn "*jcc_2"
  [(set (pc)
	(if_then_else (match_operator 1 "ix86_comparison_operator"
			[(reg FLAGS_REG) (const_int 0)])
		      (pc)
		      (label_ref (match_operand 0 "" ""))))]
  ""
  "%+j%c1\t%l0"
  [(set_attr "type" "ibr")
   (set_attr "modrm" "0")
   (set (attr "length")
	(if_then_else (and (ge (minus (match_dup 0) (pc))
			       (const_int -126))
			   (lt (minus (match_dup 0) (pc))
			       (const_int 128)))
	  (const_int 2)
	  (const_int 6)))])




;; A branch on (ne (comparison) 0) is just a branch on the comparison;
;; drop the NE wrapper and clear the operator's mode.
(define_split
  [(set (pc)
	(if_then_else (ne (match_operator 0 "ix86_comparison_operator"
			    [(reg FLAGS_REG) (const_int 0)])
			  (const_int 0))
		      (label_ref (match_operand 1 "" ""))
		      (pc)))]
  ""
  [(set (pc)
	(if_then_else (match_dup 0)
		      (label_ref (match_dup 1))
		      (pc)))]
  "PUT_MODE (operands[0], VOIDmode);")
(define_split 
[(set(pc)(if_then_else(eq(match_operator 0 "ix86_comparison_operator" [(reg FLAGS_REG)(const_int 0)])(const_int 0))(label_ref(match_operand 1 "" ""))(pc)))]
  ""
[(set(pc)(if_then_else(match_dup 0)(label_ref(match_dup 1))(pc)))]
  {
  rtx new_op0 = copy_rtx (operands[0]);
  operands[0] = new_op0;
  PUT_MODE (new_op0, VOIDmode);
  PUT_CODE (new_op0, ix86_reverse_condition (GET_CODE (new_op0),
					     GET_MODE (XEXP (new_op0, 0))));

  /* Make sure that (a) the CCmode we have for the flags is strong
     enough for the reversed compare or (b) we have a valid FP compare.  */
  if (! ix86_comparison_operator (new_op0, VOIDmode))
    FAIL;
}
)
;; Branch on a single extracted bit, split after reload into a BT-style
;; bit test setting CCC followed by a conditional jump.  The split
;; narrows the (zero_extend:SI ...) bit-position operand to <MODE>mode
;; and reverses the condition code.
(define_insn_and_split "*jcc_bt<mode>"
  [(set (pc)
	(if_then_else (match_operator 0 "bt_comparison_operator"
			[(zero_extract:SWI48
			   (match_operand:SWI48 1 "register_operand" "r")
			   (const_int 1)
			   (zero_extend:SI
			     (match_operand:QI 2 "register_operand" "r")))
			 (const_int 0)])
		      (label_ref (match_operand 3 "" ""))
		      (pc)))
   (clobber (reg:CC FLAGS_REG))]
  "TARGET_USE_BT || optimize_function_for_size_p (cfun)"
  "#"
  "&& 1"
  [(set (reg:CCC FLAGS_REG)
	(compare:CCC
	  (zero_extract:SWI48 (match_dup 1) (const_int 1) (match_dup 2))
	  (const_int 0)))
   (set (pc)
	(if_then_else (match_op_dup 0 [(reg:CCC FLAGS_REG) (const_int 0)])
		      (label_ref (match_dup 3))
		      (pc)))]
{
  operands[2] = simplify_gen_subreg (<MODE>mode, operands[2], QImode, 0);

  PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));
})

;; As above, but the bit position is masked with a constant; the insn
;; condition requires the mask to cover exactly the mode's bit-index
;; range, so the AND is redundant and dropped by the split.
(define_insn_and_split "*jcc_bt<mode>_mask"
  [(set (pc)
	(if_then_else (match_operator 0 "bt_comparison_operator"
			[(zero_extract:SWI48
			   (match_operand:SWI48 1 "register_operand" "r")
			   (const_int 1)
			   (and:SI
			     (match_operand:SI 2 "register_operand" "r")
			     (match_operand:SI 3 "const_int_operand" "n")))])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))
   (clobber (reg:CC FLAGS_REG))]
  "(TARGET_USE_BT || optimize_function_for_size_p (cfun))
   && (INTVAL (operands[3]) & (GET_MODE_BITSIZE (<MODE>mode)-1))
      == GET_MODE_BITSIZE (<MODE>mode)-1"
  "#"
  "&& 1"
  [(set (reg:CCC FLAGS_REG)
	(compare:CCC
	  (zero_extract:SWI48 (match_dup 1) (const_int 1) (match_dup 2))
	  (const_int 0)))
   (set (pc)
	(if_then_else (match_op_dup 0 [(reg:CCC FLAGS_REG) (const_int 0)])
		      (label_ref (match_dup 4))
		      (pc)))]
{
  operands[2] = simplify_gen_subreg (<MODE>mode, operands[2], SImode, 0);

  PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));
})
;; Branch on bit (x >> n) & 1 written as shift-and-mask; split into the
;; equivalent zero_extract bit test on CCC plus a conditional jump,
;; with the condition reversed.
(define_insn_and_split "*jcc_btsi_1"
  [(set (pc)
	(if_then_else (match_operator 0 "bt_comparison_operator"
			[(and:SI
			   (lshiftrt:SI
			     (match_operand:SI 1 "register_operand" "r")
			     (match_operand:QI 2 "register_operand" "r"))
			   (const_int 1))
			 (const_int 0)])
		      (label_ref (match_operand 3 "" ""))
		      (pc)))
   (clobber (reg:CC FLAGS_REG))]
  "TARGET_USE_BT || optimize_function_for_size_p (cfun)"
  "#"
  "&& 1"
  [(set (reg:CCC FLAGS_REG)
	(compare:CCC
	  (zero_extract:SI (match_dup 1) (const_int 1) (match_dup 2))
	  (const_int 0)))
   (set (pc)
	(if_then_else (match_op_dup 0 [(reg:CCC FLAGS_REG) (const_int 0)])
		      (label_ref (match_dup 3))
		      (pc)))]
{
  operands[2] = simplify_gen_subreg (SImode, operands[2], QImode, 0);

  PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));
})

;; As above but the shift count is (subreg:QI (and:SI count 0x1f) 0);
;; since the mask must be exactly 0x1f (checked in the condition), the
;; AND is dropped when forming the zero_extract.
(define_insn_and_split "*jcc_btsi_mask_1"
  [(set (pc)
	(if_then_else (match_operator 0 "bt_comparison_operator"
			[(and:SI
			   (lshiftrt:SI
			     (match_operand:SI 1 "register_operand" "r")
			     (subreg:QI
			       (and:SI
				 (match_operand:SI 2 "register_operand" "r")
				 (match_operand:SI 3 "const_int_operand" "n")) 0))
			   (const_int 1))
			 (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))
   (clobber (reg:CC FLAGS_REG))]
  "(TARGET_USE_BT || optimize_function_for_size_p (cfun))
   && (INTVAL (operands[3]) & 0x1f) == 0x1f"
  "#"
  "&& 1"
  [(set (reg:CCC FLAGS_REG)
	(compare:CCC
	  (zero_extract:SI (match_dup 1) (const_int 1) (match_dup 2))
	  (const_int 0)))
   (set (pc)
	(if_then_else (match_op_dup 0 [(reg:CCC FLAGS_REG) (const_int 0)])
		      (label_ref (match_dup 4))
		      (pc)))]
  "PUT_CODE (operands[0], reverse_condition (GET_CODE (operands[0])));")
;; x87 floating-point conditional branches for targets without CMOV.
;; All five patterns emit "#" and are broken up by the fp-branch splits
;; below; each clobbers FPSR/FLAGS and scratches %ax for FNSTSW-style
;; sequences (NOTE: exact split strategy lives in ix86_split_fp_branch).

;; Compare reg with reg-or-mem (SFmode/DFmode), branch on true.
(define_insn "*fp_jcc_1_387"
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_fp_comparison_operator"
			[(match_operand 1 "register_operand" "f")
			 (match_operand 2 "nonimmediate_operand" "fm")])
		      (label_ref (match_operand 3 "" ""))
		      (pc)))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 4 "=a"))]
  "TARGET_80387
   && (GET_MODE (operands[1]) == SFmode || GET_MODE (operands[1]) == DFmode)
   && GET_MODE (operands[1]) == GET_MODE (operands[2])
   && SELECT_CC_MODE (GET_CODE (operands[0]),
		      operands[1], operands[2]) == CCFPmode
   && !TARGET_CMOVE"
  "#")

;; Same as *fp_jcc_1_387 with the branch arms swapped (branch on false).
(define_insn "*fp_jcc_1r_387"
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_fp_comparison_operator"
			[(match_operand 1 "register_operand" "f")
			 (match_operand 2 "nonimmediate_operand" "fm")])
		      (pc)
		      (label_ref (match_operand 3 "" ""))))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 4 "=a"))]
  "TARGET_80387
   && (GET_MODE (operands[1]) == SFmode || GET_MODE (operands[1]) == DFmode)
   && GET_MODE (operands[1]) == GET_MODE (operands[2])
   && SELECT_CC_MODE (GET_CODE (operands[0]),
		      operands[1], operands[2]) == CCFPmode
   && !TARGET_CMOVE"
  "#")

;; Register-register compare for any x87 float mode, branch on true.
(define_insn "*fp_jcc_2_387"
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_fp_comparison_operator"
			[(match_operand 1 "register_operand" "f")
			 (match_operand 2 "register_operand" "f")])
		      (label_ref (match_operand 3 "" ""))
		      (pc)))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 4 "=a"))]
  "X87_FLOAT_MODE_P (GET_MODE (operands[1]))
   && GET_MODE (operands[1]) == GET_MODE (operands[2])
   && !TARGET_CMOVE"
  "#")

;; Same as *fp_jcc_2_387 with the branch arms swapped (branch on false).
(define_insn "*fp_jcc_2r_387"
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_fp_comparison_operator"
			[(match_operand 1 "register_operand" "f")
			 (match_operand 2 "register_operand" "f")])
		      (pc)
		      (label_ref (match_operand 3 "" ""))))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 4 "=a"))]
  "X87_FLOAT_MODE_P (GET_MODE (operands[1]))
   && GET_MODE (operands[1]) == GET_MODE (operands[2])
   && !TARGET_CMOVE"
  "#")

;; Compare against constant zero, branch on true.
(define_insn "*fp_jcc_3_387"
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_fp_comparison_operator"
			[(match_operand 1 "register_operand" "f")
			 (match_operand 2 "const0_operand" "")])
		      (label_ref (match_operand 3 "" ""))
		      (pc)))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 4 "=a"))]
  "X87_FLOAT_MODE_P (GET_MODE (operands[1]))
   && GET_MODE (operands[1]) == GET_MODE (operands[2])
   && SELECT_CC_MODE (GET_CODE (operands[0]),
		      operands[1], operands[2]) == CCFPmode
   && !TARGET_CMOVE"
  "#")




;; Split x87 FP branches after reload, delegating code generation to
;; ix86_split_fp_branch.  First variant: no scratch register available.
(define_split
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_fp_comparison_operator"
			[(match_operand 1 "register_operand" "")
			 (match_operand 2 "nonimmediate_operand" "")])
		      (match_operand 3 "" "")
		      (match_operand 4 "" "")))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))]
  "reload_completed"
  [(const_int 0)]
{
  ix86_split_fp_branch (GET_CODE (operands[0]), operands[1], operands[2],
	                operands[3], operands[4], NULL_RTX, NULL_RTX);
  DONE;
})

;; Second variant: an HImode scratch (operand 5) is available and is
;; passed through to ix86_split_fp_branch.
(define_split
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_fp_comparison_operator"
			[(match_operand 1 "register_operand" "")
			 (match_operand 2 "general_operand" "")])
		      (match_operand 3 "" "")
		      (match_operand 4 "" "")))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 5 "=a"))]
  "reload_completed"
  [(const_int 0)]
{
  ix86_split_fp_branch (GET_CODE (operands[0]), operands[1], operands[2],
	     		operands[3], operands[4], operands[5], NULL_RTX);
  DONE;
})
;; x87 branch comparing a register against an integer operand converted
;; to float (FIxxx-style operand); the comparison is written swapped, so
;; the splits below call ix86_split_fp_branch with swap_condition.
(define_insn "*fp_jcc_4_<mode>_387"
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_swapped_fp_comparison_operator"
			[(match_operator 1 "float_operator"
			   [(match_operand:X87MODEI12 2 "nonimmediate_operand" "m,?r")])
			 (match_operand 3 "register_operand" "f,f")])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 5 "=a,a"))]
  "X87_FLOAT_MODE_P (GET_MODE (operands[3]))
   && (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))
   && GET_MODE (operands[1]) == GET_MODE (operands[3])
   && ix86_fp_compare_mode (swap_condition (GET_CODE (operands[0]))) == CCFPmode
   && !TARGET_CMOVE"
  "#")

;; Split for the memory-operand alternative: rebuild the FLOAT rtx and
;; hand off to ix86_split_fp_branch with the condition swapped back.
(define_split
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_swapped_fp_comparison_operator"
			[(match_operator 1 "float_operator"
			   [(match_operand:X87MODEI12 2 "memory_operand" "")])
			 (match_operand 3 "register_operand" "")])
		      (match_operand 4 "" "")
		      (match_operand 5 "" "")))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 6 "=a"))]
  "reload_completed"
  [(const_int 0)]
{
  operands[7] = gen_rtx_FLOAT (GET_MODE (operands[1]), operands[2]);

  ix86_split_fp_branch (swap_condition (GET_CODE (operands[0])),
			operands[3], operands[7],
			operands[4], operands[5], operands[6], NULL_RTX);
  DONE;
})

;; Split for the register-operand alternative: first spill the integer
;; register to memory via ix86_force_to_memory, then proceed as above.
(define_split
  [(set (pc)
	(if_then_else (match_operator 0 "ix86_swapped_fp_comparison_operator"
			[(match_operator 1 "float_operator"
			   [(match_operand:X87MODEI12 2 "register_operand" "")])
			 (match_operand 3 "register_operand" "")])
		      (match_operand 4 "" "")
		      (match_operand 5 "" "")))
   (clobber (reg:CCFP FPSR_REG))
   (clobber (reg:CCFP FLAGS_REG))
   (clobber (match_scratch:HI 6 "=a"))]
  "reload_completed"
  [(const_int 0)]
{
  operands[7] = ix86_force_to_memory (GET_MODE (operands[2]), operands[2]);
  operands[7] = gen_rtx_FLOAT (GET_MODE (operands[1]), operands[7]);

  ix86_split_fp_branch (swap_condition (GET_CODE (operands[0])),
			operands[3], operands[7],
			operands[4], operands[5], operands[6], operands[2]);
  DONE;
})
;; Unconditional direct jump.  2 bytes when the displacement fits in a
;; signed byte, 5 bytes otherwise.
(define_insn "jump"
  [(set (pc)
	(label_ref (match_operand 0 "" "")))]
  ""
  "jmp\t%l0"
  [(set_attr "type" "ibr")
   (set (attr "length")
	   (if_then_else (and (ge (minus (match_dup 0) (pc))
				  (const_int -126))
			      (lt (minus (match_dup 0) (pc))
				  (const_int 128)))
	     (const_int 2)
	     (const_int 5)))
   (set_attr "modrm" "0")])




;; Jump through a register or memory operand.
(define_expand "indirect_jump"
  [(set (pc)
	(match_operand 0 "nonimmediate_operand" ""))]
  ""
  "")

(define_insn "*indirect_jump"
  [(set (pc)
	(match_operand:P 0 "nonimmediate_operand" "rm"))]
  ""
  "jmp\t%A0"
  [(set_attr "type" "ibr")
   (set_attr "length_immediate" "0")])




;; Jump-table dispatch.  In PIC code the table entries are stored
;; relative (GOT on 32-bit, PC on 64-bit); the expander converts the
;; loaded relative entry to an absolute address before jumping.
(define_expand "tablejump"
  [(parallel [(set (pc)
		   (match_operand 0 "nonimmediate_operand" ""))
	      (use (label_ref (match_operand 1 "" "")))])]
  ""
{
  /* In PIC mode, the table entries are stored GOT (32-bit) or PC (64-bit)
     relative.  Convert the relative address to an absolute address.  */
  if (flag_pic)
    {
      rtx op0, op1;
      enum rtx_code code;

      /* We can't use @GOTOFF for text labels on VxWorks;
	 see gotoff_operand.  */
      if (TARGET_64BIT || TARGET_VXWORKS_RTP)
	{
	  code = PLUS;
	  op0 = operands[0];
	  op1 = gen_rtx_LABEL_REF (Pmode, operands[1]);
	}
      else if (TARGET_MACHO || HAVE_AS_GOTOFF_IN_DATA)
	{
	  code = PLUS;
	  op0 = operands[0];
	  op1 = pic_offset_table_rtx;
	}
      else
	{
	  code = MINUS;
	  op0 = pic_offset_table_rtx;
	  op1 = operands[0];
	}

      operands[0] = expand_simple_binop (Pmode, code, op0, op1, NULL_RTX, 0,
					 OPTAB_DIRECT);
    }
})

;; Indirect jump carrying a (use label) so the jump table stays live.
(define_insn "*tablejump_1"
  [(set (pc)
	(match_operand:P 0 "nonimmediate_operand" "rm"))
   (use (label_ref (match_operand 1 "" "")))]
  ""
  "jmp\t%A0"
  [(set_attr "type" "ibr")
   (set_attr "length_immediate" "0")])




;; Convert "compare; setcc %b1; movzx %1, %3" into "compare; xor %3,%3;
;; setcc into the low part of %3", avoiding the zero-extension.  Valid
;; when %1 dies (or equals %3) and %3 does not feed the compare.
(define_peephole2
  [(set (reg FLAGS_REG) (match_operand 0 "" ""))
   (set (match_operand:QI 1 "register_operand" "")
	(match_operator:QI 2 "ix86_comparison_operator"
	  [(reg FLAGS_REG) (const_int 0)]))
   (set (match_operand 3 "q_regs_operand" "")
	(zero_extend (match_dup 1)))]
  "(peep2_reg_dead_p (3, operands[1])
    || operands_match_p (operands[1], operands[3]))
   && ! reg_overlap_mentioned_p (operands[3], operands[0])"
  [(set (match_dup 4) (match_dup 0))
   (set (strict_low_part (match_dup 5))
	(match_dup 2))]
{
  operands[4] = gen_rtx_REG (GET_MODE (operands[0]), FLAGS_REG);
  operands[5] = gen_lowpart (QImode, operands[3]);
  ix86_expand_clear (operands[3]);
})

;; Same transformation when the zero-extension itself clobbers flags
;; (e.g. it would be done with a flag-clobbering insn).
(define_peephole2
  [(set (reg FLAGS_REG) (match_operand 0 "" ""))
   (set (match_operand:QI 1 "register_operand" "")
	(match_operator:QI 2 "ix86_comparison_operator"
	  [(reg FLAGS_REG) (const_int 0)]))
   (parallel [(set (match_operand 3 "q_regs_operand" "")
		   (zero_extend (match_dup 1)))
	      (clobber (reg:CC FLAGS_REG))])]
  "(peep2_reg_dead_p (3, operands[1])
    || operands_match_p (operands[1], operands[3]))
   && ! reg_overlap_mentioned_p (operands[3], operands[0])"
  [(set (match_dup 4) (match_dup 0))
   (set (strict_low_part (match_dup 5))
	(match_dup 2))]
{
  operands[4] = gen_rtx_REG (GET_MODE (operands[0]), FLAGS_REG);
  operands[5] = gen_lowpart (QImode, operands[3]);
  ix86_expand_clear (operands[3]);
})
;; Call that pops its own stack arguments on return (32-bit only).
;; operands[2] is supplied by the standard call_pop named-pattern
;; interface even though it does not appear in the RTL template.
(define_expand "call_pop"
  [(parallel [(call (match_operand:QI 0 "" "")
		    (match_operand:SI 1 "" ""))
	      (set (reg:SI SP_REG)
		   (plus:SI (reg:SI SP_REG)
			    (match_operand:SI 3 "" "")))])]
  "!TARGET_64BIT"
{
  ix86_expand_call (NULL, operands[0], operands[1],
		    operands[2], operands[3], 0);
  DONE;
})

;; call_pop to a constant address, wrapped with the vzeroupper marker;
;; split after reload via ix86_split_call_vzeroupper.
(define_insn_and_split "*call_pop_0_vzeroupper"
  [(parallel
    [(call (mem:QI (match_operand:SI 0 "constant_call_address_operand" ""))
	   (match_operand:SI 1 "" ""))
     (set (reg:SI SP_REG)
	  (plus:SI (reg:SI SP_REG)
		   (match_operand:SI 2 "immediate_operand" "")))])
   (unspec [(match_operand 3 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && !TARGET_64BIT"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
  [(set_attr "type" "call")])

;; call_pop to a constant address; emitted as jmp for sibling calls.
(define_insn "*call_pop_0"
  [(call (mem:QI (match_operand:SI 0 "constant_call_address_operand" ""))
	 (match_operand:SI 1 "" ""))
   (set (reg:SI SP_REG)
	(plus:SI (reg:SI SP_REG)
		 (match_operand:SI 2 "immediate_operand" "")))]
  "!TARGET_64BIT"
{
  if (SIBLING_CALL_P (insn))
    return "jmp\t%P0";
  else
    return "call\t%P0";
}
  [(set_attr "type" "call")])




;; General call_pop (register/symbol/memory callee), vzeroupper variant.
(define_insn_and_split "*call_pop_1_vzeroupper"
  [(parallel
    [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lsm"))
	   (match_operand:SI 1 "" ""))
     (set (reg:SI SP_REG)
	  (plus:SI (reg:SI SP_REG)
		   (match_operand:SI 2 "immediate_operand" "i")))])
   (unspec [(match_operand 3 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
  [(set_attr "type" "call")])

;; General call_pop; direct form (%P0) for constant addresses,
;; indirect (%A0) otherwise.
(define_insn "*call_pop_1"
  [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lsm"))
	 (match_operand:SI 1 "" ""))
   (set (reg:SI SP_REG)
	(plus:SI (reg:SI SP_REG)
		 (match_operand:SI 2 "immediate_operand" "i")))]
  "!TARGET_64BIT && !SIBLING_CALL_P (insn)"
{
  if (constant_call_address_operand (operands[0], Pmode))
    return "call\t%P0";
  return "call\t%A0";
}
  [(set_attr "type" "call")])

;; Sibling-call counterpart of call_pop, vzeroupper variant.
(define_insn_and_split "*sibcall_pop_1_vzeroupper"
  [(parallel
    [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "s,U"))
	   (match_operand:SI 1 "" ""))
     (set (reg:SI SP_REG)
	  (plus:SI (reg:SI SP_REG)
		   (match_operand:SI 2 "immediate_operand" "i,i")))])
   (unspec [(match_operand 3 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[3]); DONE;"
  [(set_attr "type" "call")])

;; Sibling-call counterpart of call_pop: emitted as a jump.
(define_insn "*sibcall_pop_1"
  [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "s,U"))
	 (match_operand:SI 1 "" ""))
   (set (reg:SI SP_REG)
	(plus:SI (reg:SI SP_REG)
		 (match_operand:SI 2 "immediate_operand" "i,i")))]
  "!TARGET_64BIT && SIBLING_CALL_P (insn)"
  "@
   jmp\t%P0
   jmp\t%A0"
  [(set_attr "type" "call")])




;; Standard named patterns for plain and sibling calls; both delegate
;; to ix86_expand_call (last argument selects sibcall).
(define_expand "call"
  [(call (match_operand:QI 0 "" "")
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))]
  ""
{
  ix86_expand_call (NULL, operands[0], operands[1], operands[2], NULL, 0);
  DONE;
})

(define_expand "sibcall"
  [(call (match_operand:QI 0 "" "")
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))]
  ""
{
  ix86_expand_call (NULL, operands[0], operands[1], operands[2], NULL, 1);
  DONE;
})




;; Call to a constant address, vzeroupper variant.
(define_insn_and_split "*call_0_vzeroupper"
  [(call (mem:QI (match_operand 0 "constant_call_address_operand" ""))
	 (match_operand 1 "" ""))
   (unspec [(match_operand 2 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
  [(set_attr "type" "call")])

;; Call to a constant address.
(define_insn "*call_0"
  [(call (mem:QI (match_operand 0 "constant_call_address_operand" ""))
	 (match_operand 1 "" ""))]
  ""
  { return ix86_output_call_insn (insn, operands[0], 0); }
  [(set_attr "type" "call")])

;; General 32-bit call, vzeroupper variant.
(define_insn_and_split "*call_1_vzeroupper"
  [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lsm"))
	 (match_operand 1 "" ""))
   (unspec [(match_operand 2 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && !TARGET_64BIT && !SIBLING_CALL_P (insn)"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
  [(set_attr "type" "call")])

;; General 32-bit call.
(define_insn "*call_1"
  [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "lsm"))
	 (match_operand 1 "" ""))]
  "!TARGET_64BIT && !SIBLING_CALL_P (insn)"
  { return ix86_output_call_insn (insn, operands[0], 0); }
  [(set_attr "type" "call")])




;; 32-bit sibling call, vzeroupper variant.
(define_insn_and_split "*sibcall_1_vzeroupper"
  [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "s,U"))
	 (match_operand 1 "" ""))
   (unspec [(match_operand 2 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && !TARGET_64BIT && SIBLING_CALL_P (insn)"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
  [(set_attr "type" "call")])

;; 32-bit sibling call.
(define_insn "*sibcall_1"
  [(call (mem:QI (match_operand:SI 0 "sibcall_insn_operand" "s,U"))
	 (match_operand 1 "" ""))]
  "!TARGET_64BIT && SIBLING_CALL_P (insn)"
  { return ix86_output_call_insn (insn, operands[0], 0); }
  [(set_attr "type" "call")])

;; 64-bit call for small/medium code models, vzeroupper variant.
(define_insn_and_split "*call_1_rex64_vzeroupper"
  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rsm"))
	 (match_operand 1 "" ""))
   (unspec [(match_operand 2 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)
   && ix86_cmodel != CM_LARGE && ix86_cmodel != CM_LARGE_PIC"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
  [(set_attr "type" "call")])

;; 64-bit call for small/medium code models.
(define_insn "*call_1_rex64"
  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rsm"))
	 (match_operand 1 "" ""))]
  "TARGET_64BIT && !SIBLING_CALL_P (insn)
   && ix86_cmodel != CM_LARGE && ix86_cmodel != CM_LARGE_PIC"
  { return ix86_output_call_insn (insn, operands[0], 0); }
  [(set_attr "type" "call")])




;; 64-bit MS-ABI -> SysV-ABI call: clobbers the SSE registers and
;; SI/DI that are call-clobbered under SysV but preserved under the MS
;; ABI.  vzeroupper variant first, plain variant second.
(define_insn_and_split "*call_1_rex64_ms_sysv_vzeroupper"
  [(parallel
    [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rsm"))
	   (match_operand 1 "" ""))
     (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
     (clobber (reg:TI XMM6_REG))
     (clobber (reg:TI XMM7_REG))
     (clobber (reg:TI XMM8_REG))
     (clobber (reg:TI XMM9_REG))
     (clobber (reg:TI XMM10_REG))
     (clobber (reg:TI XMM11_REG))
     (clobber (reg:TI XMM12_REG))
     (clobber (reg:TI XMM13_REG))
     (clobber (reg:TI XMM14_REG))
     (clobber (reg:TI XMM15_REG))
     (clobber (reg:DI SI_REG))
     (clobber (reg:DI DI_REG))])
   (unspec [(match_operand 2 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
  [(set_attr "type" "call")])

(define_insn "*call_1_rex64_ms_sysv"
  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rsm"))
	 (match_operand 1 "" ""))
   (unspec [(const_int 0)] UNSPEC_MS_TO_SYSV_CALL)
   (clobber (reg:TI XMM6_REG))
   (clobber (reg:TI XMM7_REG))
   (clobber (reg:TI XMM8_REG))
   (clobber (reg:TI XMM9_REG))
   (clobber (reg:TI XMM10_REG))
   (clobber (reg:TI XMM11_REG))
   (clobber (reg:TI XMM12_REG))
   (clobber (reg:TI XMM13_REG))
   (clobber (reg:TI XMM14_REG))
   (clobber (reg:TI XMM15_REG))
   (clobber (reg:DI SI_REG))
   (clobber (reg:DI DI_REG))]
  "TARGET_64BIT && !SIBLING_CALL_P (insn)"
  { return ix86_output_call_insn (insn, operands[0], 0); }
  [(set_attr "type" "call")])

;; 64-bit call for the large code model (no "s" constraint: direct
;; symbolic calls are not usable), vzeroupper variant first.
(define_insn_and_split "*call_1_rex64_large_vzeroupper"
  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rm"))
	 (match_operand 1 "" ""))
   (unspec [(match_operand 2 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && TARGET_64BIT && !SIBLING_CALL_P (insn)"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
  [(set_attr "type" "call")])

(define_insn "*call_1_rex64_large"
  [(call (mem:QI (match_operand:DI 0 "call_insn_operand" "rm"))
	 (match_operand 1 "" ""))]
  "TARGET_64BIT && !SIBLING_CALL_P (insn)"
  { return ix86_output_call_insn (insn, operands[0], 0); }
  [(set_attr "type" "call")])




;; 64-bit sibling call, vzeroupper and plain variants.
(define_insn_and_split "*sibcall_1_rex64_vzeroupper"
  [(call (mem:QI (match_operand:DI 0 "sibcall_insn_operand" "s,U"))
	 (match_operand 1 "" ""))
   (unspec [(match_operand 2 "const_int_operand" "")]
	   UNSPEC_CALL_NEEDS_VZEROUPPER)]
  "TARGET_VZEROUPPER && TARGET_64BIT && SIBLING_CALL_P (insn)"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "ix86_split_call_vzeroupper (curr_insn, operands[2]); DONE;"
  [(set_attr "type" "call")])

(define_insn "*sibcall_1_rex64"
  [(call (mem:QI (match_operand:DI 0 "sibcall_insn_operand" "s,U"))
	 (match_operand 1 "" ""))]
  "TARGET_64BIT && SIBLING_CALL_P (insn)"
  { return ix86_output_call_insn (insn, operands[0], 0); }
  [(set_attr "type" "call")])




;; Value-returning call variants; all delegate to ix86_expand_call with
;; operand 0 as the value destination.  For call_value_pop, operands[3]
;; comes from the named-pattern interface (not in the RTL template).
(define_expand "call_value_pop"
  [(parallel [(set (match_operand 0 "" "")
		   (call (match_operand:QI 1 "" "")
			 (match_operand:SI 2 "" "")))
	      (set (reg:SI SP_REG)
		   (plus:SI (reg:SI SP_REG)
			    (match_operand:SI 4 "" "")))])]
  "!TARGET_64BIT"
{
  ix86_expand_call (operands[0], operands[1], operands[2],
		    operands[3], operands[4], 0);
  DONE;
})

(define_expand "call_value"
  [(set (match_operand 0 "" "")
	(call (match_operand:QI 1 "" "")
	      (match_operand:SI 2 "" "")))
   (use (match_operand:SI 3 "" ""))]
  ""
{
  ix86_expand_call (operands[0], operands[1], operands[2],
		    operands[3], NULL, 0);
  DONE;
})

(define_expand "sibcall_value"
  [(set (match_operand 0 "" "")
	(call (match_operand:QI 1 "" "")
	      (match_operand:SI 2 "" "")))
   (use (match_operand:SI 3 "" ""))]
  ""
{
  ix86_expand_call (operands[0], operands[1], operands[2],
		    operands[3], NULL, 1);
  DONE;
})




;; Call a function with an unknown prototype and scatter its possible
;; return values into the result block described by operands[2].
(define_expand "untyped_call"
  [(parallel [(call (match_operand 0 "" "")
		    (const_int 0))
	      (match_operand 1 "" "")
	      (match_operand 2 "" "")])]
  ""
{
  int i;

  /* In order to give reg-stack an easier job in validating two
     coprocessor registers as containing a possible return value,
     simply pretend the untyped call returns a complex long double
     value. 

     We can't use SSE_REGPARM_MAX here since callee is unprototyped
     and should have the default ABI.  */

  ix86_expand_call ((TARGET_FLOAT_RETURNS_IN_80387
		     ? gen_rtx_REG (XCmode, FIRST_FLOAT_REG) : NULL),
		    operands[0], const0_rtx,
		    GEN_INT ((TARGET_64BIT
			      ? (ix86_abi == SYSV_ABI
				 ? X86_64_SSE_REGPARM_MAX
				 : X86_64_MS_SSE_REGPARM_MAX)
			      : X86_32_SSE_REGPARM_MAX)
		    	     - 1),
		    NULL, 0);

  for (i = 0; i < XVECLEN (operands[2], 0); i++)
    {
      rtx set = XVECEXP (operands[2], 0, i);
      emit_move_insn (SET_DEST (set), SET_SRC (set));
    }

  /* The optimizer does not know that the call sets the function value
     registers we stored in the result block.  We avoid problems by
     claiming that all hard registers are used and clobbered at this
     point.  */
  emit_insn (gen_blockage ());

  DONE;
})




;; Zero-length scheduling barrier (unspec_volatile blocks all movement).
(define_insn "blockage"
  [(unspec_volatile [(const_int 0)] UNSPECV_BLOCKAGE)]
  ""
  ""
  [(set_attr "length" "0")])

;; Memory-only barrier: a volatile BLK-mode self-set over a scratch
;; address, preventing movement of memory accesses without acting as a
;; full blockage.
(define_expand "memory_blockage"
  [(set (match_dup 0)
	(unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BLOCKAGE))]
  ""
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*memory_blockage"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BLOCKAGE))]
  ""
  ""
  [(set_attr "length" "0")])

;; Zero-length marker keeping operand 0 live through the prologue.
(define_insn "prologue_use"
  [(unspec_volatile [(match_operand 0 "" "")] UNSPECV_PROLOGUE_USE)]
  ""
  ""
  [(set_attr "length" "0")])




;; Simple function return.  When the function pops its own arguments
;; (crtl->args.pops_args), emit the popping return instead.
(define_expand "return"
  [(return)]
  "ix86_can_use_return_insn_p ()"
{
  if (crtl->args.pops_args)
    {
      rtx popc = GEN_INT (crtl->args.pops_args);
      emit_jump_insn (gen_return_pop_internal (popc));
      DONE;
    }
})




;; Plain one-byte RET.
(define_insn "return_internal"
  [(return)]
  "reload_completed"
  "ret"
  [(set_attr "length" "1")
   (set_attr "atom_unit" "jeu")
   (set_attr "length_immediate" "0")
   (set_attr "modrm" "0")])

;; REP-prefixed RET (two bytes), distinguished by the UNSPEC_REP tag.
(define_insn "return_internal_long"
  [(return)
   (unspec [(const_int 0)] UNSPEC_REP)]
  "reload_completed"
  "rep\;ret"
  [(set_attr "length" "2")
   (set_attr "atom_unit" "jeu")
   (set_attr "length_immediate" "0")
   (set_attr "prefix_rep" "1")
   (set_attr "modrm" "0")])

;; RET imm16: pop operand 0 bytes of arguments on return.
(define_insn "return_pop_internal"
  [(return)
   (use (match_operand:SI 0 "const_int_operand" ""))]
  "reload_completed"
  "ret\t%0"
  [(set_attr "length" "3")
   (set_attr "atom_unit" "jeu")
   (set_attr "length_immediate" "2")
   (set_attr "modrm" "0")])

;; Return through a register holding the return address (emitted as an
;; indirect jump).
(define_insn "return_indirect_internal"
  [(return)
   (use (match_operand:SI 0 "register_operand" "r"))]
  "reload_completed"
  "jmp\t%A0"
  [(set_attr "type" "ibr")
   (set_attr "length_immediate" "0")])




;; Single one-byte NOP.
(define_insn "nop"
  [(const_int 0)]
  ""
  "nop"
  [(set_attr "length" "1")
   (set_attr "length_immediate" "0")
   (set_attr "modrm" "0")])

;; Emit operand 0 (1..8) consecutive NOP instructions.
(define_insn "nops"
  [(unspec_volatile [(match_operand 0 "const_int_operand" "")]
		    UNSPECV_NOPS)]
  "reload_completed"
{
  int num = INTVAL (operands[0]);

  gcc_assert (num >= 1 && num <= 8);

  while (num--)
    fputs ("\tnop\n", asm_out_file);

  return "";
}
  [(set (attr "length") (symbol_ref "INTVAL (operands[0])"))
   (set_attr "length_immediate" "0")
   (set_attr "modrm" "0")])




;; Alignment padding emitted via ASM_OUTPUT_MAX_SKIP_PAD when the
;; assembler supports max-skip alignment; otherwise a no-op on purpose.
(define_insn "pad"
  [(unspec_volatile [(match_operand 0 "" "")] UNSPECV_ALIGN)]
  ""
{
#ifdef ASM_OUTPUT_MAX_SKIP_PAD
  ASM_OUTPUT_MAX_SKIP_PAD (asm_out_file, 4, (int)INTVAL (operands[0]));
#else
  /* It is tempting to use ASM_OUTPUT_ALIGN here, but we don't want to do that.
     The align insn is used to avoid 3 jump instructions in the row to improve
     branch prediction and the benefits hardly outweigh the cost of extra 8
     nops on the average inserted by full alignment pseudo operation.  */
#endif
  return "";
}
  [(set_attr "length" "16")])




;; Function prologue; all work is done in ix86_expand_prologue.
(define_expand "prologue"
  [(const_int 0)]
  ""
  "ix86_expand_prologue (); DONE;")




;; Load the GOT base address into operand 0 (32-bit); code emitted by
;; output_set_got.
(define_insn "set_got"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(unspec:SI [(const_int 0)] UNSPEC_SET_GOT))
   (clobber (reg:CC FLAGS_REG))]
  "!TARGET_64BIT"
  "* return output_set_got (operands[0], NULL_RTX);"
  [(set_attr "type" "multi")
   (set_attr "length" "12")])

;; Same, but output_set_got is given an explicit label (operand 1).
(define_insn "set_got_labelled"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(unspec:SI [(label_ref (match_operand 1 "" ""))]
		   UNSPEC_SET_GOT))
   (clobber (reg:CC FLAGS_REG))]
  "!TARGET_64BIT"
  "* return output_set_got (operands[0], operands[1]);"
  [(set_attr "type" "multi")
   (set_attr "length" "12")])

;; 64-bit GOT base via RIP-relative LEA.
(define_insn "set_got_rex64"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(unspec:DI [(const_int 0)] UNSPEC_SET_GOT))]
  "TARGET_64BIT"
  "lea{q}\t{_GLOBAL_OFFSET_TABLE_(%%rip), %0|%0, _GLOBAL_OFFSET_TABLE_[rip]}"
  [(set_attr "type" "lea")
   (set_attr "length_address" "4")
   (set_attr "mode" "DI")])

;; Load the address of label operand 1 RIP-relatively.
(define_insn "set_rip_rex64"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(unspec:DI [(label_ref (match_operand 1 "" ""))]
		   UNSPEC_SET_RIP))]
  "TARGET_64BIT"
  "lea{q}\t{%l1(%%rip), %0|%0, %l1[rip]}"
  [(set_attr "type" "lea")
   (set_attr "length_address" "4")
   (set_attr "mode" "DI")])

;; Load the 64-bit constant offset from label operand 1 to the GOT.
(define_insn "set_got_offset_rex64"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(unspec:DI [(label_ref (match_operand 1 "" ""))]
		   UNSPEC_SET_GOT_OFFSET))]
  "TARGET_64BIT"
  "movabs{q}\t{$_GLOBAL_OFFSET_TABLE_-%l1, %0|%0, OFFSET FLAT:_GLOBAL_OFFSET_TABLE_-%l1}"
  [(set_attr "type" "imov")
   (set_attr "length_immediate" "0")
   (set_attr "length_address" "8")
   (set_attr "mode" "DI")])




;; Function epilogues; the flag to ix86_expand_epilogue selects the
;; style (1 = normal return, 0 = sibcall epilogue).
(define_expand "epilogue"
  [(const_int 0)]
  ""
  "ix86_expand_epilogue (1); DONE;")

(define_expand "sibcall_epilogue"
  [(const_int 0)]
  ""
  "ix86_expand_epilogue (0); DONE;")




;; Exception-handler return: store the handler address (operand 0) one
;; word below the stack address being restored, then perform the
;; special epilogue via eh_return_internal.
(define_expand "eh_return"
  [(use (match_operand 0 "register_operand" ""))]
  ""
{
  rtx tmp, sa = EH_RETURN_STACKADJ_RTX, ra = operands[0];

  /* Tricky bit: we write the address of the handler to which we will
     be returning into someone else's stack frame, one word below the
     stack address we wish to restore.  */
  tmp = gen_rtx_PLUS (Pmode, arg_pointer_rtx, sa);
  tmp = plus_constant (tmp, -UNITS_PER_WORD);
  tmp = gen_rtx_MEM (Pmode, tmp);
  emit_move_insn (tmp, ra);

  emit_jump_insn (gen_eh_return_internal ());
  emit_barrier ();
  DONE;
})

;; Placeholder split after the epilogue is laid out; the eh-return
;; epilogue itself comes from ix86_expand_epilogue (style 2).
(define_insn_and_split "eh_return_internal"
  [(eh_return)]
  ""
  "#"
  "epilogue_completed"
  [(const_int 0)]
  "ix86_expand_epilogue (2); DONE;")
;; LEAVE: restore SP from BP (+4 bytes, 32-bit) and pop the saved BP;
;; the BLK clobber keeps memory accesses from moving past it.
(define_insn "leave"
  [(set (reg:SI SP_REG)
	(plus:SI (reg:SI BP_REG) (const_int 4)))
   (set (reg:SI BP_REG)
	(mem:SI (reg:SI BP_REG)))
   (clobber (mem:BLK (scratch)))]
  "!TARGET_64BIT"
  "leave"
  [(set_attr "type" "leave")])

;; 64-bit LEAVE (+8 bytes for the saved BP slot).
(define_insn "leave_rex64"
  [(set (reg:DI SP_REG)
	(plus:DI (reg:DI BP_REG) (const_int 8)))
   (set (reg:DI BP_REG)
	(mem:DI (reg:DI BP_REG)))
   (clobber (mem:BLK (scratch)))]
  "TARGET_64BIT"
  "leave"
  [(set_attr "type" "leave")])




;; -fsplit-stack prologue; all work in ix86_expand_split_stack_prologue.
(define_expand "split_stack_prologue"
  [(const_int 0)]
  ""
{
  ix86_expand_split_stack_prologue ();
  DONE;
})




;; Return used by the -fsplit-stack machinery: plain RET when operand 0
;; is zero, RET imm16 otherwise; the length attributes mirror that.
(define_insn "split_stack_return"
  [(unspec_volatile [(match_operand:SI 0 "const_int_operand" "")]
		    UNSPECV_SPLIT_STACK_RETURN)]
  ""
{
  if (operands[0] == const0_rtx)
    return "ret";
  else
    return "ret\t%0";
}
  [(set_attr "atom_unit" "jeu")
   (set_attr "modrm" "0")
   (set (attr "length")
	(if_then_else (match_operand:SI 0 "const0_operand" "")
		      (const_int 1)
		      (const_int 3)))
   (set (attr "length_immediate")
	(if_then_else (match_operand:SI 0 "const0_operand" "")
		      (const_int 0)
		      (const_int 2)))])




;; -fsplit-stack space check: branch to operand 1 when SP minus the
;; requested size (operand 0) stays at or above the stack limit, which
;; is read through an UNSPEC_STACK_CHECK memory reference.
(define_expand "split_stack_space_check"
  [(set (pc)
	(if_then_else
	  (ltu (minus (reg SP_REG)
		      (match_operand 0 "register_operand" ""))
	       (unspec [(const_int 0)] UNSPEC_STACK_CHECK))
	  (label_ref (match_operand 1 "" ""))
	  (pc)))]
  ""
{
  rtx reg, size, limit;

  reg = gen_reg_rtx (Pmode);
  size = force_reg (Pmode, operands[0]);
  emit_insn (gen_sub3_insn (reg, stack_pointer_rtx, size));
  limit = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
			  UNSPEC_STACK_CHECK);
  limit = gen_rtx_MEM (Pmode, gen_rtx_CONST (Pmode, limit));
  ix86_expand_branch (GEU, reg, limit, operands[1]);

  DONE;
})



