r962 - in trunk: gcc hlfs

archaic at linuxfromscratch.org archaic at linuxfromscratch.org
Wed Jun 1 16:51:39 PDT 2005


Author: archaic
Date: 2005-06-01 17:51:38 -0600 (Wed, 01 Jun 2005)
New Revision: 962

Added:
   trunk/gcc/gcc-3.4.4-ssp-1.patch
   trunk/hlfs/gcc-3.4.4-ssp-1.patch
Log:
Added ssp patch for gcc-3.4.4. (hlfs)

Added: trunk/gcc/gcc-3.4.4-ssp-1.patch
===================================================================
--- trunk/gcc/gcc-3.4.4-ssp-1.patch	2005-06-01 15:06:30 UTC (rev 961)
+++ trunk/gcc/gcc-3.4.4-ssp-1.patch	2005-06-01 23:51:38 UTC (rev 962)
@@ -0,0 +1 @@
+link ../hlfs/gcc-3.4.4-ssp-1.patch
\ No newline at end of file


Property changes on: trunk/gcc/gcc-3.4.4-ssp-1.patch
___________________________________________________________________
Name: svn:special
   + *

Added: trunk/hlfs/gcc-3.4.4-ssp-1.patch
===================================================================
--- trunk/hlfs/gcc-3.4.4-ssp-1.patch	2005-06-01 15:06:30 UTC (rev 961)
+++ trunk/hlfs/gcc-3.4.4-ssp-1.patch	2005-06-01 23:51:38 UTC (rev 962)
@@ -0,0 +1,4036 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2005-05-29
+Initial Package Version: 3.4.4
+Upstream Status: Rejected Upstream
+Origin: http://www.research.ibm.com/trl/projects/security/ssp/
+Description: Smashing Stack Protector - protector-3.4.1-1.tar.gz
+This patch is made specifically to work with the Glibc SSP patch. All guard
+functions have been removed. Developers are encouraged to check the
+differences between this patch, the original from ibm, and the Glibc patch.
+The gcc/libgcc2.c, gcc/libgcc-std.ver, gcc/config/t-linux, gcc/configure,
+and the "ENABLESSP" hunk of gcc/Makefile.in hunks were not used.
+Equivalent to using -D_LIBC_PROVIDES_SSP_ (-DHAVE_SYSLOG is related).
+The gcc/configure and gcc/Makefile.in hunks were removed because they
+conflict with other patches and we don't use the configure option anyway.
+
+To set the version string do something like this:
+
+sed -e 's/3.4.4/3.4.4 ssp/' -i gcc/version.c &&
+sed -e 's@http://gcc.gnu.org/bugs.html@http://bugs.linuxfromscratch.org/@' \
+  -i gcc/version.c
+
+This patch, and Glibc's patch, depends on erandom sysctl from:
+http://frandom.sourceforge.net/
+Thanks to Eli Billauer.
+
+Also see:
+http://www.linuxfromscratch.org/hlfs/
+http://www.linuxfromscratch.org/hints/downloads/files/ssp.txt
+http://www.linuxfromscratch.org/hints/downloads/files/entropy.txt
+
+diff -Naur gcc-3.4.4.orig/gcc/Makefile.in gcc-3.4.4/gcc/Makefile.in
+--- gcc-3.4.4.orig/gcc/Makefile.in	2005-02-24 09:26:57.000000000 +0000
++++ gcc-3.4.4/gcc/Makefile.in	2005-05-29 05:58:14.000000000 +0000
+@@ -871,7 +871,7 @@
+  sibcall.o simplify-rtx.o sreal.o stmt.o stor-layout.o stringpool.o 	   \
+  targhooks.o timevar.o toplev.o tracer.o tree.o tree-dump.o unroll.o	   \
+  varasm.o varray.o version.o vmsdbgout.o xcoffout.o alloc-pool.o	   \
+- et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o
++ et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o protector.o
+ 
+ OBJS-md = $(out_object_file)
+ OBJS-archive = $(EXTRA_OBJS) $(host_hook_obj) hashtable.o tree-inline.o	   \
+@@ -1860,6 +1860,10 @@
+ params.o : params.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(PARAMS_H) toplev.h
+ hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(HOOKS_H)
+ pretty-print.o: $(CONFIG_H) $(SYSTEM_H) pretty-print.c $(PRETTY_PRINT_H)
++protector.o : protector.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
++   flags.h function.h $(EXPR_H) $(OPTABS_H) $(REGS_H) toplev.h hard-reg-set.h \
++   insn-config.h insn-flags.h $(RECOG_H) output.h toplev.h except.h reload.h \
++   $(TM_P_H) conditions.h $(INSN_ATTR_H) real.h protector.h
+ 
+ $(out_object_file): $(out_file) $(CONFIG_H) coretypes.h $(TM_H) $(TREE_H) $(GGC_H) \
+    $(RTL_H) $(REGS_H) hard-reg-set.h real.h insn-config.h conditions.h \
+diff -Naur gcc-3.4.4.orig/gcc/c-cppbuiltin.c gcc-3.4.4/gcc/c-cppbuiltin.c
+--- gcc-3.4.4.orig/gcc/c-cppbuiltin.c	2004-03-04 10:24:54.000000000 +0000
++++ gcc-3.4.4/gcc/c-cppbuiltin.c	2005-05-29 05:58:14.000000000 +0000
+@@ -408,6 +408,12 @@
+   if (c_dialect_objc () && flag_next_runtime)
+     cpp_define (pfile, "__NEXT_RUNTIME__");
+ 
++  /* Make the choice of the stack protector runtime visible to source code.  */
++  if (flag_propolice_protection)
++    cpp_define (pfile, "__SSP__=1");
++  if (flag_stack_protection)
++    cpp_define (pfile, "__SSP_ALL__=2");
++
+   /* A straightforward target hook doesn't work, because of problems
+      linking that hook's body when part of non-C front ends.  */
+ # define preprocessing_asm_p() (cpp_get_options (pfile)->lang == CLK_ASM)
+diff -Naur gcc-3.4.4.orig/gcc/calls.c gcc-3.4.4/gcc/calls.c
+--- gcc-3.4.4.orig/gcc/calls.c	2005-04-06 21:01:44.000000000 +0000
++++ gcc-3.4.4/gcc/calls.c	2005-05-29 05:58:14.000000000 +0000
+@@ -2321,8 +2321,12 @@
+ 	  {
+ 	    /* For variable-sized objects, we must be called with a target
+ 	       specified.  If we were to allocate space on the stack here,
+-	       we would have no way of knowing when to free it.  */
+-	    rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
++	       we would have no way of knowing when to free it.
++
++	       This is the structure of a function return object and it isn't
++	       a character array for the stack protection, so it is
++	       marked using the assignment of the KEEP argument to 5.  */
++	    rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
+ 
+ 	    mark_temp_addr_taken (d);
+ 	    structure_value_addr = XEXP (d, 0);
+diff -Naur gcc-3.4.4.orig/gcc/combine.c gcc-3.4.4/gcc/combine.c
+--- gcc-3.4.4.orig/gcc/combine.c	2005-03-17 01:36:08.000000000 +0000
++++ gcc-3.4.4/gcc/combine.c	2005-05-29 05:58:14.000000000 +0000
+@@ -1402,6 +1402,10 @@
+ 	      && ! fixed_regs[REGNO (dest)]
+ 	      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
+     return 1;
++  /* Never combine loads and stores protecting argument that use set insn
++     with used flag on.  */
++  if (SET_VOLATILE_P (set))
++    return 1;
+ 
+   return 0;
+ }
+@@ -3782,7 +3786,20 @@
+ 	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
+ 	  rtx inner_op1 = XEXP (x, 1);
+ 	  rtx inner;
+-
++	  
++#ifndef FRAME_GROWS_DOWNWARD
++	  /* For the case where the frame grows upward,
++	     the stack protector keeps the offset of the frame pointer
++	     positive integer.  */
++	  if (flag_propolice_protection
++	      && code == PLUS
++	      && other == frame_pointer_rtx
++	      && GET_CODE (inner_op0) == CONST_INT
++	      && GET_CODE (inner_op1) == CONST_INT
++	      && INTVAL (inner_op0) > 0
++	      && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
++	    return x;
++#endif
+ 	  /* Make sure we pass the constant operand if any as the second
+ 	     one if this is a commutative operation.  */
+ 	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
+@@ -4147,6 +4164,13 @@
+ 	 they are now checked elsewhere.  */
+       if (GET_CODE (XEXP (x, 0)) == PLUS
+ 	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
++#ifndef FRAME_GROWS_DOWNWARD
++	/* The stack protector keeps the addressing style of a local variable
++	   to be able to change its stack position.  */
++	if (! (flag_propolice_protection
++	       && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
++	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
++#endif
+ 	return gen_binary (PLUS, mode,
+ 			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
+ 				       XEXP (x, 1)),
+@@ -4274,8 +4298,14 @@
+ 	}
+ 
+       /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
+-	 integers.  */
+-      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
++	 integers.
++	 
++	 The stack protector keeps the addressing style of
++	 a local variable.  */
++      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
++	  && (! (flag_propolice_protection
++		 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
++		 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
+ 	return gen_binary (MINUS, mode,
+ 			   gen_binary (MINUS, mode, XEXP (x, 0),
+ 				       XEXP (XEXP (x, 1), 0)),
+diff -Naur gcc-3.4.4.orig/gcc/common.opt gcc-3.4.4/gcc/common.opt
+--- gcc-3.4.4.orig/gcc/common.opt	2004-10-28 03:43:09.000000000 +0000
++++ gcc-3.4.4/gcc/common.opt	2005-05-29 05:58:14.000000000 +0000
+@@ -152,6 +152,10 @@
+ Common
+ Warn when a variable is unused
+ 
++Wstack-protector
++Common
++Warn when not issuing stack smashing protection for some reason
++
+ aux-info
+ Common Separate
+ -aux-info <file>	Emit declaration information into <file>
+@@ -738,6 +742,14 @@
+ Common
+ Put zero initialized data in the bss section
+ 
++fstack-protector
++Common
++Enables stack protection
++
++fstack-protector-all
++Common
++Enables stack protection of every function
++
+ g
+ Common JoinedOrMissing
+ Generate debug information in default format
+diff -Naur gcc-3.4.4.orig/gcc/config/arm/arm.md gcc-3.4.4/gcc/config/arm/arm.md
+--- gcc-3.4.4.orig/gcc/config/arm/arm.md	2005-01-25 12:50:34.000000000 +0000
++++ gcc-3.4.4/gcc/config/arm/arm.md	2005-05-29 05:58:14.000000000 +0000
+@@ -3840,7 +3840,13 @@
+ 	(match_operand:DI 1 "general_operand" ""))]
+   "TARGET_EITHER"
+   "
+-  if (TARGET_THUMB)
++  if (TARGET_ARM)
++    {
++      /* Everything except mem = const or mem = mem can be done easily */
++      if (GET_CODE (operands[0]) == MEM)
++        operands[1] = force_reg (DImode, operands[1]);
++    }
++  else /* TARGET_THUMB.... */
+     {
+       if (!no_new_pseudos)
+         {
+diff -Naur gcc-3.4.4.orig/gcc/cse.c gcc-3.4.4/gcc/cse.c
+--- gcc-3.4.4.orig/gcc/cse.c	2004-10-26 18:05:42.000000000 +0000
++++ gcc-3.4.4/gcc/cse.c	2005-05-29 05:58:14.000000000 +0000
+@@ -4212,7 +4212,14 @@
+ 
+ 	      if (new_const == 0)
+ 		break;
+-
++#ifndef FRAME_GROWS_DOWNWARD
++	      if (flag_propolice_protection
++		  && GET_CODE (y) == PLUS
++		  && XEXP (y, 0) == frame_pointer_rtx
++		  && INTVAL (inner_const) > 0
++		  && INTVAL (new_const) <= 0)
++		break;
++#endif
+ 	      /* If we are associating shift operations, don't let this
+ 		 produce a shift of the size of the object or larger.
+ 		 This could occur when we follow a sign-extend by a right
+@@ -4744,6 +4751,14 @@
+       if (SET_DEST (x) == pc_rtx
+ 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
+ 	;
++      /* cut the reg propagation of stack-protected argument.  */
++      else if (SET_VOLATILE_P (x)) {
++	rtx x1 = SET_DEST (x);
++	if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
++	  x1 = SUBREG_REG (x1);
++	if (! REGNO_QTY_VALID_P(REGNO (x1)))
++	  make_new_qty (REGNO (x1), GET_MODE (x1));
++      }
+ 
+       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
+ 	 The hard function value register is used only once, to copy to
+diff -Naur gcc-3.4.4.orig/gcc/doc/invoke.texi gcc-3.4.4/gcc/doc/invoke.texi
+--- gcc-3.4.4.orig/gcc/doc/invoke.texi	2005-04-22 06:49:59.000000000 +0000
++++ gcc-3.4.4/gcc/doc/invoke.texi	2005-05-29 05:58:14.000000000 +0000
+@@ -227,7 +227,7 @@
+ -Wno-multichar  -Wnonnull  -Wpacked  -Wpadded @gol
+ -Wparentheses  -Wpointer-arith  -Wredundant-decls @gol
+ -Wreturn-type  -Wsequence-point  -Wshadow @gol
+--Wsign-compare  -Wstrict-aliasing @gol
++-Wsign-compare  -Wstack-protector  -Wstrict-aliasing @gol
+ -Wswitch  -Wswitch-default  -Wswitch-enum @gol
+ -Wsystem-headers  -Wtrigraphs  -Wundef  -Wuninitialized @gol
+ -Wunknown-pragmas  -Wunreachable-code @gol
+@@ -675,6 +675,7 @@
+ -fshort-double  -fshort-wchar @gol
+ -fverbose-asm  -fpack-struct  -fstack-check @gol
+ -fstack-limit-register=@var{reg}  -fstack-limit-symbol=@var{sym} @gol
++-fstack-protector  -fstack-protector-all @gol
+ -fargument-alias  -fargument-noalias @gol
+ -fargument-noalias-global  -fleading-underscore @gol
+ -ftls-model=@var{model} @gol
+@@ -2986,6 +2987,10 @@
+ complex; GCC will refuse to optimize programs when the optimization
+ itself is likely to take inordinate amounts of time.
+ 
++@item -Wstack-protector
++@opindex Wstack-protector
++Warn when not issuing stack smashing protection for some reason.
++
+ @item -Werror
+ @opindex Werror
+ Make all warnings into errors.
+@@ -11231,6 +11236,24 @@
+ @option{-Wl,--defsym,__stack_limit=0x7ffe0000} to enforce a stack limit
+ of 128KB@.  Note that this may only work with the GNU linker.
+ 
++@item -fstack-protector
++@item -fstack-protector-all
++@opindex fstack-protector
++@opindex fstack-protector-all
++@opindex fno-stack-protector
++Generate code to protect an application from a stack smashing
++attack. The features are (1) the insertion of random value next to the
++frame pointer to detect the integrity of the stack, (2) the reordering
++of local variables to place buffers after pointers to avoid the
++corruption of pointers that could be used to further corrupt arbitrary
++memory locations, (3) the copying of pointers in function arguments to
++an area preceding local variable buffers to prevent the corruption of
++pointers that could be used to further corrupt arbitrary memory
++locations, and the (4) omission of instrumentation code from some
++functions to decrease the performance overhead.  If the integrity
++would be broken, the program is aborted.  If stack-protector-all is
++specified, instrumentation codes are generated at every functions.
++
+ @cindex aliasing of parameters
+ @cindex parameters, aliased
+ @item -fargument-alias
+diff -Naur gcc-3.4.4.orig/gcc/explow.c gcc-3.4.4/gcc/explow.c
+--- gcc-3.4.4.orig/gcc/explow.c	2004-11-23 13:12:28.000000000 +0000
++++ gcc-3.4.4/gcc/explow.c	2005-05-29 05:58:14.000000000 +0000
+@@ -84,7 +84,8 @@
+   rtx tem;
+   int all_constant = 0;
+ 
+-  if (c == 0)
++  if (c == 0
++      && ! (flag_propolice_protection && x == virtual_stack_vars_rtx))
+     return x;
+ 
+  restart:
+@@ -185,7 +186,10 @@
+       break;
+     }
+ 
+-  if (c != 0)
++  /* For the use of stack protection, keep the frame and offset pattern
++     even if the offset is zero.  */
++  if (c != 0
++      || (flag_propolice_protection && x == virtual_stack_vars_rtx))
+     x = gen_rtx_PLUS (mode, x, GEN_INT (c));
+ 
+   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
+@@ -474,6 +478,26 @@
+       if (memory_address_p (mode, oldx))
+ 	goto win2;
+ 
++      /* The stack protector keeps the addressing style of a local variable.
++	 LEGITIMIZE_ADDRESS changes the addressing to the machine-dependent
++	 style, so the protector split the frame address to a register using
++	 force_reg. */
++      if (flag_propolice_protection)
++	{
++#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS				\
++			&& XEXP (X, 0) == virtual_stack_vars_rtx	\
++			&& GET_CODE (XEXP (X, 1)) == CONST_INT)
++	  rtx y;
++	  if (FRAMEADDR_P (x))
++	    goto win;
++	  for (y = x; y != 0 && GET_CODE (y) == PLUS; y = XEXP (y, 0))
++	    {
++	      if (FRAMEADDR_P (XEXP (y, 0)))
++		XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
++	      if (FRAMEADDR_P (XEXP (y, 1)))
++		XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
++	    }
++	}
+       /* Perform machine-dependent transformations on X
+ 	 in certain cases.  This is not necessary since the code
+ 	 below can handle all possible cases, but machine-dependent
+diff -Naur gcc-3.4.4.orig/gcc/expr.c gcc-3.4.4/gcc/expr.c
+--- gcc-3.4.4.orig/gcc/expr.c	2005-05-11 21:19:48.000000000 +0000
++++ gcc-3.4.4/gcc/expr.c	2005-05-29 05:58:14.000000000 +0000
+@@ -48,6 +48,7 @@
+ #include "intl.h"
+ #include "tm_p.h"
+ #include "target.h"
++#include "protector.h"
+ 
+ /* Decide whether a function's arguments should be processed
+    from first to last or from last to first.
+@@ -1060,7 +1061,11 @@
+ 
+    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
+    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
+-   stpcpy.  */
++   stpcpy.
++
++   When the stack protector is used at the reverse move, it starts the move
++   instruction from the address within the region of a variable.
++   So it eliminates the first address decrement instruction.  */
+ 
+ rtx
+ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
+@@ -1123,6 +1128,8 @@
+ 
+       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
+ 	{
++	  if (flag_propolice_protection)
++	    len = len - GET_MODE_SIZE (mode);
+ 	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
+ 	  data.autinc_from = 1;
+ 	  data.explicit_inc_from = -1;
+@@ -1137,6 +1144,8 @@
+ 	data.from_addr = copy_addr_to_reg (from_addr);
+       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
+ 	{
++	  if (flag_propolice_protection)
++	    len = len - GET_MODE_SIZE (mode);
+ 	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
+ 	  data.autinc_to = 1;
+ 	  data.explicit_inc_to = -1;
+@@ -1280,11 +1289,15 @@
+ 	from1 = adjust_address (data->from, mode, data->offset);
+ 
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
+-	emit_insn (gen_add2_insn (data->to_addr,
+-				  GEN_INT (-(HOST_WIDE_INT)size)));
++	/* The stack protector skips the first address decrement instruction
++	   at the reverse move.  */
++	if (!flag_propolice_protection || data->explicit_inc_to < -1)
++	  emit_insn (gen_add2_insn (data->to_addr,
++				    GEN_INT (-(HOST_WIDE_INT)size)));
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
+-	emit_insn (gen_add2_insn (data->from_addr,
+-				  GEN_INT (-(HOST_WIDE_INT)size)));
++	if (!flag_propolice_protection || data->explicit_inc_from < -1)
++	  emit_insn (gen_add2_insn (data->from_addr,
++				    GEN_INT (-(HOST_WIDE_INT)size)));
+ 
+       if (data->to)
+ 	emit_insn ((*genfun) (to1, from1));
+@@ -2475,7 +2488,12 @@
+ 
+       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
+ 	{
+-	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
++	  int len = data->len;
++	  /* The stack protector starts the store instruction from
++	     the address within the region of a variable.  */
++	  if (flag_propolice_protection)
++	    len -= GET_MODE_SIZE (mode);
++	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
+ 	  data->autinc_to = 1;
+ 	  data->explicit_inc_to = -1;
+ 	}
+@@ -2544,8 +2562,11 @@
+ 	to1 = adjust_address (data->to, mode, data->offset);
+ 
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
+-	emit_insn (gen_add2_insn (data->to_addr,
+-				  GEN_INT (-(HOST_WIDE_INT) size)));
++	/* The stack protector skips the first address decrement instruction
++	   at the reverse store.  */
++	if (!flag_propolice_protection || data->explicit_inc_to < -1)
++	  emit_insn (gen_add2_insn (data->to_addr,
++				    GEN_INT (-(HOST_WIDE_INT) size)));
+ 
+       cst = (*data->constfun) (data->constfundata, data->offset, mode);
+       emit_insn ((*genfun) (to1, cst));
+@@ -5701,7 +5722,9 @@
+ 	  && GET_CODE (XEXP (value, 0)) == PLUS
+ 	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
+ 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
+-	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
++	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
++	  && (!flag_propolice_protection
++	      || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
+ 	{
+ 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
+ 					  XEXP (XEXP (value, 0), 0), op2,
+diff -Naur gcc-3.4.4.orig/gcc/flags.h gcc-3.4.4/gcc/flags.h
+--- gcc-3.4.4.orig/gcc/flags.h	2004-02-18 00:09:04.000000000 +0000
++++ gcc-3.4.4/gcc/flags.h	2005-05-29 05:58:14.000000000 +0000
+@@ -186,6 +186,10 @@
+ 
+ extern bool warn_strict_aliasing;
+ 
++/* Warn when not issuing stack smashing protection for some reason.  */
++
++extern bool warn_stack_protector;
++
+ /* Nonzero if generating code to do profiling.  */
+ 
+ extern int profile_flag;
+@@ -771,4 +775,12 @@
+ #define HONOR_SIGN_DEPENDENT_ROUNDING(MODE) \
+   (MODE_HAS_SIGN_DEPENDENT_ROUNDING (MODE) && flag_rounding_math)
+ 
++/* Nonzero means use propolice as a stack protection method.  */
++
++extern int flag_propolice_protection;
++
++/* Nonzero means use a stack protection method for every function.  */
++
++extern int flag_stack_protection;
++
+ #endif /* ! GCC_FLAGS_H */
+diff -Naur gcc-3.4.4.orig/gcc/function.c gcc-3.4.4/gcc/function.c
+--- gcc-3.4.4.orig/gcc/function.c	2005-05-11 21:19:49.000000000 +0000
++++ gcc-3.4.4/gcc/function.c	2005-05-29 06:03:47.000000000 +0000
+@@ -63,6 +63,7 @@
+ #include "integrate.h"
+ #include "langhooks.h"
+ #include "target.h"
++#include "protector.h"
+ 
+ #ifndef TRAMPOLINE_ALIGNMENT
+ #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
+@@ -155,6 +156,10 @@
+ /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
+    in this function.  */
+ static GTY(()) varray_type sibcall_epilogue;
++
++/* Current boundary mark for character arrays.  */
++static int temp_boundary_mark = 0;
++
+ 

+ /* In order to evaluate some expressions, such as function calls returning
+    structures in memory, we need to temporarily allocate stack locations.
+@@ -208,6 +213,8 @@
+   /* The size of the slot, including extra space for alignment.  This
+      info is for combine_temp_slots.  */
+   HOST_WIDE_INT full_size;
++  /* Boundary mark of a character array and the others. This info is for propolice.  */
++  int boundary_mark;
+ };
+ 

+ /* This structure is used to record MEMs or pseudos used to replace VAR, any
+@@ -641,6 +648,7 @@
+    whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
+    if we are to allocate something at an inner level to be treated as
+    a variable in the block (e.g., a SAVE_EXPR).
++   KEEP is 5 if we allocate a place to return structure.
+ 
+    TYPE is the type that will be used for the stack slot.  */
+ 
+@@ -651,6 +659,8 @@
+   unsigned int align;
+   struct temp_slot *p, *best_p = 0;
+   rtx slot;
++  int char_array = (flag_propolice_protection
++		    && keep == 1 && search_string_def (type));
+ 
+   /* If SIZE is -1 it means that somebody tried to allocate a temporary
+      of a variable size.  */
+@@ -676,7 +686,8 @@
+ 	&& ! p->in_use
+ 	&& objects_must_conflict_p (p->type, type)
+ 	&& (best_p == 0 || best_p->size > p->size
+-	    || (best_p->size == p->size && best_p->align > p->align)))
++	    || (best_p->size == p->size && best_p->align > p->align))
++	&& (! char_array || p->boundary_mark != 0))
+       {
+ 	if (p->align == align && p->size == size)
+ 	  {
+@@ -711,6 +722,7 @@
+ 	      p->address = 0;
+ 	      p->rtl_expr = 0;
+ 	      p->type = best_p->type;
++	      p->boundary_mark = best_p->boundary_mark;
+ 	      p->next = temp_slots;
+ 	      temp_slots = p;
+ 
+@@ -771,6 +783,7 @@
+       p->full_size = frame_offset - frame_offset_old;
+ #endif
+       p->address = 0;
++      p->boundary_mark = char_array ? ++temp_boundary_mark : 0;
+       p->next = temp_slots;
+       temp_slots = p;
+     }
+@@ -935,14 +948,16 @@
+ 	    int delete_q = 0;
+ 	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
+ 	      {
+-		if (p->base_offset + p->full_size == q->base_offset)
++		if (p->base_offset + p->full_size == q->base_offset &&
++		    p->boundary_mark == q->boundary_mark)
+ 		  {
+ 		    /* Q comes after P; combine Q into P.  */
+ 		    p->size += q->size;
+ 		    p->full_size += q->full_size;
+ 		    delete_q = 1;
+ 		  }
+-		else if (q->base_offset + q->full_size == p->base_offset)
++		else if (q->base_offset + q->full_size == p->base_offset &&
++			 p->boundary_mark == q->boundary_mark)
+ 		  {
+ 		    /* P comes after Q; combine P into Q.  */
+ 		    q->size += p->size;
+@@ -1457,8 +1472,11 @@
+     }
+ 
+   if (new == 0)
+-    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode),
+-				consecutive_p ? -2 : 0, func);
++     new = function ?
++       assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode),
++				consecutive_p ? -2 : 0, func)
++       : assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode),
++						consecutive_p ? -2 : 0);
+ 
+   PUT_CODE (reg, MEM);
+   PUT_MODE (reg, decl_mode);
+@@ -3946,10 +3964,13 @@
+ 		}
+ 
+ 	      /* Otherwise copy the new constant into a register and replace
+-		 constant with that register.  */
++		 constant with that register.
++		 At the use of stack protection, stop to replace the frame
++		 offset with a register.  */
+ 	      temp = gen_reg_rtx (Pmode);
+ 	      XEXP (x, 0) = new;
+-	      if (validate_change (object, &XEXP (x, 1), temp, 0))
++	      if (validate_change (object, &XEXP (x, 1), temp, 0)
++		  && !flag_propolice_protection)
+ 		emit_insn_before (gen_move_insn (temp, new_offset), object);
+ 	      else
+ 		{
+diff -Naur gcc-3.4.4.orig/gcc/gcse.c gcc-3.4.4/gcc/gcse.c
+--- gcc-3.4.4.orig/gcc/gcse.c	2004-10-30 18:02:53.000000000 +0000
++++ gcc-3.4.4/gcc/gcse.c	2005-05-29 05:58:14.000000000 +0000
+@@ -4176,9 +4176,13 @@
+ 	continue;
+ 
+       /* Find an assignment that sets reg_used and is available
+-	 at the start of the block.  */
++	 at the start of the block.
++
++         Skip the copy propagation not to eliminate the register that is
++	 the duplicated pointer of a function argument. It is used for
++	 the function argument protection.  */
+       set = find_avail_set (regno, insn);
+-      if (! set)
++      if (! set || SET_VOLATILE_P (set->expr))
+ 	continue;
+ 
+       pat = set->expr;
+diff -Naur gcc-3.4.4.orig/gcc/integrate.c gcc-3.4.4/gcc/integrate.c
+--- gcc-3.4.4.orig/gcc/integrate.c	2004-12-05 05:21:01.000000000 +0000
++++ gcc-3.4.4/gcc/integrate.c	2005-05-29 05:58:14.000000000 +0000
+@@ -393,6 +393,11 @@
+   /* These args would always appear unused, if not for this.  */
+   TREE_USED (copy) = 1;
+ 
++  /* The inlined variable is marked as INLINE not to change the location
++     by stack protector.  */
++  if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL)
++    DECL_COPIED (copy) = 1;
++
+   /* Set the context for the new declaration.  */
+   if (!DECL_CONTEXT (decl))
+     /* Globals stay global.  */
+@@ -1970,6 +1975,12 @@
+ 
+ 	      seq = get_insns ();
+ 	      end_sequence ();
++#ifdef ARGS_GROWS_DOWNWARD
++	      /* Mark this pointer as the top of the argument
++		 block. The pointer minus one is in the block.  */
++	      if (flag_propolice_protection && GET_CODE (seq) == SET)
++		RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
++#endif
+ 	      emit_insn_after (seq, map->insns_at_start);
+ 	      return temp;
+ 	    }
+diff -Naur gcc-3.4.4.orig/gcc/loop.c gcc-3.4.4/gcc/loop.c
+--- gcc-3.4.4.orig/gcc/loop.c	2005-01-06 19:12:03.000000000 +0000
++++ gcc-3.4.4/gcc/loop.c	2005-05-29 05:58:14.000000000 +0000
+@@ -6525,6 +6525,14 @@
+   if (GET_CODE (*mult_val) == USE)
+     *mult_val = XEXP (*mult_val, 0);
+ 
++#ifndef FRAME_GROWS_DOWNWARD
++  if (flag_propolice_protection
++      && GET_CODE (*add_val) == PLUS
++      && (XEXP (*add_val, 0) == frame_pointer_rtx
++	  || XEXP (*add_val, 1) == frame_pointer_rtx))
++    return 0;
++#endif
++
+   if (is_addr)
+     *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost;
+   else
+diff -Naur gcc-3.4.4.orig/gcc/mklibgcc.in gcc-3.4.4/gcc/mklibgcc.in
+--- gcc-3.4.4.orig/gcc/mklibgcc.in	2005-02-24 09:26:57.000000000 +0000
++++ gcc-3.4.4/gcc/mklibgcc.in	2005-05-29 05:58:14.000000000 +0000
+@@ -58,7 +58,7 @@
+ 	_enable_execute_stack _trampoline __main _absvsi2 _absvdi2 _addvsi3
+ 	_addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors
+ 	_ffssi2 _ffsdi2 _clz _clzsi2 _clzdi2 _ctzsi2 _ctzdi2 _popcount_tab
+-	_popcountsi2 _popcountdi2 _paritysi2 _paritydi2'
++	_popcountsi2 _popcountdi2 _paritysi2 _paritydi2 _stack_smash_handler'
+ 
+ # Disable SHLIB_LINK if shared libgcc not enabled.
+ if [ "@enable_shared@" = "no" ]; then
+diff -Naur gcc-3.4.4.orig/gcc/optabs.c gcc-3.4.4/gcc/optabs.c
+--- gcc-3.4.4.orig/gcc/optabs.c	2004-12-05 05:21:01.000000000 +0000
++++ gcc-3.4.4/gcc/optabs.c	2005-05-29 05:58:14.000000000 +0000
+@@ -678,6 +678,27 @@
+   if (target)
+     target = protect_from_queue (target, 1);
+ 
++  /* Keep the frame and offset pattern at the use of stack protection.  */
++  if (flag_propolice_protection
++      && binoptab->code == PLUS
++      && op0 == virtual_stack_vars_rtx
++      && GET_CODE(op1) == CONST_INT)
++    {
++      int icode = (int) binoptab->handlers[(int) mode].insn_code;
++      if (target)
++	temp = target;
++      else
++	temp = gen_reg_rtx (mode);
++
++      if (! (*insn_data[icode].operand[0].predicate) (temp, mode)
++	  || GET_CODE (temp) != REG)
++	temp = gen_reg_rtx (mode);
++
++      emit_insn (gen_rtx_SET (VOIDmode, temp,
++			      gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
++      return temp;
++    }
++
+   if (flag_force_mem)
+     {
+       /* Load duplicate non-volatile operands once.  */
+diff -Naur gcc-3.4.4.orig/gcc/opts.c gcc-3.4.4/gcc/opts.c
+--- gcc-3.4.4.orig/gcc/opts.c	2004-02-18 00:09:04.000000000 +0000
++++ gcc-3.4.4/gcc/opts.c	2005-05-29 05:58:14.000000000 +0000
+@@ -125,6 +125,9 @@
+ bool warn_unused_variable;
+ bool warn_unused_value;
+ 
++/* Warn when not issuing stack smashing protection for some reason */
++bool warn_stack_protector;
++
+ /* Hack for cooperation between set_Wunused and set_Wextra.  */
+ static bool maybe_warn_unused_parameter;
+ 
+@@ -798,6 +801,10 @@
+       warn_unused_variable = value;
+       break;
+ 
++    case OPT_Wstack_protector:
++      warn_stack_protector = value;
++      break;
++
+     case OPT_aux_info:
+     case OPT_aux_info_:
+       aux_info_file_name = arg;
+@@ -1361,6 +1368,14 @@
+       stack_limit_rtx = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (arg));
+       break;
+ 
++    case OPT_fstack_protector:
++      flag_propolice_protection = value;
++      break;
++
++    case OPT_fstack_protector_all:
++      flag_stack_protection = value;
++      break;
++
+     case OPT_fstrength_reduce:
+       flag_strength_reduce = value;
+       break;
+diff -Naur gcc-3.4.4.orig/gcc/protector.c gcc-3.4.4/gcc/protector.c
+--- gcc-3.4.4.orig/gcc/protector.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.4/gcc/protector.c	2005-05-29 05:58:14.000000000 +0000
+@@ -0,0 +1,2730 @@
++/* RTL buffer overflow protection function for GNU C compiler
++   Copyright (C) 2003 Free Software Foundation, Inc.
++
++This file is part of GCC.
++
++GCC is free software; you can redistribute it and/or modify it under
++the terms of the GNU General Public License as published by the Free
++Software Foundation; either version 2, or (at your option) any later
++version.
++
++GCC is distributed in the hope that it will be useful, but WITHOUT ANY
++WARRANTY; without even the implied warranty of MERCHANTABILITY or
++FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
++for more details.
++
++You should have received a copy of the GNU General Public License
++along with GCC; see the file COPYING.  If not, write to the Free
++Software Foundation, 59 Temple Place - Suite 330, Boston, MA
++02111-1307, USA.  */
++
++/* This file contains several memory arrangement functions to protect
++   the return address and the frame pointer of the stack
++   from a stack-smashing attack. It also
++   provides the function that protects pointer variables.  */
++
++#include "config.h"
++#include "system.h"
++#include "coretypes.h"
++#include "tm.h"
++#include "machmode.h"
++#include "real.h"
++#include "rtl.h"
++#include "tree.h"
++#include "regs.h"
++#include "flags.h"
++#include "insn-config.h"
++#include "insn-flags.h"
++#include "expr.h"
++#include "output.h"
++#include "recog.h"
++#include "hard-reg-set.h"
++#include "except.h"
++#include "function.h"
++#include "toplev.h"
++#include "tm_p.h"
++#include "conditions.h"
++#include "insn-attr.h"
++#include "optabs.h"
++#include "reload.h"
++#include "protector.h"
++
++
++/* Round a value to the lowest integer less than it that is a multiple of
++   the required alignment.  Avoid using division in case the value is
++   negative.  Assume the alignment is a power of two.  */
++#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
++
++/* Similar, but round to the next highest integer that meets the
++   alignment.  */
++#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
++
++
++/* Nonzero if function being compiled can define string buffers that may be
++   damaged by the stack-smash attack.  */
++static int current_function_defines_vulnerable_string;
++static int current_function_defines_short_string;
++static int current_function_has_variable_string;
++static int current_function_defines_vsized_array;
++static int current_function_is_inlinable;
++
++/* Nonzero if search_string_def finds the variable which contains an array.  */
++static int is_array;
++
++/* Nonzero if search_string_def finds a byte-pointer variable,
++   which may be assigned to alloca output.  */
++static int may_have_alloca_pointer;
++
++static rtx guard_area, _guard;
++static rtx function_first_insn, prologue_insert_point;
++
++/* Offset to end of sweeped area for gathering character arrays.  */
++static HOST_WIDE_INT sweep_frame_offset;
++
++/* Offset to end of allocated area for instantiating pseudo registers.  */
++static HOST_WIDE_INT push_allocated_offset = 0;
++
++/* Offset to end of assigned area for instantiating pseudo registers.  */
++static HOST_WIDE_INT push_frame_offset = 0;
++
++/* Set to 1 after cse_not_expected becomes nonzero. it is used to identify
++   which stage assign_stack_local_for_pseudo_reg is called from.  */
++static int saved_cse_not_expected = 0;
++
++static int search_string_from_argsandvars (int);
++static int search_string_from_local_vars (tree);
++static int search_pointer_def (tree);
++static int search_func_pointer (tree);
++static int check_used_flag (rtx);
++static void reset_used_flags_for_insns (rtx);
++static void reset_used_flags_for_decls (tree);
++static void reset_used_flags_of_plus (rtx);
++static void rtl_prologue (rtx);
++static void rtl_epilogue (rtx);
++static void arrange_var_order (tree);
++static void copy_args_for_protection (void);
++static void sweep_string_variable (rtx, HOST_WIDE_INT);
++static void sweep_string_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
++static void sweep_string_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
++static void sweep_string_use_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
++static void sweep_string_in_operand (rtx, rtx *, HOST_WIDE_INT, HOST_WIDE_INT);
++static void move_arg_location (rtx, rtx, rtx, HOST_WIDE_INT);
++static void change_arg_use_of_insns (rtx, rtx, rtx *, HOST_WIDE_INT);
++static void change_arg_use_in_operand (rtx, rtx, rtx, rtx *, HOST_WIDE_INT);
++static void validate_insns_of_varrefs (rtx);
++static void validate_operand_of_varrefs (rtx, rtx *);
++
++/* Specify which size of buffers should be protected from a stack smashing
++   attack. Because small buffers are not used in situations which may
++   overflow buffer, the default size sets to the size of 64 bit register.  */
++#ifndef SUSPICIOUS_BUF_SIZE
++#define SUSPICIOUS_BUF_SIZE 8
++#endif
++
++#define AUTO_BASEPTR(X) \
++  (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
++#define AUTO_OFFSET(X) \
++  (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
++#undef PARM_PASSED_IN_MEMORY
++#define PARM_PASSED_IN_MEMORY(PARM) \
++ (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
++#define TREE_VISITED(NODE) ((NODE)->common.unused_0)
++
++/* Argument values for calling search_string_from_argsandvars.  */
++#define CALL_FROM_PREPARE_STACK_PROTECTION	0
++#define CALL_FROM_PUSH_FRAME			1
++
++
++/* Prepare several stack protection instruments for the current function
++   if the function has an array as a local variable, which may be vulnerable
++   from a stack smashing attack, and it is not inlinable.
++
++   The overall steps are as follows;
++   (1)search an array,
++   (2)insert guard_area on the stack,
++   (3)duplicate pointer arguments into local variables, and
++   (4)arrange the location of local variables.  */
++void
++prepare_stack_protection (int inlinable)
++{
++  tree blocks = DECL_INITIAL (current_function_decl);
++  current_function_is_inlinable = inlinable && !flag_no_inline;
++  push_frame_offset = push_allocated_offset = 0;
++  saved_cse_not_expected = 0;
++
++  /* Skip the protection if the function has no block
++    or it is an inline function.  */
++  if (current_function_is_inlinable)
++    validate_insns_of_varrefs (get_insns ());
++  if (! blocks || current_function_is_inlinable)
++    return;
++
++  current_function_defines_vulnerable_string
++    = search_string_from_argsandvars (CALL_FROM_PREPARE_STACK_PROTECTION);
++
++  if (current_function_defines_vulnerable_string
++      || flag_stack_protection)
++    {
++      function_first_insn = get_insns ();
++
++      if (current_function_contains_functions)
++	{
++	  if (warn_stack_protector)
++	    warning ("not protecting function: it contains functions");
++	  return;
++	}
++
++      /* Initialize recognition, indicating that volatile is OK.  */
++      init_recog ();
++
++      sweep_frame_offset = 0;
++	
++#ifdef STACK_GROWS_DOWNWARD
++      /* frame_offset: offset to end of allocated area of stack frame.
++	 It is defined in the function.c.  */
++
++      /* the location must be before buffers.  */
++      guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
++      PUT_MODE (guard_area, GUARD_m);
++      MEM_VOLATILE_P (guard_area) = 1;
++
++#ifndef FRAME_GROWS_DOWNWARD
++      sweep_frame_offset = frame_offset;
++#endif
++
++      /* For making room for guard value, scan all insns and fix the offset
++	 address of the variable that is based on frame pointer.
++	 Scan all declarations of variables and fix the offset address
++	 of the variable that is based on the frame pointer.  */
++      sweep_string_variable (guard_area, UNITS_PER_GUARD);
++
++	
++      /* the location of guard area moves to the beginning of stack frame.  */
++      if (AUTO_OFFSET(XEXP (guard_area, 0)))
++	XEXP (XEXP (guard_area, 0), 1)
++	  = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
++
++
++      /* Insert prologue rtl instructions.  */
++      rtl_prologue (function_first_insn);
++
++      if (! current_function_has_variable_string)
++	{
++	  /* Generate argument saving instruction.  */
++	  copy_args_for_protection ();
++
++#ifndef FRAME_GROWS_DOWNWARD
++	  /* If frame grows upward, character arrays for protecting args
++	     may copy to the top of the guard variable.
++	     So sweep the guard variable again.  */
++	  sweep_frame_offset = CEIL_ROUND (frame_offset,
++					   BIGGEST_ALIGNMENT / BITS_PER_UNIT);
++	  sweep_string_variable (guard_area, UNITS_PER_GUARD);
++#endif
++	}
++      /* Variable can't be protected from the overflow of variable length
++	 buffer. But variable reordering is still effective against
++	 the overflow of fixed size character arrays.  */
++      else if (warn_stack_protector)
++	warning ("not protecting variables: it has a variable length buffer");
++#endif
++#ifndef FRAME_GROWS_DOWNWARD
++      if (STARTING_FRAME_OFFSET == 0)
++	{
++	  /* This part may be only for alpha.  */
++	  push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++	  assign_stack_local (BLKmode, push_allocated_offset, -1);
++	  sweep_frame_offset = frame_offset;
++	  sweep_string_variable (const0_rtx, -push_allocated_offset);
++	  sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
++	}
++#endif
++
++      /* Arrange the order of local variables.  */
++      arrange_var_order (blocks);
++
++#ifdef STACK_GROWS_DOWNWARD
++      /* Insert epilogue rtl instructions.  */
++      rtl_epilogue (get_last_insn ());
++#endif
++      init_recog_no_volatile ();
++    }
++  else if (current_function_defines_short_string
++	   && warn_stack_protector)
++    warning ("not protecting function: buffer is less than %d bytes long",
++	     SUSPICIOUS_BUF_SIZE);
++}
++
++/*
++  Search string from arguments and local variables.
++   caller: CALL_FROM_PREPARE_STACK_PROTECTION (0)
++	   CALL_FROM_PUSH_FRAME (1)
++*/
++static int
++search_string_from_argsandvars (int caller)
++{
++  tree blocks, parms;
++  int string_p;
++
++  /* Saves a latest search result as a cached infomation.  */
++  static tree __latest_search_decl = 0;
++  static int  __latest_search_result = FALSE;
++
++  if (__latest_search_decl == current_function_decl)
++    return __latest_search_result;
++  else
++    if (caller == CALL_FROM_PUSH_FRAME)
++      return FALSE;
++
++  __latest_search_decl = current_function_decl;
++  __latest_search_result = TRUE;
++  
++  current_function_defines_short_string = FALSE;
++  current_function_has_variable_string = FALSE;
++  current_function_defines_vsized_array = FALSE;
++  may_have_alloca_pointer = FALSE;
++
++  /* Search a string variable from local variables.  */
++  blocks = DECL_INITIAL (current_function_decl);
++  string_p = search_string_from_local_vars (blocks);
++
++  if (! current_function_defines_vsized_array
++      && may_have_alloca_pointer
++      && current_function_calls_alloca)
++    {
++      current_function_has_variable_string = TRUE;
++      return TRUE;
++    }
++
++  if (string_p)
++    return TRUE;
++
++#ifdef STACK_GROWS_DOWNWARD
++  /* Search a string variable from arguments.  */
++  parms = DECL_ARGUMENTS (current_function_decl);
++
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms))
++	  {
++	    string_p = search_string_def (TREE_TYPE(parms));
++	    if (string_p)
++	      return TRUE;
++	  }
++      }
++#endif
++
++  __latest_search_result = FALSE;
++  return FALSE;
++}
++
++
++/* Search string from local variables in the specified scope.  */
++static int
++search_string_from_local_vars (tree block)
++{
++  tree types;
++  int found = FALSE;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable.  */
++	  /* name: types.decl.name.identifier.id                     */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
++	      && TREE_CODE (types) == VAR_DECL
++	      && ! DECL_ARTIFICIAL (types)
++	      && DECL_RTL_SET_P (types)
++	      && GET_CODE (DECL_RTL (types)) == MEM
++
++	      && search_string_def (TREE_TYPE (types)))
++	    {
++	      rtx home = DECL_RTL (types);
++
++	      if (GET_CODE (home) == MEM
++		  && (GET_CODE (XEXP (home, 0)) == MEM
++		      || (GET_CODE (XEXP (home, 0)) == REG
++			  && XEXP (home, 0) != virtual_stack_vars_rtx
++			  && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
++			  && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
++#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
++			  && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
++#endif
++			  )))
++		/* If the value is indirect by memory or by a register
++		   that isn't the frame pointer then it means the object is
++		   variable-sized and address through
++		   that register or stack slot.
++		   The protection has no way to hide pointer variables
++		   behind the array, so all we can do is staying
++		   the order of variables and arguments.  */
++		{
++		  current_function_has_variable_string = TRUE;
++		}
++	    
++	      /* Found character array.  */
++	      found = TRUE;
++	    }
++	}
++
++      if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
++	{
++	  found = TRUE;
++	}
++
++      block = BLOCK_CHAIN (block);
++    }
++    
++  return found;
++}
++
++
++/* Search a character array from the specified type tree.  */
++int
++search_string_def (tree type)
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case ARRAY_TYPE:
++      /* Check if the array is a variable-sized array.  */
++      if (TYPE_DOMAIN (type) == 0
++	  || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++	      && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
++	current_function_defines_vsized_array = TRUE;
++
++      /* Check if the array is related to char array.  */
++      if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
++	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
++	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
++	{
++	  /* Check if the string is a variable string.  */
++	  if (TYPE_DOMAIN (type) == 0
++	      || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++		  && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
++	    return TRUE;
++
++	  /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE.  */
++	  if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++	      && (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
++		  >= SUSPICIOUS_BUF_SIZE))
++	    return TRUE;
++
++	  current_function_defines_short_string = TRUE;
++	}
++      
++      /* to protect every functions, sweep any arrays to the frame top.  */
++      is_array = TRUE;
++
++      return search_string_def(TREE_TYPE(type));
++	
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++      /* Check if each field has character arrays.  */
++      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	{
++	  /* Omit here local type decls until we know how to support them. */
++	  if ((TREE_CODE (tem) == TYPE_DECL)
++	      || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
++	    continue;
++
++	  if (search_string_def(TREE_TYPE(tem)))
++	    return TRUE;
++	}
++      break;
++	
++    case POINTER_TYPE:
++      /* Check if pointer variables, which may be a pointer assigned 
++	 by alloca function call, are declared.  */
++      if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
++	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
++	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
++	may_have_alloca_pointer = TRUE;
++      break;
++
++    case REFERENCE_TYPE:
++    case OFFSET_TYPE:
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++/* Examine whether the input contains frame pointer addressing.  */
++int
++contains_fp (rtx op)
++{
++  enum rtx_code code;
++  rtx x;
++  int i, j;
++  const char *fmt;
++
++  x = op;
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case REG:
++    case ADDRESSOF:
++      return FALSE;
++
++    case MEM:
++      /* This case is not generated at the stack protection.
++	 see plus_constant_wide and simplify_plus_minus function.  */
++      if (XEXP (x, 0) == virtual_stack_vars_rtx)
++	abort ();
++      
++    case PLUS:
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
++	return TRUE;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (contains_fp (XEXP (x, i)))
++	  return TRUE;
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	if (contains_fp (XVECEXP (x, i, j)))
++	  return TRUE;
++
++  return FALSE;
++}
++
++
++/* Examine whether the input contains any pointer.  */
++static int
++search_pointer_def (tree type)
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++      /* Check if each field has a pointer.  */
++      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	{
++	  if ((TREE_CODE (tem) == TYPE_DECL)
++	      || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
++	    continue;
++
++	  if (search_pointer_def (TREE_TYPE(tem)))
++	    return TRUE;
++	}
++      break;
++
++    case ARRAY_TYPE:
++      return search_pointer_def (TREE_TYPE(type));
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++    case OFFSET_TYPE:
++      if (TYPE_READONLY (TREE_TYPE (type)))
++	{
++	  /* If this pointer contains function pointer,
++	     it should be protected.  */
++	  return search_func_pointer (TREE_TYPE (type));
++	}
++      return TRUE;
++	
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++/* Examine whether the input contains function pointer.  */
++static int
++search_func_pointer (tree type)
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++	if (! TREE_VISITED (type))
++	  {
++	    /* Mark the type as having been visited already.  */
++	    TREE_VISITED (type) = 1;
++
++	    /* Check if each field has a function pointer.  */
++	    for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	      {
++		if (TREE_CODE (tem) == FIELD_DECL
++		    && search_func_pointer (TREE_TYPE(tem)))
++		  {
++		    TREE_VISITED (type) = 0;
++		    return TRUE;
++		  }
++	      }
++	    
++	    TREE_VISITED (type) = 0;
++	  }
++	break;
++
++    case ARRAY_TYPE:
++      return search_func_pointer (TREE_TYPE(type));
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++    case OFFSET_TYPE:
++      if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
++	return TRUE;
++      return search_func_pointer (TREE_TYPE(type));
++	
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++/* Check whether the specified rtx contains PLUS rtx with used flag.  */
++static int
++check_used_flag (rtx x)
++{
++  register int i, j;
++  register enum rtx_code code;
++  register const char *format_ptr;
++
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case REG:
++    case QUEUED:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++      return FALSE;
++
++    case PLUS:
++      if (x->used)
++	return TRUE;
++
++    default:
++      break;
++    }
++
++  format_ptr = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++)
++    {
++      switch (*format_ptr++)
++	{
++	case 'e':
++	  if (check_used_flag (XEXP (x, i)))
++	    return TRUE;
++	  break;
++
++	case 'E':
++	  for (j = 0; j < XVECLEN (x, i); j++)
++	    if (check_used_flag (XVECEXP (x, i, j)))
++	      return TRUE;
++	  break;
++	}
++    }
++
++  return FALSE;
++}
++
++
++/* Reset used flag of every insns after the spcecified insn.  */
++static void
++reset_used_flags_for_insns (rtx insn)
++{
++  int i, j;
++  enum rtx_code code;
++  const char *format_ptr;
++
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	code = GET_CODE (insn);
++	insn->used = 0;
++	format_ptr = GET_RTX_FORMAT (code);
++
++	for (i = 0; i < GET_RTX_LENGTH (code); i++)
++	  {
++	    switch (*format_ptr++)
++	      {
++	      case 'e':
++		reset_used_flags_of_plus (XEXP (insn, i));
++		break;
++			
++	      case 'E':
++		for (j = 0; j < XVECLEN (insn, i); j++)
++		  reset_used_flags_of_plus (XVECEXP (insn, i, j));
++		break;
++	      }
++	  }
++      }
++}
++
++
++/* Reset used flag of every variables in the specified block.  */
++static void
++reset_used_flags_for_decls (tree block)
++{
++  tree types;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      types = BLOCK_VARS(block);
++	
++      for (types= BLOCK_VARS(block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable and
++	     also skip an global variable.  */
++	  if (! DECL_EXTERNAL (types))
++	    {
++	      if (! DECL_RTL_SET_P (types))
++		continue;
++	      home = DECL_RTL (types);
++
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == PLUS
++		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++		{
++		  XEXP (home, 0)->used = 0;
++		}
++	    }
++	}
++
++      reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++/* Reset the used flag of every PLUS rtx derived from the specified rtx.  */
++static void
++reset_used_flags_of_plus (rtx x)
++{
++  int i, j;
++  enum rtx_code code;
++  const char *format_ptr;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++      /* These types may be freely shared so we needn't do any resetting
++	 for them.  */
++    case REG:
++    case QUEUED:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++      return;
++
++    case INSN:
++    case JUMP_INSN:
++    case CALL_INSN:
++    case NOTE:
++    case LABEL_REF:
++    case BARRIER:
++      /* The chain of insns is not being copied.  */
++      return;
++      
++    case PLUS:
++      x->used = 0;
++      break;
++
++    case CALL_PLACEHOLDER:
++      reset_used_flags_for_insns (XEXP (x, 0));
++      reset_used_flags_for_insns (XEXP (x, 1));
++      reset_used_flags_for_insns (XEXP (x, 2));
++      break;
++
++    default:
++      break;
++    }
++
++  format_ptr = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++)
++    {
++      switch (*format_ptr++)
++	{
++	case 'e':
++	  reset_used_flags_of_plus (XEXP (x, i));
++	  break;
++
++	case 'E':
++	  for (j = 0; j < XVECLEN (x, i); j++)
++	    reset_used_flags_of_plus (XVECEXP (x, i, j));
++	  break;
++	}
++    }
++}
++
++
++/* Generate the prologue insns of the protector into the specified insn.  */
++static void
++rtl_prologue (rtx insn)
++{
++#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
++#undef HAS_INIT_SECTION
++#define HAS_INIT_SECTION
++#endif
++
++  rtx _val;
++
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == NOTE
++	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
++      break;
++  
++#if !defined (HAS_INIT_SECTION)
++  /* If this function is `main', skip a call to `__main'
++     to run guard instruments after global initializers, etc.  */
++  if (DECL_NAME (current_function_decl)
++      && MAIN_NAME_P (DECL_NAME (current_function_decl))
++      && DECL_CONTEXT (current_function_decl) == NULL_TREE)
++    {
++      rtx fbinsn = insn;
++      for (; insn; insn = NEXT_INSN (insn))
++	if (GET_CODE (insn) == NOTE
++	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
++	  break;
++      if (insn == 0)
++	insn = fbinsn;
++    }
++#endif
++
++  /* Mark the next insn of FUNCTION_BEG insn.  */
++  prologue_insert_point = NEXT_INSN (insn);
++		
++  start_sequence ();
++
++  _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
++  emit_move_insn ( guard_area, _guard);
++
++  _val = get_insns ();
++  end_sequence ();
++
++  emit_insn_before (_val, prologue_insert_point);
++}
++
++
++/* Generate the epilogue insns of the protector into the specified insn.  */
++static void
++rtl_epilogue (rtx insn)
++{
++  rtx if_false_label;
++  rtx _val;
++  rtx funcname;
++  tree funcstr;
++  int  flag_have_return = FALSE;
++		
++  start_sequence ();
++
++#ifdef HAVE_return
++  if (HAVE_return)
++    {
++      rtx insn;
++      return_label = gen_label_rtx ();
++      
++      for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
++	if (GET_CODE (insn) == JUMP_INSN
++	    && GET_CODE (PATTERN (insn)) == RETURN
++	    && GET_MODE (PATTERN (insn)) == VOIDmode)
++	  {
++	    rtx pat = gen_rtx_SET (VOIDmode,
++				   pc_rtx,
++				   gen_rtx_LABEL_REF (VOIDmode,
++						      return_label));
++	    PATTERN (insn) = pat;
++	    flag_have_return = TRUE;
++	  }
++
++
++      emit_label (return_label);
++    }
++#endif
++
++  /*                                          if (guard_area != _guard) */
++  compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
++
++  if_false_label = gen_label_rtx ();		/* { */
++  emit_jump_insn ( gen_beq(if_false_label));
++
++  /* generate string for the current function name */
++  funcstr = build_string (strlen(current_function_name ())+1,
++			  current_function_name ());
++  TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
++  funcname = output_constant_def (funcstr, 1);
++
++  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__stack_smash_handler"),
++		     0, VOIDmode, 2,
++                     XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
++
++  /* generate RTL to return from the current function */
++		
++  emit_barrier ();				/* } */
++  emit_label (if_false_label);
++
++  /* generate RTL to return from the current function */
++  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
++    use_return_register ();
++
++#ifdef HAVE_return
++  if (HAVE_return && flag_have_return)
++    {
++      emit_jump_insn (gen_return ());
++      emit_barrier ();
++    }
++#endif
++  
++  _val = get_insns ();
++  end_sequence ();
++
++  emit_insn_after (_val, insn);
++}
++
++
++/* For every variable which type is character array, moves its location
++   in the stack frame to the sweep_frame_offset position.  */
++static void
++arrange_var_order (tree block)
++{
++  tree types;
++  HOST_WIDE_INT offset;
++    
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      /* arrange the location of character arrays in depth first.  */
++      arrange_var_order (BLOCK_SUBBLOCKS (block));
++      
++      for (types = BLOCK_VARS (block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable.  */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
++	      && TREE_CODE (types) == VAR_DECL
++	      && ! DECL_ARTIFICIAL (types)
++	      && DECL_RTL_SET_P (types)
++	      && GET_CODE (DECL_RTL (types)) == MEM
++	      && GET_MODE (DECL_RTL (types)) == BLKmode
++
++	      && (is_array=0,
++		  search_string_def (TREE_TYPE (types))
++		  || (! current_function_defines_vulnerable_string && is_array)))
++	    {
++	      rtx home = DECL_RTL (types);
++
++	      if (!(GET_CODE (home) == MEM
++		    && (GET_CODE (XEXP (home, 0)) == MEM
++			|| (GET_CODE (XEXP (home, 0)) == REG
++			    && XEXP (home, 0) != virtual_stack_vars_rtx
++			    && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
++			    && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
++#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
++			    && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
++#endif
++			    ))))
++		{
++		  /* Found a string variable.  */
++		  HOST_WIDE_INT var_size =
++		    ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
++		     / BITS_PER_UNIT);
++
++		  /* Confirmed it is BLKmode.  */
++		  int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++		  var_size = CEIL_ROUND (var_size, alignment);
++
++		  /* Skip the variable if it is top of the region
++		     specified by sweep_frame_offset.  */
++		  offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
++		  if (offset == sweep_frame_offset - var_size)
++		    sweep_frame_offset -= var_size;
++		      
++		  else if (offset < sweep_frame_offset - var_size)
++		    sweep_string_variable (DECL_RTL (types), var_size);
++		}
++	    }
++	}
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++/* To protect every pointer argument and move character arrays in the argument,
++   Copy those variables to the top of the stack frame and move the location of
++   character arrays to the posion of sweep_frame_offset.  */
++static void
++copy_args_for_protection (void)
++{
++  tree parms = DECL_ARGUMENTS (current_function_decl);
++  rtx temp_rtx;
++
++  parms = DECL_ARGUMENTS (current_function_decl);
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    int string_p;
++	    rtx seq;
++
++	    string_p = search_string_def (TREE_TYPE(parms));
++
++	    /* Check if it is a candidate to move.  */
++	    if (string_p || search_pointer_def (TREE_TYPE (parms)))
++	      {
++		int arg_size
++		  = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
++		     / BITS_PER_UNIT);
++		tree passed_type = DECL_ARG_TYPE (parms);
++		tree nominal_type = TREE_TYPE (parms);
++		
++		start_sequence ();
++
++		if (GET_CODE (DECL_RTL (parms)) == REG)
++		  {
++		    rtx safe = 0;
++		    
++		    change_arg_use_of_insns (prologue_insert_point,
++					     DECL_RTL (parms), &safe, 0);
++		    if (safe)
++		      {
++			/* Generate codes for copying the content.  */
++			rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
++		    
++			/* Avoid register elimination in gcse.c.  */
++			PATTERN (movinsn)->volatil = 1;
++			
++			/* Save debugger info.  */
++			SET_DECL_RTL (parms, safe);
++		      }
++		  }
++		else if (GET_CODE (DECL_RTL (parms)) == MEM
++			 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
++		  {
++		    rtx movinsn;
++		    rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
++
++		    /* Generate codes for copying the content.  */
++		    movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
++		    /* Avoid register elimination in gcse.c.  */
++		    PATTERN (movinsn)->volatil = 1;
++
++		    /* Change the addressof information to the newly
++		       allocated pseudo register.  */
++		    emit_move_insn (DECL_RTL (parms), safe);
++
++		    /* Save debugger info.  */
++		    SET_DECL_RTL (parms, safe);
++		  }
++			
++		/* See if the frontend wants to pass this by invisible
++		   reference.  */
++		else if (passed_type != nominal_type
++			 && POINTER_TYPE_P (passed_type)
++			 && TREE_TYPE (passed_type) == nominal_type)
++		  {
++		    rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
++
++		    change_arg_use_of_insns (prologue_insert_point,
++					     orig, &safe, 0);
++		    if (safe)
++		      {
++			/* Generate codes for copying the content.  */
++			rtx movinsn = emit_move_insn (safe, orig);
++		    
++			/* Avoid register elimination in gcse.c  */
++			PATTERN (movinsn)->volatil = 1;
++			
++			/* Save debugger info.  */
++			SET_DECL_RTL (parms, safe);
++		      }
++		  }
++
++		else
++		  {
++		    /* Declare temporary local variable for parms.  */
++		    temp_rtx
++		      = assign_stack_local (DECL_MODE (parms), arg_size,
++					    DECL_MODE (parms) == BLKmode ?
++					    -1 : 0);
++		    
++		    MEM_IN_STRUCT_P (temp_rtx)
++		      = AGGREGATE_TYPE_P (TREE_TYPE (parms));
++		    set_mem_alias_set (temp_rtx, get_alias_set (parms));
++
++		    /* Generate codes for copying the content.  */
++		    store_expr (parms, temp_rtx, 0);
++
++		    /* Change the reference for each instructions.  */
++		    move_arg_location (prologue_insert_point, DECL_RTL (parms),
++				       temp_rtx, arg_size);
++
++		    /* Change the location of parms variable.  */
++		    SET_DECL_RTL (parms, temp_rtx);
++		  }
++
++		seq = get_insns ();
++		end_sequence ();
++		emit_insn_before (seq, prologue_insert_point);
++
++#ifdef FRAME_GROWS_DOWNWARD
++		/* Process the string argument.  */
++		if (string_p && DECL_MODE (parms) == BLKmode)
++		  {
++		    int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++		    arg_size = CEIL_ROUND (arg_size, alignment);
++			
++		    /* Change the reference for each instructions.  */
++		    sweep_string_variable (DECL_RTL (parms), arg_size);
++		  }
++#endif
++	      }
++	  }
++      }
++}
++
++
++/* Sweep a string variable to the positon of sweep_frame_offset in the 
++   stack frame, that is a last position of string variables.  */
++static void
++sweep_string_variable (rtx sweep_var, HOST_WIDE_INT var_size)
++{
++  HOST_WIDE_INT sweep_offset;
++
++  switch (GET_CODE (sweep_var))
++    {
++    case MEM:
++      if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
++	  && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
++	return;
++      sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
++      break;
++    case CONST_INT:
++      sweep_offset = INTVAL (sweep_var);
++      break;
++    default:
++      abort ();
++    }
++
++  /* Scan all declarations of variables and fix the offset address of
++     the variable based on the frame pointer.  */
++  sweep_string_in_decls (DECL_INITIAL (current_function_decl),
++			 sweep_offset, var_size);
++
++  /* Scan all argument variable and fix the offset address based on
++     the frame pointer.  */
++  sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
++			sweep_offset, var_size);
++
++  /* For making room for sweep variable, scan all insns and
++     fix the offset address of the variable that is based on frame pointer.  */
++  sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
++
++
++  /* Clear all the USED bits in operands of all insns and declarations of
++     local variables.  */
++  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
++  reset_used_flags_for_insns (function_first_insn);
++
++  sweep_frame_offset -= var_size;
++}
++
++
++
++/* Move an argument to the local variable addressed by frame_offset.  */
++static void
++move_arg_location (rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)
++{
++  /* For making room for sweep variable, scan all insns and
++     fix the offset address of the variable that is based on frame pointer.  */
++  change_arg_use_of_insns (insn, orig, &new, var_size);
++
++
++  /* Clear all the USED bits in operands of all insns and declarations
++     of local variables.  */
++  reset_used_flags_for_insns (insn);
++}
++
++
++/* Sweep character arrays declared as local variable.  */
++static void
++sweep_string_in_decls (tree block, HOST_WIDE_INT sweep_offset,
++		       HOST_WIDE_INT sweep_size)
++{
++  tree types;
++  HOST_WIDE_INT offset;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable and
++	     also skip an global variable.  */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
++	    
++	    if (! DECL_RTL_SET_P (types))
++	      continue;
++
++	    home = DECL_RTL (types);
++
++	    /* Process for static local variable.  */
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
++	      continue;
++
++	    if (GET_CODE (home) == MEM
++		&& XEXP (home, 0) == virtual_stack_vars_rtx)
++	      {
++		offset = 0;
++		
++		/* the operand related to the sweep variable.  */
++		if (sweep_offset <= offset
++		    && offset < sweep_offset + sweep_size)
++		  {
++		    offset = sweep_frame_offset - sweep_size - sweep_offset;
++
++		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
++						    offset);
++		    XEXP (home, 0)->used = 1;
++		  }
++		else if (sweep_offset <= offset
++			 && offset < sweep_frame_offset)
++		  {
++		    /* the rest of variables under sweep_frame_offset,
++		       shift the location.  */
++		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
++						    -sweep_size);
++		    XEXP (home, 0)->used = 1;
++		  }
++	      }
++		
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == MEM)
++	      {
++		/* Process for dynamically allocated array.  */
++		home = XEXP (home, 0);
++	      }
++		
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == PLUS
++		&& XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
++		&& GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++	      {
++		if (! XEXP (home, 0)->used)
++		  {
++		    offset = AUTO_OFFSET(XEXP (home, 0));
++
++		    /* the operand related to the sweep variable.  */
++		    if (sweep_offset <= offset
++			&& offset < sweep_offset + sweep_size)
++		      {
++
++			offset
++			  += sweep_frame_offset - sweep_size - sweep_offset;
++			XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								      offset);
++
++			/* mark */
++			XEXP (home, 0)->used = 1;
++		      }
++		    else if (sweep_offset <= offset
++			     && offset < sweep_frame_offset)
++		      {
++			/* the rest of variables under sweep_frame_offset,
++			   so shift the location.  */
++
++			XEXP (XEXP (home, 0), 1)
++			  = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++
++			/* mark */
++			XEXP (home, 0)->used = 1;
++		      }
++		  }
++	      }
++	  }
++	}
++
++      sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
++			     sweep_offset, sweep_size);
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++/* Sweep character arrays declared as argument.  */
++static void
++sweep_string_in_args (tree parms, HOST_WIDE_INT sweep_offset,
++		      HOST_WIDE_INT sweep_size)
++{
++  rtx home;
++  HOST_WIDE_INT offset;
++    
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    home = DECL_INCOMING_RTL (parms);
++
++	    if (XEXP (home, 0)->used)
++	      continue;
++
++	    offset = AUTO_OFFSET(XEXP (home, 0));
++
++	    /* the operand related to the sweep variable.  */
++	    if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
++	      {
++		if (sweep_offset <= offset
++		    && offset < sweep_offset + sweep_size)
++		  {
++		    offset += sweep_frame_offset - sweep_size - sweep_offset;
++		    XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								  offset);
++
++		    /* mark */
++		    XEXP (home, 0)->used = 1;
++		  }
++		else if (sweep_offset <= offset
++			 && offset < sweep_frame_offset)
++		  {
++		    /* the rest of variables under sweep_frame_offset,
++		       shift the location.  */
++		    XEXP (XEXP (home, 0), 1)
++		      = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++
++		    /* mark */
++		    XEXP (home, 0)->used = 1;
++		  }
++	      }
++	  }
++      }
++}
++
++
++/* Set to 1 when the instruction contains virtual registers.  */
++static int has_virtual_reg;
++
++/* Sweep the specified character array for every insns. The array starts from
++   the sweep_offset and its size is sweep_size.  */
++static void
++sweep_string_use_of_insns (rtx insn, HOST_WIDE_INT sweep_offset,
++			   HOST_WIDE_INT sweep_size)
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	has_virtual_reg = FALSE;
++	sweep_string_in_operand (insn, &PATTERN (insn),
++				 sweep_offset, sweep_size);
++	sweep_string_in_operand (insn, &REG_NOTES (insn),
++				 sweep_offset, sweep_size);
++      }
++}
++
++
++/* Sweep the specified character array, which starts from the sweep_offset and
++   its size is sweep_size.
++
++   When a pointer is given,
++   if it points the address higher than the array, it stays.
++   if it points the address inside the array, it changes to point inside
++   the sweeped array.
++   if it points the address lower than the array, it shifts higher address by
++   the sweep_size.  */
++static void
++sweep_string_in_operand (rtx insn, rtx *loc,
++			 HOST_WIDE_INT sweep_offset, HOST_WIDE_INT sweep_size)
++{
++  rtx x = *loc;
++  enum rtx_code code;
++  int i, j, k = 0;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case ADDRESSOF:
++      return;
++	    
++    case REG:
++      if (x == virtual_incoming_args_rtx
++	  || x == virtual_stack_vars_rtx
++	  || x == virtual_stack_dynamic_rtx
++	  || x == virtual_outgoing_args_rtx
++	  || x == virtual_cfa_rtx)
++	has_virtual_reg = TRUE;
++      return;
++      
++    case SET:
++      /*
++	skip setjmp setup insn and setjmp restore insn
++	Example:
++	(set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
++	(set (virtual_stack_vars_rtx) (REG))
++      */
++      if (GET_CODE (XEXP (x, 0)) == MEM
++	  && XEXP (x, 1) == virtual_stack_vars_rtx)
++	return;
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == REG)
++	return;
++      break;
++	    
++    case PLUS:
++      /* Handle typical case of frame register plus constant.  */
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
++	{
++	  if (x->used)
++	    goto single_use_of_virtual_reg;
++	  
++	  offset = AUTO_OFFSET(x);
++
++	  /* When arguments grow downward, the virtual incoming
++	     args pointer points to the top of the argument block,
++	     so block is identified by the pointer - 1.
++	     The flag is set at the copy_rtx_and_substitute in integrate.c  */
++	  if (RTX_INTEGRATED_P (x))
++	    k = -1;
++
++	  /* the operand related to the sweep variable.  */
++	  if (sweep_offset <= offset + k
++	      && offset + k < sweep_offset + sweep_size)
++	    {
++	      offset += sweep_frame_offset - sweep_size - sweep_offset;
++
++	      XEXP (x, 0) = virtual_stack_vars_rtx;
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	      x->used = 1;
++	    }
++	  else if (sweep_offset <= offset + k
++		   && offset + k < sweep_frame_offset)
++	    {
++	      /* the rest of variables under sweep_frame_offset,
++		 shift the location.  */
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++	      x->used = 1;
++	    }
++	  
++	single_use_of_virtual_reg:
++	  if (has_virtual_reg) {
++	    /* excerpt from insn_invalid_p in recog.c  */
++	    int icode = recog_memoized (insn);
++
++	    if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
++	      {
++		rtx temp, seq;
++		
++		start_sequence ();
++		temp = force_operand (x, NULL_RTX);
++		seq = get_insns ();
++		end_sequence ();
++		
++		emit_insn_before (seq, insn);
++		if (! validate_change (insn, loc, temp, 0)
++		    && !validate_replace_rtx (x, temp, insn))
++		  fatal_insn ("sweep_string_in_operand", insn);
++	      }
++	  }
++
++	  has_virtual_reg = TRUE;
++	  return;
++	}
++
++#ifdef FRAME_GROWS_DOWNWARD
++      /* Alert the case of frame register plus constant given by reg.  */
++      else if (XEXP (x, 0) == virtual_stack_vars_rtx
++	       && GET_CODE (XEXP (x, 1)) == REG)
++	fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
++#endif
++
++      /*
++	process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      sweep_string_use_of_insns (XEXP (x, i),
++					 sweep_offset, sweep_size);
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	/*
++	  virtual_stack_vars_rtx without offset
++	  Example:
++	    (set (reg:SI xx) (reg:SI 78))
++	    (set (reg:SI xx) (MEM (reg:SI 78)))
++	*/
++	if (XEXP (x, i) == virtual_stack_vars_rtx)
++	  fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
++	sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
++}   
++
++
++/* Change the use of an argument to the use of the duplicated variable for
++   every insns, The variable is addressed by new rtx.  */
++static void
++change_arg_use_of_insns (rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	rtx seq;
++	
++	start_sequence ();
++	change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
++
++	seq = get_insns ();
++	end_sequence ();
++	emit_insn_before (seq, insn);
++
++	/* load_multiple insn from virtual_incoming_args_rtx have several
++	   load insns. If every insn change the load address of arg
++	   to frame region, those insns are moved before the PARALLEL insn
++	   and remove the PARALLEL insn.  */
++	if (GET_CODE (PATTERN (insn)) == PARALLEL
++	    && XVECLEN (PATTERN (insn), 0) == 0)
++	  delete_insn (insn);
++      }
++}
++
++
++/* Change the use of an argument to the use of the duplicated variable for
++   every rtx derived from the x.  */
++static void
++change_arg_use_in_operand (rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)
++{
++  enum rtx_code code;
++  int i, j;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return;
++
++    case MEM:
++      /* Handle special case of MEM (incoming_args).  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 0) == virtual_incoming_args_rtx)
++	{
++	  offset = 0;
++
++	  /* the operand related to the sweep variable.  */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
++
++	    offset = AUTO_OFFSET(XEXP (*new, 0))
++	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
++
++	    XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
++	    XEXP (x, 0)->used = 1;
++
++	    return;
++	  }
++	}
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 0) == virtual_incoming_args_rtx
++	  && GET_CODE (XEXP (x, 1)) == CONST_INT
++	  && ! x->used)
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  /* the operand related to the sweep variable.  */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
++	    {
++
++	      offset = (AUTO_OFFSET(XEXP (*new, 0))
++			+ (offset - AUTO_OFFSET(XEXP (orig, 0))));
++
++	      XEXP (x, 0) = virtual_stack_vars_rtx;
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	      x->used = 1;
++
++	      return;
++	    }
++
++	  /*
++	    process further subtree:
++	    Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	    (const_int 5))
++	  */
++	}
++      break;
++
++    case SET:
++      /* Handle special case of "set (REG or MEM) (incoming_args)".
++	 It means that the the address of the 1st argument is stored.  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 1) == virtual_incoming_args_rtx)
++	{
++	  offset = 0;
++
++	  /* the operand related to the sweep variable.  */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
++	    {
++	      offset = (AUTO_OFFSET(XEXP (*new, 0))
++			+ (offset - AUTO_OFFSET(XEXP (orig, 0))));
++
++	      XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
++							  offset), NULL_RTX);
++	      XEXP (x, 1)->used = 1;
++
++	      return;
++	    }
++	}
++      break;
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      change_arg_use_of_insns (XEXP (x, i), orig, new, size);
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    case PARALLEL:
++      for (j = 0; j < XVECLEN (x, 0); j++)
++	{
++	  change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
++	}
++      if (recog_memoized (insn) < 0)
++	{
++	  for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
++	    {
++	      /* if parallel insn has a insn used virtual_incoming_args_rtx,
++		 the insn is removed from this PARALLEL insn.  */
++	      if (check_used_flag (XVECEXP (x, 0, j)))
++		{
++		  emit_insn (XVECEXP (x, 0, j));
++		  XVECEXP (x, 0, j) = NULL;
++		}
++	      else
++		XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
++	    }
++	  PUT_NUM_ELEM (XVEC (x, 0), i);
++	}
++      return;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (XEXP (x, i) == orig)
++	  {
++	    if (*new == 0)
++	      *new = gen_reg_rtx (GET_MODE (orig));
++	    XEXP (x, i) = *new;
++	    continue;
++	  }
++	change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	{
++	  if (XVECEXP (x, i, j) == orig)
++	    {
++	      if (*new == 0)
++		*new = gen_reg_rtx (GET_MODE (orig));
++	      XVECEXP (x, i, j) = *new;
++	      continue;
++	    }
++	  change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
++	}
++}   
++
++
++/* Validate every instructions from the specified instruction.
++   
++   The stack protector prohibits to generate machine specific frame addressing
++   for the first rtl generation. The prepare_stack_protection must convert
++   machine independent frame addressing to machine specific frame addressing,
++   so instructions for inline functions, which skip the conversion of
++   the stack protection, validate every instructions.  */
++static void
++validate_insns_of_varrefs (rtx insn)
++{
++  rtx next;
++
++  /* Initialize recognition, indicating that volatile is OK.  */
++  init_recog ();
++
++  for (; insn; insn = next)
++    {
++      next = NEXT_INSN (insn);
++      if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	  || GET_CODE (insn) == CALL_INSN)
++	{
++	  /* excerpt from insn_invalid_p in recog.c  */
++	  int icode = recog_memoized (insn);
++
++	  if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
++	    validate_operand_of_varrefs (insn, &PATTERN (insn));
++	}
++    }
++
++  init_recog_no_volatile ();
++}
++
++
++/* Validate frame addressing of the rtx and covert it to machine specific one.  */
++static void
++validate_operand_of_varrefs (rtx insn, rtx *loc)
++{
++  enum rtx_code code;
++  rtx x, temp, seq;
++  int i, j;
++  const char *fmt;
++
++  x = *loc;
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case USE:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return;
++
++    case PLUS:
++      /* validate insn of frame register plus constant.  */
++      if (GET_CODE (x) == PLUS
++	  && XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
++	{
++	  start_sequence ();
++
++	  { /* excerpt from expand_binop in optabs.c  */
++	    optab binoptab = add_optab;
++	    enum machine_mode mode = GET_MODE (x);
++	    int icode = (int) binoptab->handlers[(int) mode].insn_code;
++	    enum machine_mode mode1 = insn_data[icode].operand[2].mode;
++	    rtx pat;
++	    rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
++	    temp = gen_reg_rtx (mode);
++
++	    /* Now, if insn's predicates don't allow offset operands,
++	       put them into pseudo regs.  */
++
++	    if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
++		&& mode1 != VOIDmode)
++	      xop1 = copy_to_mode_reg (mode1, xop1);
++
++	    pat = GEN_FCN (icode) (temp, xop0, xop1);
++	    if (pat)
++	      emit_insn (pat);
++	    else
++	      abort (); /* there must be add_optab handler.  */
++	  }	      
++	  seq = get_insns ();
++	  end_sequence ();
++	  
++	  emit_insn_before (seq, insn);
++	  if (! validate_change (insn, loc, temp, 0))
++	    abort ();
++	  return;
++	}
++	break;
++      
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      validate_insns_of_varrefs (XEXP (x, i));
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      validate_operand_of_varrefs (insn, &XEXP (x, i));
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
++}
++
++
++
++/* Return size that is not allocated for stack frame. It will be allocated
++   to modify the home of pseudo registers called from global_alloc.  */
++HOST_WIDE_INT
++get_frame_free_size (void)
++{
++  if (! flag_propolice_protection)
++    return 0;
++
++  return push_allocated_offset - push_frame_offset;
++}
++
++
++/* The following codes are invoked after the instantiation of pseudo registers.
++
++   Reorder local variables to place a peudo register after buffers to avoid
++   the corruption of local variables that could be used to further corrupt
++   arbitrary memory locations.  */
++#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
++static void push_frame (HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_in_operand (rtx, rtx, HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_of_reg_equiv_constant (HOST_WIDE_INT, HOST_WIDE_INT);
++static void reset_used_flags_for_push_frame (void);
++static int check_out_of_frame_access (rtx, HOST_WIDE_INT);
++static int check_out_of_frame_access_in_operand (rtx, HOST_WIDE_INT);
++#endif
++
++
++/* Assign stack local at the stage of register allocater. if a pseudo reg is
++   spilled out from such an allocation, it is allocated on the stack.
++   The protector keep the location be lower stack region than the location of
++   sweeped arrays.  */
++rtx
++assign_stack_local_for_pseudo_reg (enum machine_mode mode,
++				   HOST_WIDE_INT size, int align)
++{
++#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
++  return assign_stack_local (mode, size, align);
++#else
++  tree blocks = DECL_INITIAL (current_function_decl);
++  rtx new;
++  HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
++  int first_call_from_purge_addressof, first_call_from_global_alloc;
++
++  if (! flag_propolice_protection
++      || size == 0
++      || ! blocks
++      || current_function_is_inlinable
++      || ! search_string_from_argsandvars (CALL_FROM_PUSH_FRAME)
++      || current_function_contains_functions)
++    return assign_stack_local (mode, size, align);
++
++  first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
++  first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
++  saved_cse_not_expected = cse_not_expected;
++
++  starting_frame = ((STARTING_FRAME_OFFSET)
++		    ? STARTING_FRAME_OFFSET : BIGGEST_ALIGNMENT / BITS_PER_UNIT);
++  units_per_push = MAX (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
++			GET_MODE_SIZE (mode));
++    
++  if (first_call_from_purge_addressof)
++    {
++      push_frame_offset = push_allocated_offset;
++      if (check_out_of_frame_access (get_insns (), starting_frame))
++	{
++	  /* After the purge_addressof stage, there may be an instruction which
++	     have the pointer less than the starting_frame. 
++	     if there is an access below frame, push dummy region to seperate
++	     the address of instantiated variables.  */
++	  push_frame (GET_MODE_SIZE (DImode), 0);
++	  assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
++	}
++    }
++
++  if (first_call_from_global_alloc)
++    {
++      push_frame_offset = push_allocated_offset = 0;
++      if (check_out_of_frame_access (get_insns (), starting_frame))
++	{
++	  if (STARTING_FRAME_OFFSET)
++	    {
++	      /* if there is an access below frame, push dummy region 
++		 to seperate the address of instantiated variables.  */
++	      push_frame (GET_MODE_SIZE (DImode), 0);
++	      assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
++	    }
++	  else
++	    push_allocated_offset = starting_frame;
++	}
++    }
++
++  saved_frame_offset = frame_offset;
++  frame_offset = push_frame_offset;
++
++  new = assign_stack_local (mode, size, align);
++
++  push_frame_offset = frame_offset;
++  frame_offset = saved_frame_offset;
++  
++  if (push_frame_offset > push_allocated_offset)
++    {
++      push_frame (units_per_push,
++		  push_allocated_offset + STARTING_FRAME_OFFSET);
++
++      assign_stack_local (BLKmode, units_per_push, -1);
++      push_allocated_offset += units_per_push;
++    }
++
++  /* At the second call from global alloc, alpha push frame and assign
++     a local variable to the top of the stack.  */
++  if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
++    push_frame_offset = push_allocated_offset = 0;
++
++  return new;
++#endif
++}
++
++
++#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
++
++/* push frame infomation for instantiating pseudo register at the top of stack.
++   This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is 
++   not defined.
++
++   It is called by purge_addressof function and global_alloc (or reload)
++   function.  */
++static void
++push_frame (HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)
++{
++  reset_used_flags_for_push_frame();
++
++  /* Scan all declarations of variables and fix the offset address of
++     the variable based on the frame pointer.  */
++  push_frame_in_decls (DECL_INITIAL (current_function_decl),
++		       var_size, boundary);
++
++  /* Scan all argument variable and fix the offset address based on
++     the frame pointer.  */
++  push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
++		      var_size, boundary);
++
++  /* Scan all operands of all insns and fix the offset address
++     based on the frame pointer.  */
++  push_frame_of_insns (get_insns (), var_size, boundary);
++
++  /* Scan all reg_equiv_memory_loc and reg_equiv_constant.  */
++  push_frame_of_reg_equiv_memory_loc (var_size, boundary);
++  push_frame_of_reg_equiv_constant (var_size, boundary);
++
++  reset_used_flags_for_push_frame();
++}
++
++
++/* Reset used flag of every insns, reg_equiv_memory_loc,
++   and reg_equiv_constant.  */
++static void
++reset_used_flags_for_push_frame(void)
++{
++  int i;
++  extern rtx *reg_equiv_memory_loc;
++  extern rtx *reg_equiv_constant;
++
++  /* Clear all the USED bits in operands of all insns and declarations of
++     local vars.  */
++  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
++  reset_used_flags_for_insns (get_insns ());
++
++
++  /* The following codes are processed if the push_frame is called from 
++     global_alloc (or reload) function.  */
++  if (reg_equiv_memory_loc == 0)
++    return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_memory_loc[i])
++      {
++	rtx x = reg_equiv_memory_loc[i];
++
++	if (GET_CODE (x) == MEM
++	    && GET_CODE (XEXP (x, 0)) == PLUS
++	    && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
++	  {
++	    /* reset */
++	    XEXP (x, 0)->used = 0;
++	  }
++      }
++
++  
++  if (reg_equiv_constant == 0)
++    return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_constant[i])
++      {
++	rtx x = reg_equiv_constant[i];
++
++	if (GET_CODE (x) == PLUS
++	    && AUTO_BASEPTR (x) == frame_pointer_rtx)
++	  {
++	    /* reset */
++	    x->used = 0;
++	  }
++      }
++}
++
++
++/* Push every variables declared as a local variable and make a room for
++   instantiated register.  */
++static void
++push_frame_in_decls (tree block, HOST_WIDE_INT push_size,
++		     HOST_WIDE_INT boundary)
++{
++  tree types;
++  HOST_WIDE_INT offset;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable and
++	     also skip an global variable.  */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
++	    {
++	      if (! DECL_RTL_SET_P (types))
++		continue;
++
++	      home = DECL_RTL (types);
++
++	      /* Process for static local variable.  */
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
++		continue;
++
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == REG)
++		{
++		  if (XEXP (home, 0) != frame_pointer_rtx
++		      || boundary != 0)
++		    continue;
++
++		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
++						  push_size);
++
++		  /* mark */
++		  XEXP (home, 0)->used = 1;
++		}
++		
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == MEM)
++		{
++		  /* Process for dynamically allocated array.  */
++		  home = XEXP (home, 0);
++		}
++		
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == PLUS
++		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++		{
++		  offset = AUTO_OFFSET(XEXP (home, 0));
++
++		  if (! XEXP (home, 0)->used
++		      && offset >= boundary)
++		    {
++		      offset += push_size;
++		      XEXP (XEXP (home, 0), 1)
++			= gen_rtx_CONST_INT (VOIDmode, offset);
++		      
++		      /* mark */
++		      XEXP (home, 0)->used = 1;
++		    }
++		}
++	    }
++	}
++
++      push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++/* Push every variables declared as an argument and make a room for
++   instantiated register.  */
++static void
++push_frame_in_args (tree parms, HOST_WIDE_INT push_size,
++		    HOST_WIDE_INT boundary)
++{
++  rtx home;
++  HOST_WIDE_INT offset;
++    
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms))
++	  {
++	    home = DECL_INCOMING_RTL (parms);
++	    offset = AUTO_OFFSET(XEXP (home, 0));
++
++	    if (XEXP (home, 0)->used || offset < boundary)
++	      continue;
++
++	    /* the operand related to the sweep variable.  */
++	    if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
++	      {
++		if (XEXP (home, 0) == frame_pointer_rtx)
++		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
++						  push_size);
++		else {
++		  offset += push_size;
++		  XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								offset);
++		}
++
++		/* mark */
++		XEXP (home, 0)->used = 1;
++	      }
++	  }
++      }
++}
++
++
++/* Set to 1 when the instruction has the reference to be pushed.  */
++static int insn_pushed;
++
++/* Tables of equivalent registers with frame pointer.  */
++static int *fp_equiv = 0;
++
++
++/* Push the frame region to make a room for allocated local variable.  */
++static void
++push_frame_of_insns (rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
++{
++  /* init fp_equiv */
++  fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
++		
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	rtx last;
++	
++	insn_pushed = FALSE;
++
++	/* Push frame in INSN operation.  */
++	push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
++
++	/* Push frame in NOTE.  */
++	push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
++
++	/* Push frame in CALL EXPR_LIST.  */
++	if (GET_CODE (insn) == CALL_INSN)
++	  push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
++				 push_size, boundary);
++
++	/* Pushed frame addressing style may not be machine specific one.
++	   so the instruction should be converted to use the machine specific
++	   frame addressing.  */
++	if (insn_pushed
++	    && (last = try_split (PATTERN (insn), insn, 1)) != insn)
++	  {
++	    rtx first = NEXT_INSN (insn);
++	    rtx trial = NEXT_INSN (first);
++	    rtx pattern = PATTERN (trial);
++	    rtx set;
++
++	    /* Update REG_EQUIV info to the first splitted insn.  */
++	    if ((set = single_set (insn))
++		&& find_reg_note (insn, REG_EQUIV, SET_SRC (set))
++		&& GET_CODE (PATTERN (first)) == SET)
++	      {
++		REG_NOTES (first)
++		  = gen_rtx_EXPR_LIST (REG_EQUIV,
++				       SET_SRC (PATTERN (first)),
++				       REG_NOTES (first));
++	      }
++
++	    /* copy the first insn of splitted insns to the original insn and
++	       delete the first insn,
++	       because the original insn is pointed from records:
++	       insn_chain, reg_equiv_init, used for global_alloc.  */
++	    if (cse_not_expected)
++	      {
++		add_insn_before (insn, first);
++		
++		/* Copy the various flags, and other information.  */
++		memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
++		PATTERN (insn) = PATTERN (first);
++		INSN_CODE (insn) = INSN_CODE (first);
++		LOG_LINKS (insn) = LOG_LINKS (first);
++		REG_NOTES (insn) = REG_NOTES (first);
++
++		/* then remove the first insn of splitted insns.  */
++		remove_insn (first);
++		INSN_DELETED_P (first) = 1;
++	      }
++
++	    if (GET_CODE (pattern) == SET
++		&& GET_CODE (XEXP (pattern, 0)) == REG
++		&& GET_CODE (XEXP (pattern, 1)) == PLUS
++		&& XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
++		&& GET_CODE (XEXP (XEXP (pattern, 1), 1)) == CONST_INT)
++	      {
++		rtx offset = XEXP (XEXP (pattern, 1), 1);
++		fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
++
++		delete_insn (trial);
++	      }
++
++	    insn = last;
++	  }
++      }
++
++  /* Clean up.  */
++  free (fp_equiv);
++}
++
++
++/* Push the frame region by changing the operand that points the frame.  */
++static void
++push_frame_in_operand (rtx insn, rtx orig,
++		       HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
++{
++  rtx x = orig;
++  enum rtx_code code;
++  int i, j;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++    case USE:
++      return;
++	    
++    case SET:
++      /*
++	Skip setjmp setup insn and setjmp restore insn
++	alpha case:
++	(set (MEM (reg:SI xx)) (frame_pointer_rtx)))
++	(set (frame_pointer_rtx) (REG))
++      */
++      if (GET_CODE (XEXP (x, 0)) == MEM
++	  && XEXP (x, 1) == frame_pointer_rtx)
++	return;
++      if (XEXP (x, 0) == frame_pointer_rtx
++	  && GET_CODE (XEXP (x, 1)) == REG)
++	return;
++
++      /*
++	powerpc case: restores setjmp address
++	(set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
++	or
++	(set (reg) (plus frame_pointer_rtx const_int -n))
++	(set (frame_pointer_rtx) (reg))
++      */
++      if (GET_CODE (XEXP (x, 0)) == REG
++	  && GET_CODE (XEXP (x, 1)) == PLUS
++	  && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
++	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
++	  && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
++	{
++	  x = XEXP (x, 1);
++	  offset = AUTO_OFFSET(x);
++	  if (x->used || -offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
++	  x->used = 1; insn_pushed = TRUE;
++	  return;
++	}
++
++      /* Reset fp_equiv register.  */
++      else if (GET_CODE (XEXP (x, 0)) == REG
++	  && fp_equiv[REGNO (XEXP (x, 0))])
++	fp_equiv[REGNO (XEXP (x, 0))] = 0;
++
++      /* Propagate fp_equiv register.  */
++      else if (GET_CODE (XEXP (x, 0)) == REG
++	       && GET_CODE (XEXP (x, 1)) == REG
++	       && fp_equiv[REGNO (XEXP (x, 1))])
++	if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
++	    || reg_renumber[REGNO (XEXP (x, 0))] > 0)
++	  fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
++      break;
++
++    case MEM:
++      if (XEXP (x, 0) == frame_pointer_rtx
++	  && boundary == 0)
++	{
++	  XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
++	  XEXP (x, 0)->used = 1; insn_pushed = TRUE;
++	  return;
++	}
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (GET_CODE (XEXP (x, 1)) == CONST_INT
++	  && XEXP (x, 0) == frame_pointer_rtx)
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  if (x->used || offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle alpha case:
++	 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
++      */
++      if (GET_CODE (XEXP (x, 1)) == CONST_INT
++	  && GET_CODE (XEXP (x, 0)) == SUBREG
++	  && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  if (x->used || offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle powerpc case:
++	 (set (reg x) (plus fp const))
++	 (set (.....) (... (plus (reg x) (const B))))
++      */
++      else if (GET_CODE (XEXP (x, 1)) == CONST_INT
++	       && GET_CODE (XEXP (x, 0)) == REG
++	       && fp_equiv[REGNO (XEXP (x, 0))])
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  if (x->used)
++	    return;
++
++	  offset += fp_equiv[REGNO (XEXP (x, 0))];
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle special case of frame register plus reg (constant).
++	 (set (reg x) (const B))
++	 (set (....) (...(plus fp (reg x))))
++      */
++      else if (XEXP (x, 0) == frame_pointer_rtx
++	       && GET_CODE (XEXP (x, 1)) == REG
++	       && PREV_INSN (insn)
++	       && PATTERN (PREV_INSN (insn))
++	       && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
++	       && GET_CODE (SET_SRC (PATTERN (PREV_INSN (insn)))) == CONST_INT)
++	{
++	  offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
++
++	  if (x->used || offset < boundary)
++	    return;
++	  
++	  SET_SRC (PATTERN (PREV_INSN (insn)))
++	    = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1;
++	  XEXP (x, 1)->used = 1;
++
++	  return;
++	}
++      /*
++	Handle special case of frame register plus reg (used).
++	The register already have a pushed offset, just mark this frame
++	addressing.
++      */
++      else if (XEXP (x, 0) == frame_pointer_rtx
++	       && XEXP (x, 1)->used)
++	{
++	  x->used = 1;
++	  return;
++	}
++      /*
++	Process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (FP) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      push_frame_of_insns (XEXP (x, 0), push_size, boundary);
++      push_frame_of_insns (XEXP (x, 1), push_size, boundary);
++      push_frame_of_insns (XEXP (x, 2), push_size, boundary);
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
++	  fatal_insn ("push_frame_in_operand", insn);
++	push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
++}   
++
++
++/* Change the location pointed in reg_equiv_memory_loc.  */
++static void
++push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT push_size,
++				    HOST_WIDE_INT boundary)
++{
++  int i;
++  extern rtx *reg_equiv_memory_loc;
++
++  /* This function is processed if the push_frame is called from 
++     global_alloc (or reload) function.  */
++  if (reg_equiv_memory_loc == 0)
++    return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_memory_loc[i])
++      {
++	rtx x = reg_equiv_memory_loc[i];
++	int offset;
++
++	if (GET_CODE (x) == MEM
++	    && GET_CODE (XEXP (x, 0)) == PLUS
++	    && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
++	  {
++	    offset = AUTO_OFFSET(XEXP (x, 0));
++	    
++	    if (! XEXP (x, 0)->used
++		&& offset >= boundary)
++	      {
++		offset += push_size;
++		XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		/* mark */
++		XEXP (x, 0)->used = 1;
++	      }
++	  }
++	else if (GET_CODE (x) == MEM
++		 && XEXP (x, 0) == frame_pointer_rtx
++		 && boundary == 0)
++	  {
++	    XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
++	    XEXP (x, 0)->used = 1; insn_pushed = TRUE;
++	  }
++      }
++}
++
++
++/* Change the location pointed in reg_equiv_constant.  */
++static void
++push_frame_of_reg_equiv_constant (HOST_WIDE_INT push_size,
++				  HOST_WIDE_INT boundary)
++{
++  int i;
++  extern rtx *reg_equiv_constant;
++
++  /* This function is processed if the push_frame is called from 
++     global_alloc (or reload) function.  */
++  if (reg_equiv_constant == 0)
++    return;
++
++  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
++    if (reg_equiv_constant[i])
++      {
++	rtx x = reg_equiv_constant[i];
++	int offset;
++
++	if (GET_CODE (x) == PLUS
++	    && XEXP (x, 0) == frame_pointer_rtx)
++	  {
++	    offset = AUTO_OFFSET(x);
++	    
++	    if (! x->used
++		&& offset >= boundary)
++	      {
++		offset += push_size;
++		XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		/* mark */
++		x->used = 1;
++	      }
++	  }
++	else if (x == frame_pointer_rtx
++		 && boundary == 0)
++	  {
++	    reg_equiv_constant[i]
++	      = plus_constant (frame_pointer_rtx, push_size);
++	    reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
++	  }
++      }
++}
++
++
++/* Check every instructions if insn's memory reference is out of frame.  */
++static int
++check_out_of_frame_access (rtx insn, HOST_WIDE_INT boundary)
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
++	  return TRUE;
++      }
++  return FALSE;
++}
++
++
++/* Check every operands if the reference is out of frame.  */
++static int
++check_out_of_frame_access_in_operand (rtx orig, HOST_WIDE_INT boundary)
++{
++  rtx x = orig;
++  enum rtx_code code;
++  int i, j;
++  const char *fmt;
++
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return FALSE;
++	    
++    case MEM:
++      if (XEXP (x, 0) == frame_pointer_rtx)
++	if (0 < boundary)
++	  return TRUE;
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (GET_CODE (XEXP (x, 1)) == CONST_INT
++	  && XEXP (x, 0) == frame_pointer_rtx)
++	{
++	  if (0 <= AUTO_OFFSET(x)
++	      && AUTO_OFFSET(x) < boundary)
++	    return TRUE;
++	  return FALSE;
++	}
++      /*
++	Process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      if (check_out_of_frame_access (XEXP (x, 0), boundary))
++	return TRUE;
++      if (check_out_of_frame_access (XEXP (x, 1), boundary))
++	return TRUE;
++      if (check_out_of_frame_access (XEXP (x, 2), boundary))
++	return TRUE;
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
++	  return TRUE;
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
++	  return TRUE;
++
++  return FALSE;
++}
++#endif
+diff -Naur gcc-3.4.4.orig/gcc/protector.h gcc-3.4.4/gcc/protector.h
+--- gcc-3.4.4.orig/gcc/protector.h	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.4/gcc/protector.h	2005-05-29 05:58:14.000000000 +0000
+@@ -0,0 +1,55 @@
++/* RTL buffer overflow protection function for GNU C compiler
++   Copyright (C) 2003 Free Software Foundation, Inc.
++
++This file is part of GCC.
++
++GCC is free software; you can redistribute it and/or modify it under
++the terms of the GNU General Public License as published by the Free
++Software Foundation; either version 2, or (at your option) any later
++version.
++
++GCC is distributed in the hope that it will be useful, but WITHOUT ANY
++WARRANTY; without even the implied warranty of MERCHANTABILITY or
++FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
++for more details.
++
++You should have received a copy of the GNU General Public License
++along with GCC; see the file COPYING.  If not, write to the Free
++Software Foundation, 59 Temple Place - Suite 330, Boston, MA
++02111-1307, USA.  */
++
++
++/* Declare GUARD variable.  */
++#define GUARD_m		Pmode
++#define UNITS_PER_GUARD						\
++  MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
++
++#ifndef L_stack_smash_handler
++
++/* Insert a guard variable before a character buffer and change the order
++ of pointer variables, character buffers and pointer arguments.  */
++
++extern void prepare_stack_protection  (int);
++
++#ifdef TREE_CODE
++/* Search a character array from the specified type tree.  */
++
++extern int search_string_def (tree);
++#endif
++
++/* Examine whether the input contains frame pointer addressing.  */
++
++extern int contains_fp (rtx);
++
++/* Return size that is not allocated for stack frame. It will be allocated
++   to modify the home of pseudo registers called from global_alloc.  */
++
++extern HOST_WIDE_INT get_frame_free_size (void);
++
++/* Allocate a local variable in the stack area before character buffers
++   to avoid the corruption of it.  */
++
++extern rtx assign_stack_local_for_pseudo_reg (enum machine_mode,
++					      HOST_WIDE_INT, int);
++
++#endif
+diff -Naur gcc-3.4.4.orig/gcc/reload1.c gcc-3.4.4/gcc/reload1.c
+--- gcc-3.4.4.orig/gcc/reload1.c	2005-03-17 21:11:35.000000000 +0000
++++ gcc-3.4.4/gcc/reload1.c	2005-05-29 05:58:14.000000000 +0000
+@@ -43,6 +43,7 @@
+ #include "toplev.h"
+ #include "except.h"
+ #include "tree.h"
++#include "protector.h"
+ 
+ /* This file contains the reload pass of the compiler, which is
+    run after register allocation has been done.  It checks that
+@@ -891,7 +892,7 @@
+       if (cfun->stack_alignment_needed)
+         assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
+ 
+-      starting_frame_size = get_frame_size ();
++      starting_frame_size = get_frame_size () - get_frame_free_size ();
+ 
+       set_initial_elim_offsets ();
+       set_initial_label_offsets ();
+@@ -955,7 +956,7 @@
+ 	setup_save_areas ();
+ 
+       /* If we allocated another stack slot, redo elimination bookkeeping.  */
+-      if (starting_frame_size != get_frame_size ())
++      if (starting_frame_size != get_frame_size () - get_frame_free_size ())
+ 	continue;
+ 
+       if (caller_save_needed)
+@@ -974,7 +975,7 @@
+ 
+       /* If we allocated any new memory locations, make another pass
+ 	 since it might have changed elimination offsets.  */
+-      if (starting_frame_size != get_frame_size ())
++      if (starting_frame_size != get_frame_size () - get_frame_free_size ())
+ 	something_changed = 1;
+ 
+       {
+@@ -1066,11 +1067,11 @@
+   if (insns_need_reload != 0 || something_needs_elimination
+       || something_needs_operands_changed)
+     {
+-      HOST_WIDE_INT old_frame_size = get_frame_size ();
++      HOST_WIDE_INT old_frame_size = get_frame_size () - get_frame_free_size ();
+ 
+       reload_as_needed (global);
+ 
+-      if (old_frame_size != get_frame_size ())
++      if (old_frame_size != get_frame_size () - get_frame_free_size ())
+ 	abort ();
+ 
+       if (num_eliminable)
+@@ -1957,8 +1958,10 @@
+ 	 inherent space, and no less total space, then the previous slot.  */
+       if (from_reg == -1)
+ 	{
+-	  /* No known place to spill from => no slot to reuse.  */
+-	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
++	  /* No known place to spill from => no slot to reuse.
++	     For the stack protection, an allocated slot should be placed in
++	     the safe region from the stack smashing attack.  */
++	  x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
+ 				  inherent_size == total_size ? 0 : -1);
+ 	  if (BYTES_BIG_ENDIAN)
+ 	    /* Cancel the  big-endian correction done in assign_stack_local.
+diff -Naur gcc-3.4.4.orig/gcc/rtl.h gcc-3.4.4/gcc/rtl.h
+--- gcc-3.4.4.orig/gcc/rtl.h	2004-12-05 05:21:01.000000000 +0000
++++ gcc-3.4.4/gcc/rtl.h	2005-05-29 05:58:14.000000000 +0000
+@@ -473,6 +473,18 @@
+ 			     __FUNCTION__);				\
+    _rtx; })
+ 
++#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9)	\
++  __extension__								\
++({ rtx const _rtx = (RTX);						\
++   if (GET_CODE(_rtx) != C1 && GET_CODE(_rtx) != C2			\
++       && GET_CODE(_rtx) != C3 && GET_CODE(_rtx) != C4			\
++       && GET_CODE(_rtx) != C5 && GET_CODE(_rtx) != C6			\
++       && GET_CODE(_rtx) != C7 && GET_CODE(_rtx) != C8			\
++       && GET_CODE(_rtx) != C9)						\
++     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
++			     __FUNCTION__);				\
++   _rtx; })
++
+ extern void rtl_check_failed_flag (const char *, rtx, const char *,
+ 				   int, const char *)
+     ATTRIBUTE_NORETURN
+@@ -488,6 +500,7 @@
+ #define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)		(RTX)
+ #define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)		(RTX)
+ #define RTL_FLAG_CHECK8(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8)	(RTX)
++#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9)	(RTX)
+ #endif
+ 
+ #define CLEAR_RTX_FLAGS(RTX)	\
+@@ -583,9 +596,9 @@
+ #define LOG_LINKS(INSN)	XEXP(INSN, 7)
+ 
+ #define RTX_INTEGRATED_P(RTX)						\
+-  (RTL_FLAG_CHECK8("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN,		\
++  (RTL_FLAG_CHECK9("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN,		\
+ 		   JUMP_INSN, INSN_LIST, BARRIER, CODE_LABEL, CONST,	\
+-		   NOTE)->integrated)
++		   PLUS, NOTE)->integrated)
+ #define RTX_UNCHANGING_P(RTX)						\
+   (RTL_FLAG_CHECK3("RTX_UNCHANGING_P", (RTX), REG, MEM, CONCAT)->unchanging)
+ #define RTX_FRAME_RELATED_P(RTX)					\
+@@ -1125,6 +1138,10 @@
+   (RTL_FLAG_CHECK3("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS,		\
+ 		   ASM_INPUT)->volatil)
+ 
++/* 1 if RTX is an SET rtx that is not eliminated for the stack protection.  */
++#define SET_VOLATILE_P(RTX)					\
++  (RTL_FLAG_CHECK1("SET_VOLATILE_P", (RTX), SET)->volatil)
++
+ /* 1 if RTX is a mem that refers to an aggregate, either to the
+    aggregate itself of to a field of the aggregate.  If zero, RTX may
+    or may not be such a reference.  */
+diff -Naur gcc-3.4.4.orig/gcc/simplify-rtx.c gcc-3.4.4/gcc/simplify-rtx.c
+--- gcc-3.4.4.orig/gcc/simplify-rtx.c	2005-03-23 14:41:59.000000000 +0000
++++ gcc-3.4.4/gcc/simplify-rtx.c	2005-05-29 05:58:14.000000000 +0000
+@@ -2329,6 +2329,7 @@
+   int n_ops = 2, input_ops = 2, input_consts = 0, n_consts;
+   int first, changed;
+   int i, j;
++  HOST_WIDE_INT fp_offset = 0;
+ 
+   memset (ops, 0, sizeof ops);
+ 
+@@ -2354,6 +2355,10 @@
+ 	  switch (this_code)
+ 	    {
+ 	    case PLUS:
++	    if (flag_propolice_protection
++		&& XEXP (this_op, 0) == virtual_stack_vars_rtx
++		&& GET_CODE (XEXP (this_op, 1)) == CONST_INT)
++	      fp_offset = INTVAL (XEXP (this_op, 1));
+ 	    case MINUS:
+ 	      if (n_ops == 7)
+ 		return NULL_RTX;
+@@ -2515,11 +2520,24 @@
+       && GET_CODE (ops[n_ops - 1].op) == CONST_INT
+       && CONSTANT_P (ops[n_ops - 2].op))
+     {
+-      rtx value = ops[n_ops - 1].op;
+-      if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
+-	value = neg_const_int (mode, value);
+-      ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
+-      n_ops--;
++      if (!flag_propolice_protection)
++	{
++	  rtx value = ops[n_ops - 1].op;
++	  if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
++	    value = neg_const_int (mode, value);
++	  ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
++	  n_ops--;
++	}
++      /* The stack protector keeps the addressing style of a local variable,
++	 so it doesn't use neg_const_int function not to change
++	 the offset value.  */
++      else {
++	HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
++	if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
++	  value = -value;
++	ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value);
++	n_ops--;
++      }
+     }
+ 
+   /* Count the number of CONSTs that we generated.  */
+@@ -2537,6 +2555,59 @@
+ 	  || (n_ops + n_consts == input_ops && n_consts <= input_consts)))
+     return NULL_RTX;
+ 
++  if (flag_propolice_protection)
++    {
++      /* keep the addressing style of local variables
++	 as (plus (virtual_stack_vars_rtx) (CONST_int x)).
++	 For the case array[r-1],
++	 converts from (+ (+VFP c1) (+r -1)) to (SET R (+VFP c1)) (+ R (+r -1)).
++
++	 This loop finds ops[i] which is the register for the frame
++	 addressing, Then, makes the frame addressing using the register and
++	 the constant of ops[n_ops - 1].  */
++      for (i = 0; i < n_ops; i++)
++#ifdef FRAME_GROWS_DOWNWARD
++	if (ops[i].op == virtual_stack_vars_rtx)
++#else
++	if (ops[i].op == virtual_stack_vars_rtx
++	    || ops[i].op == frame_pointer_rtx)
++#endif
++	  {
++	    if (GET_CODE (ops[n_ops - 1].op) == CONST_INT)
++	      {
++		HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
++		if (value >= fp_offset)
++		  {
++		    ops[i].op = plus_constant (ops[i].op, value);
++		    n_ops--;
++		  }
++		else
++		  {
++		    if (!force
++			&& (n_ops + 1 + n_consts > input_ops
++			    || (n_ops + 1 + n_consts == input_ops
++				&& n_consts <= input_consts)))
++		      return NULL_RTX;
++		    ops[n_ops - 1].op = GEN_INT (value-fp_offset);
++		    ops[i].op = plus_constant (ops[i].op, fp_offset);
++		  }
++	      }
++	    /* keep the following address pattern;
++	       (1) buf[BUFSIZE] is the first assigned variable.
++	       (+ (+ fp -BUFSIZE) BUFSIZE)
++	       (2) ((+ (+ fp 1) r) -1).  */
++	    else if (fp_offset != 0)
++	      return NULL_RTX;
++	    /* keep the (+ fp 0) pattern for the following case;
++	       (1) buf[i]: i: REG, buf: (+ fp 0) in !FRAME_GROWS_DOWNWARD
++	       (2) argument: the address is (+ fp 0).  */
++	    else if (fp_offset == 0)
++	      return NULL_RTX;
++
++	    break;
++	  }
++    }
++
+   /* Put a non-negated operand first, if possible.  */
+ 
+   for (i = 0; i < n_ops && ops[i].neg; i++)
+diff -Naur gcc-3.4.4.orig/gcc/testsuite/gcc.dg/ssp-warn.c gcc-3.4.4/gcc/testsuite/gcc.dg/ssp-warn.c
+--- gcc-3.4.4.orig/gcc/testsuite/gcc.dg/ssp-warn.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.4/gcc/testsuite/gcc.dg/ssp-warn.c	2005-05-29 05:58:14.000000000 +0000
+@@ -0,0 +1,32 @@
++/* { dg-do compile } */
++/* { dg-options "-fstack-protector" } */
++void
++test1()
++{
++  void intest1(int *a)
++    {
++      *a ++;
++    }
++  
++  char buf[80];
++
++  buf[0] = 0;
++} /* { dg-bogus "not protecting function: it contains functions" } */
++
++void
++test2(int n)
++{
++  char buf[80];
++  char vbuf[n];
++
++  buf[0] = 0;
++  vbuf[0] = 0;
++} /* { dg-bogus "not protecting variables: it has a variable length buffer" } */
++
++void
++test3()
++{
++  char buf[5];
++
++  buf[0] = 0;
++} /* { dg-bogus "not protecting function: buffer is less than 8 bytes long" } */
+diff -Naur gcc-3.4.4.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp gcc-3.4.4/gcc/testsuite/gcc.misc-tests/ssp-execute.exp
+--- gcc-3.4.4.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.4/gcc/testsuite/gcc.misc-tests/ssp-execute.exp	2005-05-29 05:58:14.000000000 +0000
+@@ -0,0 +1,35 @@
++#   Copyright (C) 2003, 2004 Free Software Foundation, Inc.
++
++# This program is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 2 of the License, or
++# (at your option) any later version.
++# 
++# This program is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
++# GNU General Public License for more details.
++# 
++# You should have received a copy of the GNU General Public License
++# along with this program; if not, write to the Free Software
++# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.  
++
++if $tracelevel then {
++    strace $tracelevel
++}
++
++# Load support procs.
++load_lib c-torture.exp
++
++#
++# main test loop
++#
++
++foreach src [lsort [glob -nocomplain $srcdir/$subdir/ssp-execute*.c]] {
++    # If we're only testing specific files and this isn't one of them, skip it.
++    if ![runtest_file_p $runtests $src] then {
++	continue
++    }
++
++    c-torture-execute $src -fstack-protector
++}
+diff -Naur gcc-3.4.4.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c gcc-3.4.4/gcc/testsuite/gcc.misc-tests/ssp-execute1.c
+--- gcc-3.4.4.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.4/gcc/testsuite/gcc.misc-tests/ssp-execute1.c	2005-05-29 05:58:14.000000000 +0000
+@@ -0,0 +1,54 @@
++/* Test location changes of character array.  */
++
++void
++test(int i)
++{
++  int  ibuf1[10];
++  char buf[50];
++  int  ibuf2[10];
++  char buf2[50000];
++  int  ibuf3[10];
++  char *p;
++
++  /* c1: the frame offset of buf[0]
++     c2: the frame offset of buf2[0]
++  */
++  p= &buf[0]; *p=1;		/* expected rtl: (+ fp -c1) */
++  if (*p != buf[0])
++    abort();
++  p= &buf[5]; *p=2;		/* expected rtl: (+ fp -c1+5) */
++  if (*p != buf[5])
++    abort();
++  p= &buf[-1]; *p=3;		/* expected rtl: (+ (+ fp -c1) -1) */
++  if (*p != buf[-1])
++    abort();
++  p= &buf[49]; *p=4;		/* expected rtl: (+ fp -c1+49) */
++  if (*p != buf[49])
++    abort();
++  p = &buf[i+5]; *p=5;		/* expected rtl: (+ (+ fp -c1) (+ i 5)) */
++  if (*p != buf[i+5])
++    abort ();
++  p = buf - 1; *p=6;		/* expected rtl: (+ (+ fp -c1) -1) */
++  if (*p != buf[-1])
++    abort ();
++  p = 1 + buf; *p=7;		/* expected rtl: (+ (+ fp -c1) 1) */
++  if (*p != buf[1])
++    abort ();
++  p = &buf[1] - 1; *p=8;	/* expected rtl: (+ (+ fp -c1+1) -1) */
++  if (*p != buf[0])
++    abort ();
++
++  /* test big offset which is greater than the max value of signed 16 bit integer.  */
++  p = &buf2[45555]; *p=9;	/* expected rtl: (+ fp -c2+45555) */
++  if (*p != buf2[45555])
++    abort ();
++}
++
++int main()
++{
++  test(10);
++  exit(0);
++}
++
++
++  
+diff -Naur gcc-3.4.4.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c gcc-3.4.4/gcc/testsuite/gcc.misc-tests/ssp-execute2.c
+--- gcc-3.4.4.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.4/gcc/testsuite/gcc.misc-tests/ssp-execute2.c	2005-05-29 05:58:14.000000000 +0000
+@@ -0,0 +1,49 @@
++void
++test(int i, char *j, int k)
++{
++  int  a[10];
++  char b;
++  int  c;
++  long *d;
++  char buf[50];
++  long e[10];
++  int  n;
++
++  a[0] = 4;
++  b = 5;
++  c = 6;
++  d = (long*)7;
++  e[0] = 8;
++
++  /* overflow buffer */
++  for (n = 0; n < 120; n++)
++    buf[n] = 0;
++  
++  if (j == 0 || *j != 2)
++    abort ();
++  if (a[0] == 0)
++    abort ();
++  if (b == 0)
++    abort ();
++  if (c == 0)
++    abort ();
++  if (d == 0)
++    abort ();
++  if (e[0] == 0)
++    abort ();
++
++  exit (0);
++}
++
++int main()
++{
++  int i, k;
++  int j[40];
++  i = 1;
++  j[39] = 2;
++  k = 3;
++  test(i, &j[39], k);
++}
++
++
++  
+diff -Naur gcc-3.4.4.orig/gcc/toplev.c gcc-3.4.4/gcc/toplev.c
+--- gcc-3.4.4.orig/gcc/toplev.c	2005-03-09 00:50:25.000000000 +0000
++++ gcc-3.4.4/gcc/toplev.c	2005-05-29 05:58:14.000000000 +0000
+@@ -79,6 +79,7 @@
+ #include "coverage.h"
+ #include "value-prof.h"
+ #include "alloc-pool.h"
++#include "protector.h"
+ 
+ #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
+ #include "dwarf2out.h"
+@@ -97,6 +98,10 @@
+ 				   declarations for e.g. AIX 4.x.  */
+ #endif
+ 
++#ifdef STACK_PROTECTOR
++#include "protector.h"
++#endif
++
+ #ifndef HAVE_conditional_execution
+ #define HAVE_conditional_execution 0
+ #endif
+@@ -979,6 +984,15 @@
+    minimum function alignment.  Zero means no alignment is forced.  */
+ int force_align_functions_log;
+ 
++#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
++/* Nonzero means use propolice as a stack protection method */
++int flag_propolice_protection = 1;
++int flag_stack_protection = 0;
++#else
++int flag_propolice_protection = 0;
++int flag_stack_protection = 0;
++#endif
++
+ typedef struct
+ {
+   const char *const string;
+@@ -1154,7 +1168,9 @@
+   {"mem-report", &mem_report, 1 },
+   { "trapv", &flag_trapv, 1 },
+   { "wrapv", &flag_wrapv, 1 },
+-  { "new-ra", &flag_new_regalloc, 1 }
++  { "new-ra", &flag_new_regalloc, 1 },
++  {"stack-protector", &flag_propolice_protection, 1 },
++  {"stack-protector-all", &flag_stack_protection, 1 }
+ };
+ 
+ /* Here is a table, controlled by the tm.h file, listing each -m switch
+@@ -2689,6 +2705,9 @@
+ 
+   insns = get_insns ();
+ 
++  if (flag_propolice_protection)
++    prepare_stack_protection (inlinable);
++
+   /* Dump the rtl code if we are dumping rtl.  */
+ 
+   if (open_dump_file (DFI_rtl, decl))
+@@ -4485,6 +4504,12 @@
+     /* The presence of IEEE signaling NaNs, implies all math can trap.  */
+     if (flag_signaling_nans)
+       flag_trapping_math = 1;
++
++  /* This combination makes optimized frame addressings and causes
++    an internal compilation error at prepare_stack_protection.
++    so don't allow it.  */
++  if (flag_stack_protection && !flag_propolice_protection)
++    flag_propolice_protection = TRUE;
+ }
+ 
+ /* Initialize the compiler back end.  */
+diff -Naur gcc-3.4.4.orig/gcc/tree.h gcc-3.4.4/gcc/tree.h
+--- gcc-3.4.4.orig/gcc/tree.h	2005-01-16 16:01:19.000000000 +0000
++++ gcc-3.4.4/gcc/tree.h	2005-05-29 05:58:14.000000000 +0000
+@@ -1489,6 +1489,10 @@
+    where it is called.  */
+ #define DECL_INLINE(NODE) (FUNCTION_DECL_CHECK (NODE)->decl.inline_flag)
+ 
++/* In a VAR_DECL, nonzero if the declaration is copied for inlining.
++   The stack protector should keep its location in the stack.  */
++#define DECL_COPIED(NODE) (VAR_DECL_CHECK (NODE)->decl.inline_flag)
++
+ /* Nonzero in a FUNCTION_DECL means that this function was declared inline,
+    such as via the `inline' keyword in C/C++.  This flag controls the linkage
+    semantics of 'inline'; whether or not the function is inlined is




More information about the patches mailing list