r621 - in trunk: gcc glibc linux

jim at linuxfromscratch.org jim at linuxfromscratch.org
Tue Aug 31 12:56:56 PDT 2004


Author: jim
Date: 2004-08-31 13:56:54 -0600 (Tue, 31 Aug 2004)
New Revision: 621

Added:
   trunk/gcc/gcc-2.95.3-ssp-3.patch
   trunk/gcc/gcc-3.3-ssp-3.patch
   trunk/gcc/gcc-3.3.4-no_fixincludes-1.patch
   trunk/gcc/gcc-3.3.4-specs-1.patch
   trunk/gcc/gcc-3.4-ssp-1.patch
   trunk/gcc/gcc-3.4-sspspecs-1.patch
   trunk/glibc/glibc-2.3.3-gcc34_allow-1.patch
   trunk/glibc/glibc-2.3.3-pax-1.patch
   trunk/glibc/glibc-2.3.3-security_fixes-1.patch
   trunk/linux/linux-2.4.27-frandom-1.patch
   trunk/linux/linux-2.4.27-ssp-1.patch
Log:
Added: HLFS patches

Added: trunk/gcc/gcc-2.95.3-ssp-3.patch
===================================================================
--- trunk/gcc/gcc-2.95.3-ssp-3.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/gcc/gcc-2.95.3-ssp-3.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,3511 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2004-08-29
+Initial Package Version: 2.95.3
+Upstream Status: Rejected Upstream
+Origin: http://www.research.ibm.com/trl/projects/security/ssp/
+Description: Smashing Stack Protector - protector-2.95.3-28.tar.gz
+This patch is made specifically to work with the Glibc SSP patch. All guard
+functions have been removed. Developers are encouraged to check the
+differences between this patch, the original from IBM, and the Glibc patch.
+
+You might also want to change the version after applying this patch:
+sed -e 's/2.95.3/2.95.3 ssp/' -i gcc/version.c
+
+This patch, and Glibc's patch, depends on erandom sysctl from:
+http://frandom.sourceforge.net/
+Thanks to Eli Billauer.
+
+Also see:
+http://www.linuxfromscratch.org/hlfs/
+http://www.linuxfromscratch.org/hints/downloads/files/ssp.txt
+http://www.linuxfromscratch.org/hints/downloads/files/entropy.txt
+
+diff -Naur gcc-2.95.3.orig/gcc/Makefile.in gcc-2.95.3.ssp/gcc/Makefile.in
+--- gcc-2.95.3.orig/gcc/Makefile.in	2001-01-25 14:02:58.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/Makefile.in	2004-08-30 01:29:21.000000000 +0000
+@@ -683,7 +683,7 @@
+  insn-peep.o reorg.o $(SCHED_PREFIX)sched.o final.o recog.o reg-stack.o \
+  insn-opinit.o insn-recog.o insn-extract.o insn-output.o insn-emit.o lcm.o \
+  profile.o insn-attrtab.o $(out_object_file) getpwd.o $(EXTRA_OBJS) convert.o \
+- mbchar.o dyn-string.o splay-tree.o graph.o sbitmap.o resource.o hash.o
++ mbchar.o dyn-string.o splay-tree.o graph.o sbitmap.o resource.o hash.o protector.o
+ 
+ # GEN files are listed separately, so they can be built before doing parallel
+ #  makes for cc1 or cc1plus.  Otherwise sequent parallel make attempts to load
+@@ -734,7 +734,7 @@
+     _fixtfdi _fixunstfdi _floatditf \
+     __gcc_bcmp _varargs __dummy _eprintf \
+     _bb _shtab _clear_cache _trampoline __main _exit \
+-    _ctors _pure
++    _ctors _pure _stack_smash_handler
+ 
+ LIB2FUNCS_EH = _eh
+ 
+diff -Naur gcc-2.95.3.orig/gcc/calls.c gcc-2.95.3.ssp/gcc/calls.c
+--- gcc-2.95.3.orig/gcc/calls.c	2001-01-25 14:03:00.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/calls.c	2004-08-30 01:29:21.000000000 +0000
+@@ -1753,7 +1753,7 @@
+ 	    /* This DECL is just something to feed to mark_addressable;
+ 	       it doesn't get pushed.  */
+ 	    d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
+-	    DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
++	    DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 5, 0, 1);
+ 	    mark_addressable (d);
+ 	    structure_value_addr = XEXP (DECL_RTL (d), 0);
+ 	    TREE_USED (d) = 1;
+diff -Naur gcc-2.95.3.orig/gcc/combine.c gcc-2.95.3.ssp/gcc/combine.c
+--- gcc-2.95.3.orig/gcc/combine.c	2001-01-25 14:03:01.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/combine.c	2004-08-30 01:29:21.000000000 +0000
+@@ -3483,6 +3483,16 @@
+ 	  rtx inner_op1 = XEXP (x, 1);
+ 	  rtx inner;
+ 	  
++#ifndef FRAME_GROWS_DOWNWARD
++	  if (flag_propolice_protection
++	      && code == PLUS
++	      && other == frame_pointer_rtx
++	      && GET_CODE (inner_op0) == CONST_INT
++	      && GET_CODE (inner_op1) == CONST_INT
++	      && INTVAL (inner_op0) > 0
++	      && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
++	    return x;
++#endif
+ 	  /* Make sure we pass the constant operand if any as the second
+ 	     one if this is a commutative operation.  */
+ 	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
+@@ -3953,6 +3963,11 @@
+ 	 they are now checked elsewhere.  */
+       if (GET_CODE (XEXP (x, 0)) == PLUS
+ 	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
++#ifndef FRAME_GROWS_DOWNWARD
++	if (! (flag_propolice_protection
++	       && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
++	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
++#endif
+ 	return gen_binary (PLUS, mode,
+ 			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
+ 				       XEXP (x, 1)),
+@@ -4045,7 +4060,10 @@
+ 
+       /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
+ 	 integers.  */
+-      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
++      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
++	  && (! (flag_propolice_protection
++		 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
++		 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
+ 	return gen_binary (MINUS, mode,
+ 			   gen_binary (MINUS, mode, XEXP (x, 0),
+ 				       XEXP (XEXP (x, 1), 0)),
+diff -Naur gcc-2.95.3.orig/gcc/config/t-linux gcc-2.95.3.ssp/gcc/config/t-linux
+--- gcc-2.95.3.orig/gcc/config/t-linux	1998-12-16 21:00:09.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/config/t-linux	2004-08-30 01:29:22.000000000 +0000
+@@ -7,7 +7,7 @@
+ # Compile crtbeginS.o and crtendS.o with pic.
+ CRTSTUFF_T_CFLAGS_S = -fPIC
+ # Compile libgcc2.a with pic.
+-TARGET_LIBGCC2_CFLAGS = -fPIC
++TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG
+ 
+ # Do not build libgcc1. Let gcc generate those functions. The GNU/Linux
+ # C library can handle them.
+diff -Naur gcc-2.95.3.orig/gcc/cse.c gcc-2.95.3.ssp/gcc/cse.c
+--- gcc-2.95.3.orig/gcc/cse.c	2001-01-25 14:03:03.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/cse.c	2004-08-30 01:29:21.000000000 +0000
+@@ -4504,6 +4504,7 @@
+   int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
+   int first = 1, negate = 0, changed;
+   int i, j;
++  HOST_WIDE_INT fp_offset = 0;
+ 
+   bzero ((char *) ops, sizeof ops);
+   
+@@ -4522,6 +4523,10 @@
+ 	switch (GET_CODE (ops[i]))
+ 	  {
+ 	  case PLUS:
++	    if (flag_propolice_protection
++		&& XEXP (ops[i], 0) == virtual_stack_vars_rtx
++		&& GET_CODE (XEXP (ops[i], 1)) == CONST_INT)
++	      fp_offset = INTVAL (XEXP (ops[i], 1));
+ 	  case MINUS:
+ 	    if (n_ops == 7)
+ 	      return 0;
+@@ -4637,7 +4642,54 @@
+ 	j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
+       }
+ 
+-  /* Put a non-negated operand first.  If there aren't any, make all
++  if (flag_propolice_protection)
++    {
++      /* keep the addressing style of local variables
++	 as (plus (virtual_stack_vars_rtx) (CONST_int x))
++	 (1) inline function is expanded, (+ (+VFP c1) -c2)=>(+ VFP c1-c2)
++	 (2) the case ary[r-1], (+ (+VFP c1) (+r -1))=>(+ R (+r -1))
++      */
++      for (i = 0; i < n_ops; i++)
++#ifdef FRAME_GROWS_DOWNWARD
++	if (ops[i] == virtual_stack_vars_rtx)
++#else
++	if (ops[i] == virtual_stack_vars_rtx
++	    || ops[i] == frame_pointer_rtx)
++#endif
++	  {
++	    if (GET_CODE (ops[n_ops - 1]) == CONST_INT)
++	      {
++		HOST_WIDE_INT value = INTVAL (ops[n_ops - 1]);
++		if (n_ops < 3 || value >= fp_offset)
++		  {
++		    ops[i] = plus_constant (ops[i], value);
++		    n_ops--;
++		  }
++		else
++		  {
++		    if (n_ops+1 + n_consts > input_ops
++			|| (n_ops+1 + n_consts == input_ops && n_consts <= input_consts))
++		      return 0;
++		    ops[n_ops - 1] = GEN_INT (value-fp_offset);
++		    ops[i] = plus_constant (ops[i], fp_offset);
++		  }
++	      }
++	    /* buf[BUFSIZE]: buf is the first local variable (+ (+ fp -S) S) 
++	       or (+ (fp 0) r) ==> ((+ (+fp 1) r) -1) */
++	    else if (fp_offset != 0)
++	      return 0;
++#ifndef FRAME_GROWS_DOWNWARD
++	    /*
++	     * For the case of buf[i], i: REG, buf: (plus fp 0),
++	     */
++	    else if (fp_offset == 0)
++	      return 0;
++#endif
++	    break;
++	  }
++    }
++
++/* Put a non-negated operand first.  If there aren't any, make all
+      operands positive and negate the whole thing later.  */
+   for (i = 0; i < n_ops && negs[i]; i++)
+     ;
+@@ -5964,7 +6016,14 @@
+ 
+ 	      if (new_const == 0)
+ 		break;
+-
++#ifndef FRAME_GROWS_DOWNWARD
++	      if (flag_propolice_protection
++		  && GET_CODE (y) == PLUS
++		  && XEXP (y, 0) == frame_pointer_rtx
++		  && INTVAL (inner_const) > 0
++		  && INTVAL (new_const) <= 0)
++		break;
++#endif
+ 	      /* If we are associating shift operations, don't let this
+ 		 produce a shift of the size of the object or larger.
+ 		 This could occur when we follow a sign-extend by a right
+@@ -6483,6 +6542,13 @@
+       if (SET_DEST (x) == pc_rtx
+ 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
+ 	;
++      else if (x->volatil) {
++	rtx x1 = SET_DEST (x);
++	if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
++	  x1 = SUBREG_REG (x1);
++	make_new_qty (REGNO (x1));
++	qty_mode[REG_QTY (REGNO (x1))] = GET_MODE (x1);
++      }
+ 
+       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
+ 	 The hard function value register is used only once, to copy to
+diff -Naur gcc-2.95.3.orig/gcc/explow.c gcc-2.95.3.ssp/gcc/explow.c
+--- gcc-2.95.3.orig/gcc/explow.c	1999-04-17 17:14:48.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/explow.c	2004-08-30 01:29:21.000000000 +0000
+@@ -52,7 +52,8 @@
+   register rtx tem;
+   int all_constant = 0;
+ 
+-  if (c == 0)
++  if (c == 0
++      && !(flag_propolice_protection && x == virtual_stack_vars_rtx))
+     return x;
+ 
+  restart:
+@@ -149,7 +150,8 @@
+       break;
+     }
+ 
+-  if (c != 0)
++  if (c != 0
++      || (flag_propolice_protection && x == virtual_stack_vars_rtx))
+     x = gen_rtx_PLUS (mode, x, GEN_INT (c));
+ 
+   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
+@@ -473,6 +475,21 @@
+ 	 in certain cases.  This is not necessary since the code
+ 	 below can handle all possible cases, but machine-dependent
+ 	 transformations can make better code.  */
++      if (flag_propolice_protection)
++	{
++#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS				\
++			&& XEXP (X, 0) == virtual_stack_vars_rtx	\
++			&& GET_CODE (XEXP (X, 1)) == CONST_INT)
++	  rtx y;
++	  if (FRAMEADDR_P (x)) goto win;
++	  for (y=x; y!=0 && GET_CODE (y)==PLUS; y = XEXP (y, 0))
++	    {
++	      if (FRAMEADDR_P (XEXP (y, 0)))
++		XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
++	      if (FRAMEADDR_P (XEXP (y, 1)))
++		XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
++	    }
++	}
+       LEGITIMIZE_ADDRESS (x, oldx, mode, win);
+ 
+       /* PLUS and MULT can appear in special ways
+diff -Naur gcc-2.95.3.orig/gcc/expr.c gcc-2.95.3.ssp/gcc/expr.c
+--- gcc-2.95.3.orig/gcc/expr.c	2001-02-19 14:02:00.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/expr.c	2004-08-30 01:29:21.000000000 +0000
+@@ -41,6 +41,7 @@
+ #include "typeclass.h"
+ #include "defaults.h"
+ #include "toplev.h"
++#include "protector.h"
+ 
+ #define CEIL(x,y) (((x) + (y) - 1) / (y))
+ 
+@@ -1468,7 +1469,7 @@
+ 
+       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
+ 	{
+-	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
++	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len-GET_MODE_SIZE (mode)));
+ 	  data.autinc_from = 1;
+ 	  data.explicit_inc_from = -1;
+ 	}
+@@ -1482,7 +1483,7 @@
+ 	data.from_addr = copy_addr_to_reg (from_addr);
+       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
+ 	{
+-	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
++	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len-GET_MODE_SIZE (mode)));
+ 	  data.autinc_to = 1;
+ 	  data.explicit_inc_to = -1;
+ 	}
+@@ -1600,9 +1601,9 @@
+       MEM_IN_STRUCT_P (from1) = data->from_struct;
+ 
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
+-	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
++	if (data->explicit_inc_to-- < -1) emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
+-	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
++	if (data->explicit_inc_from-- < -1) emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
+ 
+       emit_insn ((*genfun) (to1, from1));
+       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
+@@ -2313,7 +2314,7 @@
+ 
+       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
+ 	{
+-	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
++	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len-GET_MODE_SIZE (mode)));
+ 	  data.autinc_to = 1;
+ 	  data.explicit_inc_to = -1;
+ 	}
+@@ -2383,7 +2384,7 @@
+       MEM_IN_STRUCT_P (to1) = data->to_struct;
+ 
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
+-	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
++	if (data->explicit_inc_to-- < -1) emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
+ 
+       emit_insn ((*genfun) (to1, const0_rtx));
+       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
+@@ -5160,7 +5161,9 @@
+ 	  && GET_CODE (XEXP (value, 0)) == PLUS
+ 	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
+ 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
+-	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
++	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
++	  && (!flag_propolice_protection
++	      || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
+ 	{
+ 	  rtx temp = expand_binop (GET_MODE (value), binoptab,
+ 				   XEXP (XEXP (value, 0), 0), op2,
+@@ -7075,7 +7078,8 @@
+       /* If adding to a sum including a constant,
+ 	 associate it to put the constant outside.  */
+       if (GET_CODE (op1) == PLUS
+-	  && CONSTANT_P (XEXP (op1, 1)))
++	  && CONSTANT_P (XEXP (op1, 1))
++	  && !(flag_propolice_protection && (contains_fp (op0) || contains_fp (op1))))
+ 	{
+ 	  rtx constant_term = const0_rtx;
+ 
+diff -Naur gcc-2.95.3.orig/gcc/flags.h gcc-2.95.3.ssp/gcc/flags.h
+--- gcc-2.95.3.orig/gcc/flags.h	1999-05-18 01:05:04.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/flags.h	2004-08-30 01:29:21.000000000 +0000
+@@ -538,3 +538,12 @@
+    string identifying the compiler.  */
+ 
+ extern int flag_no_ident;
++
++/* Nonzero means use propolice as a stack protection method */
++
++extern int flag_propolice_protection;
++extern int flag_stack_protection;
++
++/* Warn when not issuing stack smashing protection for some reason */
++
++extern int warn_stack_protector;
+diff -Naur gcc-2.95.3.orig/gcc/function.c gcc-2.95.3.ssp/gcc/function.c
+--- gcc-2.95.3.orig/gcc/function.c	2001-01-25 14:03:15.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/function.c	2004-08-30 01:29:21.000000000 +0000
+@@ -58,6 +58,7 @@
+ #include "obstack.h"
+ #include "toplev.h"
+ #include "hash.h"
++#include "protector.h"
+ 
+ #ifndef TRAMPOLINE_ALIGNMENT
+ #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
+@@ -430,6 +431,8 @@
+   /* The size of the slot, including extra space for alignment.  This
+      info is for combine_temp_slots.  */
+   HOST_WIDE_INT full_size;
++  /* Boundary mark of a character array and the others. This info is for propolice */
++  int boundary_mark;
+ };
+ 
+ /* List of all temporaries allocated, both available and in use.  */
+@@ -449,6 +452,11 @@
+    until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
+    of TARGET_EXPRs.  */
+ int target_temp_slot_level;
++
++/* Current boundary mark for character arrays.  */
++
++int temp_boundary_mark;
++
+ 

+ /* This structure is used to record MEMs or pseudos used to replace VAR, any
+    SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
+@@ -917,7 +925,8 @@
+    with this flag.  KEEP is 2 if we allocate a longer term temporary,
+    whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
+    if we are to allocate something at an inner level to be treated as
+-   a variable in the block (e.g., a SAVE_EXPR).  
++   a variable in the block (e.g., a SAVE_EXPR).
++   KEEP is 5 if we allocate a place to return structure.
+ 
+    TYPE is the type that will be used for the stack slot.  */
+ 
+@@ -931,6 +940,8 @@
+   int align;
+   int alias_set;
+   struct temp_slot *p, *best_p = 0;
++  int char_array = (flag_propolice_protection
++		    && keep == 1 && search_string_def (type));
+ 
+   /* If SIZE is -1 it means that somebody tried to allocate a temporary
+      of a variable size.  */
+@@ -963,7 +974,8 @@
+ 	&& (!flag_strict_aliasing
+ 	    || (alias_set && p->alias_set == alias_set))
+ 	&& (best_p == 0 || best_p->size > p->size
+-	    || (best_p->size == p->size && best_p->align > p->align)))
++	    || (best_p->size == p->size && best_p->align > p->align))
++	&& (! char_array || p->boundary_mark != 0))
+       {
+ 	if (p->align == align && p->size == size)
+ 	  {
+@@ -1001,6 +1013,7 @@
+ 	      p->align = best_p->align;
+ 	      p->address = 0;
+ 	      p->rtl_expr = 0;
++	      p->boundary_mark = best_p->boundary_mark;
+ 	      p->next = temp_slots;
+ 	      temp_slots = p;
+ 
+@@ -1062,6 +1075,7 @@
+       p->full_size = frame_offset - frame_offset_old;
+ #endif
+       p->address = 0;
++      p->boundary_mark = char_array?++temp_boundary_mark:0;
+       p->next = temp_slots;
+       temp_slots = p;
+     }
+@@ -1186,14 +1200,16 @@
+ 	    int delete_q = 0;
+ 	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
+ 	      {
+-		if (p->base_offset + p->full_size == q->base_offset)
++		if (p->base_offset + p->full_size == q->base_offset &&
++		    p->boundary_mark == q->boundary_mark)
+ 		  {
+ 		    /* Q comes after P; combine Q into P.  */
+ 		    p->size += q->size;
+ 		    p->full_size += q->full_size;
+ 		    delete_q = 1;
+ 		  }
+-		else if (q->base_offset + q->full_size == p->base_offset)
++		else if (q->base_offset + q->full_size == p->base_offset &&
++			 p->boundary_mark == q->boundary_mark)
+ 		  {
+ 		    /* P comes after Q; combine P into Q.  */
+ 		    q->size += p->size;
+@@ -1702,7 +1718,7 @@
+       if (regno < max_parm_reg)
+ 	new = parm_reg_stack_loc[regno];
+       if (new == 0)
+-	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
++	new = assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0);
+     }
+ 
+   PUT_MODE (reg, decl_mode);
+@@ -3860,7 +3876,8 @@
+ 		 constant with that register.  */
+ 	      temp = gen_reg_rtx (Pmode);
+ 	      XEXP (x, 0) = new;
+-	      if (validate_change (object, &XEXP (x, 1), temp, 0))
++	      if (validate_change (object, &XEXP (x, 1), temp, 0)
++		  && ! flag_propolice_protection)
+ 		emit_insn_before (gen_move_insn (temp, new_offset), object);
+ 	      else
+ 		{
+diff -Naur gcc-2.95.3.orig/gcc/gcse.c gcc-2.95.3.ssp/gcc/gcse.c
+--- gcc-2.95.3.orig/gcc/gcse.c	1999-10-16 21:20:32.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/gcse.c	2004-08-30 01:29:21.000000000 +0000
+@@ -3718,7 +3718,7 @@
+       /* Find an assignment that sets reg_used and is available
+ 	 at the start of the block.  */
+       set = find_avail_set (regno, insn);
+-      if (! set)
++      if (! set || set->expr->volatil)
+ 	continue;
+   
+       pat = set->expr;
+diff -Naur gcc-2.95.3.orig/gcc/integrate.c gcc-2.95.3.ssp/gcc/integrate.c
+--- gcc-2.95.3.orig/gcc/integrate.c	1999-04-25 23:35:12.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/integrate.c	2004-08-30 01:29:21.000000000 +0000
+@@ -2270,6 +2270,8 @@
+ 	}
+       /* These args would always appear unused, if not for this.  */
+       TREE_USED (d) = 1;
++      if (flag_propolice_protection && TREE_CODE (d) == VAR_DECL)
++	DECL_INLINE (d) = 1;
+ 
+       if (DECL_LANG_SPECIFIC (d))
+ 	copy_lang_decl (d);
+@@ -2393,6 +2395,10 @@
+ 
+ 	      seq = gen_sequence ();
+ 	      end_sequence ();
++#ifdef FRAME_GROWS_DOWNWARD
++	      if (flag_propolice_protection && GET_CODE (seq) == SET)
++		RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
++#endif
+ 	      emit_insn_after (seq, map->insns_at_start);
+ 	      return temp;
+ 	    }
+diff -Naur gcc-2.95.3.orig/gcc/jump.c gcc-2.95.3.ssp/gcc/jump.c
+--- gcc-2.95.3.orig/gcc/jump.c	1999-10-21 06:24:03.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/jump.c	2004-08-30 01:29:21.000000000 +0000
+@@ -65,6 +65,7 @@
+ #include "real.h"
+ #include "except.h"
+ #include "toplev.h"
++#include "protector.h"
+ 
+ /* ??? Eventually must record somehow the labels used by jumps
+    from nested functions.  */
+@@ -2521,6 +2522,35 @@
+       insn = PREV_INSN (insn);
+     }
+ 
++  /* If the NOTE_INSN_FUNCTION_END is generated by the stack protector,
++     skip to the label insn before protector's epilogue insns.  */
++  if (flag_propolice_protection
++      && insn != NULL_RTX
++      && GET_CODE (insn) == NOTE
++      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
++      && NOTE_SOURCE_FILE (insn)
++      && strcmp (NOTE_SOURCE_FILE (insn), SSP_DUMMY_FILE) == 0)
++    {
++      /* found ssp handler and skip to the call insn.  */
++      for (; insn != NULL_RTX; insn = PREV_INSN (insn))
++	{
++	  if (GET_CODE (insn) == CALL_INSN)
++	    break;
++	}
++
++      /* skip to the insn before jump insn.  */
++      for (; insn != NULL_RTX; insn = PREV_INSN (insn))
++	{
++	  if (GET_CODE (insn) == JUMP_INSN)
++	    {
++	      insn = PREV_INSN (insn);
++	      return calculate_can_reach_end (insn, check_deleted,
++					      delete_final_note);
++	    }
++	}
++      return 0;
++    }
++
+   /* See if we backed up to the appropriate type of note.  */
+   if (insn != NULL_RTX
+       && GET_CODE (insn) == NOTE
+diff -Naur gcc-2.95.3.orig/gcc/libgcc2.c gcc-2.95.3.ssp/gcc/libgcc2.c
+--- gcc-2.95.3.orig/gcc/libgcc2.c	1999-06-11 03:11:43.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/libgcc2.c	2004-08-30 01:29:21.000000000 +0000
+@@ -4012,3 +4012,104 @@
+   __terminate ();
+ }
+ #endif
++

++#ifdef L_stack_smash_handler
++#ifndef _LIBC_PROVIDES_SSP_
++#include <stdio.h>
++#include <string.h>
++#include <fcntl.h>
++#include <unistd.h>
++
++#ifdef _POSIX_SOURCE
++#include <signal.h>
++#endif
++
++#if defined(HAVE_SYSLOG)
++#include <sys/types.h>
++#include <sys/socket.h>
++#include <sys/un.h>
++
++#include <sys/syslog.h>
++#ifndef _PATH_LOG
++#define _PATH_LOG "/dev/log"
++#endif
++#endif
++
++long __guard[8] = {0,0,0,0,0,0,0,0};
++static void __guard_setup (void) __attribute__ ((constructor)) ;
++static void __guard_setup (void)
++{
++  int fd;
++  if (__guard[0]!=0) return;
++  fd = open ("/dev/urandom", 0);
++  if (fd != -1) {
++    ssize_t size = read (fd, (char*)&__guard, sizeof(__guard));
++    close (fd) ;
++    if (size == sizeof(__guard)) return;
++  }
++  /* If a random generator can't be used, the protector switches the guard
++     to the "terminator canary" */
++  ((char*)__guard)[0] = 0; ((char*)__guard)[1] = 0;
++  ((char*)__guard)[2] = '\n'; ((char*)__guard)[3] = 255;
++}
++void __stack_smash_handler (char func[], int damaged ATTRIBUTE_UNUSED)
++{
++#if defined (__GNU_LIBRARY__)
++  extern char * __progname;
++#endif
++  const char message[] = ": stack smashing attack in function ";
++  int bufsz = 256, len;
++  char buf[bufsz];
++#if defined(HAVE_SYSLOG)
++  int LogFile;
++  struct sockaddr_un SyslogAddr;  /* AF_UNIX address of local logger */
++#endif
++#ifdef _POSIX_SOURCE
++  {
++    sigset_t mask;
++    sigfillset(&mask);
++    sigdelset(&mask, SIGABRT);	/* Block all signal handlers */
++    sigprocmask(SIG_BLOCK, &mask, NULL); /* except SIGABRT */
++  }
++#endif
++
++  strcpy(buf, "<2>"); len=3;	/* send LOG_CRIT */
++#if defined (__GNU_LIBRARY__)
++  strncat(buf, __progname, bufsz-len-1); len = strlen(buf);
++#endif
++  if (bufsz>len) {strncat(buf, message, bufsz-len-1); len = strlen(buf);}
++  if (bufsz>len) {strncat(buf, func, bufsz-len-1); len = strlen(buf);}
++
++  /* print error message */
++  write (STDERR_FILENO, buf+3, len-3);
++#if defined(HAVE_SYSLOG)
++  if ((LogFile = socket(AF_UNIX, SOCK_DGRAM, 0)) != -1) {
++
++    /*
++     * Send "found" message to the "/dev/log" path
++     */
++    SyslogAddr.sun_family = AF_UNIX;
++    (void)strncpy(SyslogAddr.sun_path, _PATH_LOG,
++		  sizeof(SyslogAddr.sun_path) - 1);
++    SyslogAddr.sun_path[sizeof(SyslogAddr.sun_path) - 1] = '\0';
++    sendto(LogFile, buf, len, 0, (struct sockaddr *)&SyslogAddr,
++	   sizeof(SyslogAddr));
++  }
++#endif
++
++#ifdef _POSIX_SOURCE
++  { /* Make sure the default handler is associated with SIGABRT */
++    struct sigaction sa;
++    
++    memset(&sa, 0, sizeof(struct sigaction));
++    sigfillset(&sa.sa_mask);	/* Block all signals */
++    sa.sa_flags = 0;
++    sa.sa_handler = SIG_DFL;
++    sigaction(SIGABRT, &sa, NULL);
++    (void)kill(getpid(), SIGABRT);
++  }
++#endif
++  _exit(127);
++}
++#endif
++#endif
+diff -Naur gcc-2.95.3.orig/gcc/loop.c gcc-2.95.3.ssp/gcc/loop.c
+--- gcc-2.95.3.orig/gcc/loop.c	2001-01-25 14:03:18.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/loop.c	2004-08-30 01:29:22.000000000 +0000
+@@ -6201,6 +6201,14 @@
+   if (GET_CODE (*mult_val) == USE)
+     *mult_val = XEXP (*mult_val, 0);
+ 
++#ifndef FRAME_GROWS_DOWNWARD
++  if (flag_propolice_protection
++      && GET_CODE (*add_val) == PLUS
++      && (XEXP (*add_val, 0) == frame_pointer_rtx
++	  || XEXP (*add_val, 1) == frame_pointer_rtx))
++    return 0;
++#endif
++
+   if (is_addr)
+     {
+ #ifdef ADDRESS_COST
+diff -Naur gcc-2.95.3.orig/gcc/optabs.c gcc-2.95.3.ssp/gcc/optabs.c
+--- gcc-2.95.3.orig/gcc/optabs.c	2001-01-25 14:03:19.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/optabs.c	2004-08-30 01:29:22.000000000 +0000
+@@ -772,6 +772,25 @@
+   if (target)
+     target = protect_from_queue (target, 1);
+ 
++  if (flag_propolice_protection
++      && binoptab->code == PLUS
++      && op0 == virtual_stack_vars_rtx
++      && GET_CODE(op1) == CONST_INT)
++    {
++      int icode = (int) binoptab->handlers[(int) mode].insn_code;
++      if (target)
++	temp = target;
++      else
++	temp = gen_reg_rtx (mode);
++
++      if (! (*insn_operand_predicate[icode][0]) (temp, mode))
++	temp = gen_reg_rtx (mode);
++
++      emit_insn (gen_rtx_SET (VOIDmode, temp,
++			      gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
++      return temp;
++    }
++
+   if (flag_force_mem)
+     {
+       op0 = force_not_mem (op0);
+diff -Naur gcc-2.95.3.orig/gcc/protector.c gcc-2.95.3.ssp/gcc/protector.c
+--- gcc-2.95.3.orig/gcc/protector.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/protector.c	2004-05-17 00:30:52.000000000 +0000
+@@ -0,0 +1,2645 @@
++/* RTL buffer overflow protection function for GNU C compiler
++   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
++
++This file is part of GNU CC.
++
++GNU CC is free software; you can redistribute it and/or modify
++it under the terms of the GNU General Public License as published by
++the Free Software Foundation; either version 2, or (at your option)
++any later version.
++
++GNU CC is distributed in the hope that it will be useful,
++but WITHOUT ANY WARRANTY; without even the implied warranty of
++MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
++GNU General Public License for more details.
++
++You should have received a copy of the GNU General Public License
++along with GNU CC; see the file COPYING.  If not, write to
++the Free Software Foundation, 59 Temple Place - Suite 330,
++Boston, MA 02111-1307, USA.  */
++
++#include "config.h"
++#include "system.h"
++#include "machmode.h"
++
++#include "rtl.h"
++#include "tree.h"
++#include "regs.h"
++#include "flags.h"
++#include "insn-config.h"
++#include "insn-flags.h"
++#include "expr.h"
++#include "output.h"
++#include "recog.h"
++#include "hard-reg-set.h"
++#include "real.h"
++#include "except.h"
++#include "function.h"
++#include "toplev.h"
++#include "conditions.h"
++#include "insn-attr.h"
++#include "c-tree.h"
++#include "protector.h"
++
++
++rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int));
++
++
++/* Warn when not issuing stack smashing protection for some reason */
++int warn_stack_protector;
++
++/* Round a value to the lowest integer less than it that is a multiple of
++   the required alignment.  Avoid using division in case the value is
++   negative.  Assume the alignment is a power of two.  */
++#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
++
++/* Similar, but round to the next highest integer that meets the
++   alignment.  */
++#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
++
++
++/* Nonzero means use propolice as a stack protection method */
++extern int flag_propolice_protection;
++
++/* This file contains several memory arrangement functions to protect
++   the return address and the frame pointer of the stack
++   from a stack-smashing attack. It also
++   provides the function that protects pointer variables. */
++
++/* Nonzero if function being compiled can define string buffers that may be
++   damaged by the stack-smash attack */
++static int current_function_defines_vulnerable_string;
++static int current_function_defines_short_string;
++static int current_function_has_variable_string;
++static int current_function_defines_vsized_array;
++static int current_function_is_inlinable;
++static int is_array;
++
++static rtx guard_area, _guard;
++static rtx function_first_insn, prologue_insert_point;
++static rtx debuginsn;
++
++/*  */
++static HOST_WIDE_INT sweep_frame_offset;
++static HOST_WIDE_INT push_allocated_offset = 0;
++static HOST_WIDE_INT push_frame_offset = 0;
++static int saved_cse_not_expected = 0;
++
++static int search_string_from_argsandvars PARAMS ((int caller));
++static int search_string_from_local_vars PARAMS ((tree block));
++static int search_pointer_def PARAMS ((tree names));
++static int search_func_pointer PARAMS ((tree type, int mark));
++static void reset_used_flags_for_insns PARAMS ((rtx insn));
++static void reset_used_flags_for_decls PARAMS ((tree block));
++static void reset_used_flags_of_plus PARAMS ((rtx x));
++static void rtl_prologue PARAMS ((rtx insn));
++static void rtl_epilogue PARAMS ((rtx fnlastinsn));
++static void arrange_var_order PARAMS ((tree blocks));
++static void copy_args_for_protection PARAMS ((void));
++static void sweep_string_variable PARAMS ((rtx sweep_var, HOST_WIDE_INT var_size));
++static void sweep_string_in_decls PARAMS ((tree block, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
++static void sweep_string_in_args PARAMS ((tree parms, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
++static void sweep_string_use_of_insns PARAMS ((rtx insn, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
++static void sweep_string_in_operand PARAMS ((rtx insn, rtx *loc, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
++static void move_arg_location PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size));
++static void change_arg_use_of_insns PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT size));
++static void change_arg_use_in_operand PARAMS ((rtx x, rtx orig, rtx new, HOST_WIDE_INT size));
++static void expand_value_return PARAMS ((rtx val));
++static int  replace_return_reg PARAMS ((rtx insn, rtx return_save));
++static void validate_insns_of_varrefs PARAMS ((rtx insn));
++static void validate_operand_of_varrefs PARAMS ((rtx insn, rtx *loc));
++
++#ifndef SUSPICIOUS_BUF_SIZE
++#define SUSPICIOUS_BUF_SIZE 8
++#endif
++
++#define AUTO_BASEPTR(X) \
++  (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
++#define AUTO_OFFSET(X) \
++  (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
++#undef PARM_PASSED_IN_MEMORY
++#define PARM_PASSED_IN_MEMORY(PARM) \
++ (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
++#define VIRTUAL_STACK_VARS_P(X) \
++ ((X) == virtual_stack_vars_rtx || (GET_CODE (X) == REG && (X)->used))
++
++
++
++void
++prepare_stack_protection (inlinable)
++     int inlinable;
++{
++  tree blocks = DECL_INITIAL (current_function_decl);
++  current_function_is_inlinable = inlinable && !flag_no_inline;
++  push_frame_offset = push_allocated_offset = 0;
++  saved_cse_not_expected = 0;
++
++  /*
++    skip the protection if the function has no block or it is an inline function
++  */
++  if (current_function_is_inlinable) validate_insns_of_varrefs (get_insns ());
++  if (! blocks || current_function_is_inlinable) return;
++
++  current_function_defines_vulnerable_string = search_string_from_argsandvars (0);
++
++  if (current_function_defines_vulnerable_string
++      || flag_stack_protection)
++    {
++      HOST_WIDE_INT offset;
++      function_first_insn = get_insns ();
++
++      if (current_function_contains_functions) {
++	  if (warn_stack_protector)
++             warning ("not protecting function: it contains functions");
++	  return;
++      }
++
++      /* Initialize recognition, indicating that volatile is OK.  */
++      init_recog ();
++
++      sweep_frame_offset = 0;
++	
++#ifdef STACK_GROWS_DOWNWARD
++      /*
++	frame_offset: offset to end of allocated area of stack frame.
++	 It is defined in the function.c
++      */
++
++      /* the location must be before buffers */
++      guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
++      PUT_MODE (guard_area, GUARD_m);
++      MEM_VOLATILE_P (guard_area) = 1;
++
++#ifndef FRAME_GROWS_DOWNWARD
++      sweep_frame_offset = frame_offset;
++#endif
++
++      /* For making room for guard value, scan all insns and fix the offset address
++	 of the variable that is based on frame pointer.
++	 Scan all declarations of variables and fix the offset address of the variable that
++	 is based on the frame pointer */
++      sweep_string_variable (guard_area, UNITS_PER_GUARD);
++
++	
++      /* the location of guard area moves to the beginning of stack frame */
++      if ((offset = AUTO_OFFSET(XEXP (guard_area, 0))))
++	XEXP (XEXP (guard_area, 0), 1) = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
++
++
++      /* Insert prologue rtl instructions */
++      rtl_prologue (function_first_insn);
++
++      if (! current_function_has_variable_string)
++	{
++	  /* Generate argument saving instruction */
++	  copy_args_for_protection ();
++
++#ifndef FRAME_GROWS_DOWNWARD
++	  /* If frame grows upward, character string copied from an arg stays top of
++	     the guard variable. So sweep the guard variable again */
++	  sweep_frame_offset = CEIL_ROUND (frame_offset, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
++	  sweep_string_variable (guard_area, UNITS_PER_GUARD);
++#endif
++	}
++      else if (warn_stack_protector)
++	warning ("not protecting variables: it has a variable length buffer");
++#endif
++#ifndef FRAME_GROWS_DOWNWARD
++      if (STARTING_FRAME_OFFSET == 0)
++	{
++	  /* this may be only for alpha */
++	  push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++	  assign_stack_local (BLKmode, push_allocated_offset, -1);
++	  sweep_frame_offset = frame_offset;
++	  sweep_string_variable (const0_rtx, -push_allocated_offset);
++	  sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
++	}
++#endif
++
++      /* Arrange the order of local variables */
++      arrange_var_order (blocks);
++
++#ifdef STACK_GROWS_DOWNWARD
++      /* Insert epilogue rtl instructions */
++      rtl_epilogue (get_last_insn ());
++#endif
++      init_recog_no_volatile ();
++    }
++  else if (current_function_defines_short_string
++	   && warn_stack_protector)
++    warning ("not protecting function: buffer is less than %d bytes long",
++	     SUSPICIOUS_BUF_SIZE);
++}
++
++/*
++  search string from arguments and local variables
++  caller: 0 means call from protector_stack_protection
++          1 means call from push_frame
++*/
++static int
++search_string_from_argsandvars (caller)
++     int caller;
++{
++  tree blocks, parms;
++  int string_p;
++
++  /* saves the latest search result as cached information */
++  static tree __latest_search_decl = 0;
++  static int  __latest_search_result = FALSE;
++
++  if (__latest_search_decl == current_function_decl)
++    return __latest_search_result;
++  else if (caller) return FALSE;
++  __latest_search_decl = current_function_decl;
++  __latest_search_result = TRUE;
++  
++  current_function_defines_short_string = FALSE;
++  current_function_has_variable_string = FALSE;
++  current_function_defines_vsized_array = FALSE;
++
++  /*
++    search a string variable from local variables
++  */
++  blocks = DECL_INITIAL (current_function_decl);
++  string_p = search_string_from_local_vars (blocks);
++
++  if (!current_function_defines_vsized_array && current_function_calls_alloca)
++    {
++      current_function_has_variable_string = TRUE;
++      return TRUE;
++    }
++
++  if (string_p) return TRUE;
++
++#ifdef STACK_GROWS_DOWNWARD
++  /*
++    search a string variable from arguments
++  */
++  parms = DECL_ARGUMENTS (current_function_decl);
++
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    string_p = search_string_def (TREE_TYPE(parms));
++	    if (string_p) return TRUE;
++	  }
++      }
++#endif
++
++  __latest_search_result = FALSE;
++  return FALSE;
++}
++
++
++static int
++search_string_from_local_vars (block)
++     tree block;
++{
++  tree types;
++  int found = FALSE;
++
++  while (block)
++    {
++      types = BLOCK_VARS(block);
++
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable */
++	  /* name: types.decl.name.identifier.id                   */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
++	      && TREE_CODE (types) == VAR_DECL
++	      && ! DECL_ARTIFICIAL (types)
++	      && DECL_RTL (types)
++	      && GET_CODE (DECL_RTL (types)) == MEM)
++	    {
++	      if (search_string_def (TREE_TYPE (types)))
++		{
++		  rtx home = DECL_RTL (types);
++
++		  if (GET_CODE (home) == MEM
++		      && (GET_CODE (XEXP (home, 0)) == MEM
++			  || (GET_CODE (XEXP (home, 0)) == REG
++			      && XEXP (home, 0) != virtual_stack_vars_rtx
++			      && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
++			      && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
++#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
++			      && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
++#endif
++			      )))
++		    /* If the value is indirect by memory or by a register
++		       that isn't the frame pointer,
++		       then it means the object is variable-sized and addressed through
++		       that register or stack slot.  The protection has no way to hide pointer variables
++		       behind the array, so all we can do is leave the arguments where they are. */
++		    {
++		      current_function_has_variable_string = TRUE;
++		    }
++		  /* found character array */
++		  found = TRUE;
++		}
++	    }
++
++	  types = TREE_CHAIN(types);
++	}
++
++      if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
++	{
++	  found = TRUE;
++	}
++
++      block = BLOCK_CHAIN (block);
++    }
++    
++  return found;
++}
++
++
++/*
++ * search a character array from the specified type tree
++ */
++int
++search_string_def (type)
++     tree type;
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case ARRAY_TYPE:
++      /* Check if the array is a variable-sized array */
++      if (TYPE_DOMAIN (type) == 0 ||
++	  TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR)
++	current_function_defines_vsized_array = TRUE;
++
++      if (TREE_TYPE (type) == char_type_node
++	  || (TREE_TYPE (type)
++	      && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
++	      && TYPE_PRECISION (TREE_TYPE (type)) == 8))
++	{
++	  /* Check if the string is a variable string */
++	  if (TYPE_DOMAIN (type) == 0 ||
++	      TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR)
++	    return TRUE;
++
++#if SUSPICIOUS_BUF_SIZE > 0
++	  /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE */
++	  if (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1 >= SUSPICIOUS_BUF_SIZE)
++	    return TRUE;
++
++	  current_function_defines_short_string = TRUE;
++#else
++	  return TRUE;
++#endif
++	}
++      
++      /* to protect every functions, sweep any arrays to the frame top */
++      is_array = TRUE;
++
++      return search_string_def(TREE_TYPE(type));
++	
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++      /* Output the name, type, position (in bits), size (in bits) of each
++	 field.  */
++      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	{
++	  /* Omit here local type decls until we know how to support them. */
++	  if ((TREE_CODE (tem) == TYPE_DECL)
++	      || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
++	    continue;
++
++	  if (search_string_def(TREE_TYPE(tem))) return TRUE;
++	}
++      break;
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++      /* I'm not sure whether OFFSET_TYPE needs this treatment,
++	 so I'll play safe and return 1.  */
++    case OFFSET_TYPE:
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++/*
++ * examine whether the input contains frame pointer addressing
++ */
++int
++contains_fp (op)
++     rtx op;
++{
++  register enum rtx_code code;
++  rtx x;
++  int i, j;
++  const char *fmt;
++
++  x = op;
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case PLUS:
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && CONSTANT_P (XEXP (x, 1)))
++	return TRUE;
++      break;
++
++    default:
++      return FALSE;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (contains_fp (XEXP (x, i))) return TRUE;
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	if (contains_fp (XVECEXP (x, i, j))) return TRUE;
++
++  return FALSE;
++}
++
++
++static int
++search_pointer_def (type)
++     tree type;
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++      /* Output the name, type, position (in bits), size (in bits) of each
++	 field.  */
++      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	{
++	  /* Omit here local type decls until we know how to support them. */
++	  if ((TREE_CODE (tem) == TYPE_DECL)
++	      || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
++	    continue;
++
++	  if (search_pointer_def (TREE_TYPE(tem))) return TRUE;
++	}
++      break;
++
++    case ARRAY_TYPE:
++      return search_pointer_def (TREE_TYPE(type));
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++      /* I'm not sure whether OFFSET_TYPE needs this treatment,
++	 so I'll play safe and return 1.  */
++    case OFFSET_TYPE:
++      if (TYPE_READONLY (TREE_TYPE (type)))
++	{
++	  int funcp = search_func_pointer (TREE_TYPE (type), 1);
++	  /* Un-mark the type as having been visited already */
++	  search_func_pointer (TREE_TYPE (type), 0);
++	  return funcp;
++	}
++      return TRUE;
++	
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++static int
++search_func_pointer (type, mark)
++     tree type;
++     int mark;
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++	if (TREE_ASM_WRITTEN (type) != mark)
++	  {
++	    /* mark the type as having been visited already */
++	    TREE_ASM_WRITTEN (type) = mark;
++
++	    /* Output the name, type, position (in bits), size (in bits) of
++	       each field.  */
++	    for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	      {
++		/* Omit here local type decls until we know how to support them. */
++		if (TREE_CODE (tem) == FIELD_DECL
++		    && search_func_pointer (TREE_TYPE(tem), mark)) return TRUE;
++	      }
++	  }
++	break;
++
++    case ARRAY_TYPE:
++      return search_func_pointer (TREE_TYPE(type), mark);
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++      /* I'm not sure whether OFFSET_TYPE needs this treatment,
++	 so I'll play safe and return 1.  */
++    case OFFSET_TYPE:
++      return TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE;
++	
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++static void
++reset_used_flags_for_insns (insn)
++     rtx insn;
++{
++  register int i, j;
++  register enum rtx_code code;
++  register const char *format_ptr;
++
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	code = GET_CODE (insn);
++	insn->used = 0;
++	format_ptr = GET_RTX_FORMAT (code);
++
++	for (i = 0; i < GET_RTX_LENGTH (code); i++)
++	  {
++	    switch (*format_ptr++) {
++	    case 'e':
++	      reset_used_flags_of_plus (XEXP (insn, i));
++	      break;
++			
++	    case 'E':
++	      for (j = 0; j < XVECLEN (insn, i); j++)
++		reset_used_flags_of_plus (XVECEXP (insn, i, j));
++	      break;
++	    }
++	  }
++      }
++}
++
++static void
++reset_used_flags_for_decls (block)
++     tree block;
++{
++  tree types;
++  rtx home;
++
++  while (block)
++    {
++      types = BLOCK_VARS(block);
++	
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable and
++	     also skip an global variable */
++	  if (! DECL_EXTERNAL (types))
++	    {
++	      home = DECL_RTL (types);
++	      if (home == 0) goto next;
++
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == PLUS
++		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++		{
++		  XEXP (home, 0)->used = 0;
++		}
++	    }
++	next:
++	  types = TREE_CHAIN(types);
++	}
++
++      reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++/* Clear the USED bits only of type PLUS in X */
++
++static void
++reset_used_flags_of_plus (x)
++     rtx x;
++{
++  register int i, j;
++  register enum rtx_code code;
++  register const char *format_ptr;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  /* These types may be freely shared so we needn't do any resetting
++     for them.  */
++
++  switch (code)
++    {
++    case REG:
++    case QUEUED:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++      return;
++
++    case INSN:
++    case JUMP_INSN:
++    case CALL_INSN:
++    case NOTE:
++    case LABEL_REF:
++    case BARRIER:
++      /* The chain of insns is not being copied.  */
++      return;
++      
++    case PLUS:
++      x->used = 0;
++      break;
++
++    case CALL_PLACEHOLDER:
++      reset_used_flags_for_insns (XEXP (x, 0));
++      reset_used_flags_for_insns (XEXP (x, 1));
++      reset_used_flags_for_insns (XEXP (x, 2));
++      break;
++
++    default:
++      break;
++    }
++
++  format_ptr = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++)
++    {
++      switch (*format_ptr++)
++	{
++	case 'e':
++	  reset_used_flags_of_plus (XEXP (x, i));
++	  break;
++
++	case 'E':
++	  for (j = 0; j < XVECLEN (x, i); j++)
++	    reset_used_flags_of_plus (XVECEXP (x, i, j));
++	  break;
++	}
++    }
++}
++
++
++static void
++rtl_prologue (insn)
++     rtx insn;
++{
++#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
++#undef HAS_INIT_SECTION
++#define HAS_INIT_SECTION
++#endif
++  rtx _val;
++
++
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
++      break;
++  
++#if !defined (HAS_INIT_SECTION)
++  /* If this function is `main', skip a call to `__main'
++     to run guard instruments after global initializers, etc.  */
++  if (DECL_NAME (current_function_decl)
++      && strcmp (IDENTIFIER_POINTER (DECL_NAME (current_function_decl)), "main") == 0
++      && DECL_CONTEXT (current_function_decl) == NULL_TREE)
++    {
++      rtx fbinsn = insn;
++      for (; insn; insn = NEXT_INSN (insn))
++	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
++	  break;
++      if (insn == 0) insn = fbinsn;
++    }
++#endif
++
++  prologue_insert_point = NEXT_INSN (insn);	/* mark the next insn of FUNCTION_BEG insn */
++		
++  start_sequence ();
++
++  _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
++  emit_move_insn ( guard_area, _guard);
++
++  _val = gen_sequence ();
++  end_sequence ();
++
++  emit_insn_before (_val, prologue_insert_point);
++}
++
++static void
++rtl_epilogue (insn)
++     rtx insn;
++{
++  rtx if_false_label, end_label = 0;
++  rtx _val, dummyend;
++  rtx funcname;
++  tree funcstr;
++  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl)),
++    return_save = 0;
++  int  flag_have_return = FALSE;
++		
++  start_sequence ();
++
++#ifdef HAVE_return
++  if (HAVE_return)
++    {
++      rtx insn;
++      return_label = gen_label_rtx ();
++      
++      for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
++	if (GET_CODE (insn) == JUMP_INSN
++	    && GET_CODE (PATTERN (insn)) == RETURN
++	    && GET_MODE (PATTERN (insn)) == VOIDmode)
++	  {
++	    rtx pat = gen_rtx_SET (VOIDmode,
++				   pc_rtx,
++				   gen_rtx_LABEL_REF (VOIDmode,
++						      return_label));
++	    PATTERN (insn) = pat;
++	    flag_have_return = TRUE;
++	  }
++
++
++      emit_label (return_label);
++    }
++#endif
++
++  if (return_reg
++      && ! (current_function_returns_struct
++	    || current_function_returns_pcc_struct)
++      /* If scalar return value was NOT computed in a pseudo-reg */
++      && ! (GET_CODE (return_reg) == REG
++	    && REGNO (return_reg) >= FIRST_PSEUDO_REGISTER))
++    {
++      return_save = GET_CODE (return_reg)==REG?
++	gen_reg_rtx (GET_MODE (return_reg)):return_reg;
++
++      if (! replace_return_reg (prologue_insert_point, return_save))
++	emit_move_insn (return_save, return_reg);
++    }
++
++  compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, 0, 0);	/* if (guard_area != _guard) */
++
++  if_false_label = gen_label_rtx ();				/* { */
++  emit_jump_insn ( gen_beq(if_false_label));
++
++  /*
++    In the function force_const_mem in varasm.c of egcs-1.1.2-30, there is a 
++    failure to assign the guard_area variable to eax register, which destroys 
++    the return value of the function.
++
++    The preceding code is an appropriate workaround for that bug.
++    When the bug is fixed, remove this comment.
++  */
++
++  /* generate string for the current function name */
++  funcstr = build_string (strlen(current_function_name)+1, current_function_name);
++  TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);/* = char_array_type_node;*/
++  funcname = output_constant_def (funcstr);
++
++  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__stack_smash_handler"),
++		     0, VOIDmode, 2,
++                     XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
++
++  /* generate RTL to return from the current function */
++		
++  emit_barrier ();						/* } */
++  emit_label (if_false_label);
++
++  /* generate RTL to return from the current function */
++  if (return_reg)
++    {
++      if (return_save)
++	expand_value_return (return_save);
++
++      /* If returning a structure, arrange to return the address of the value
++	 in a place where debuggers expect to find it.
++
++	 If returning a structure PCC style,
++	 the caller also depends on this value.
++	 And current_function_returns_pcc_struct is not necessarily set.  */
++      else if (current_function_returns_struct
++	       || current_function_returns_pcc_struct)
++	{
++	  rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
++	  tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
++#ifdef FUNCTION_OUTGOING_VALUE
++	  rtx outgoing
++	    = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
++				       current_function_decl);
++#else
++	  rtx outgoing
++	    = FUNCTION_VALUE (build_pointer_type (type),
++			      current_function_decl);
++#endif
++	  
++	  /* Mark this as a function return value so integrate will delete the
++	     assignment and USE below when inlining this function.  */
++	  REG_FUNCTION_VALUE_P (outgoing) = 1;
++
++	  emit_move_insn (outgoing, value_address);
++	  use_variable (outgoing);
++	}
++
++      else if (GET_CODE (return_reg) == REG
++	       && REGNO (return_reg) >= FIRST_PSEUDO_REGISTER) {
++	/* If scalar return value was computed in a pseudo-reg,
++	   copy that to the hard return register.  */
++	emit_move_insn (current_function_return_rtx, return_reg);
++	emit_insn (gen_rtx_USE (VOIDmode, current_function_return_rtx));
++      }
++
++      end_label = gen_label_rtx ();
++      emit_jump (end_label);
++    }
++
++  /* Mark the end of the function body.
++     If control reaches this insn, the function can drop through
++     without returning a value.  */
++  dummyend = emit_note (SSP_DUMMY_FILE, NOTE_INSN_FUNCTION_END);
++  
++  if (end_label)
++    emit_label (end_label);
++
++#ifdef HAVE_return
++  if (HAVE_return && flag_have_return)
++    {
++      emit_jump_insn (gen_return ());
++      emit_barrier ();
++    }
++#endif
++  
++  _val = gen_sequence ();
++  end_sequence ();
++
++  emit_insn_after (_val, insn);
++
++  /* mark NOTE_FUNCTION_END as deleted not to be eliminated.  */
++  INSN_DELETED_P (dummyend) = 1;
++}
++
++
++static void
++arrange_var_order (block)
++     tree block;
++{
++  tree types;
++  HOST_WIDE_INT offset;
++    
++  while (block)
++    {
++      /* arrange the location of character arrays in depth first.  */
++      arrange_var_order (BLOCK_SUBBLOCKS (block));
++
++      types = BLOCK_VARS (block);
++
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable */
++	  /* name: types.decl.assembler_name.id			   */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
++	      && TREE_CODE (types) == VAR_DECL
++	      && ! DECL_ARTIFICIAL (types)
++	      && ! DECL_INLINE (types)	/* don't sweep inlined string */
++	      && DECL_RTL (types)
++	      && GET_CODE (DECL_RTL (types)) == MEM
++	      && GET_MODE (DECL_RTL (types)) == BLKmode)
++	    {
++	      is_array = 0;
++	      if (search_string_def (TREE_TYPE (types))
++		  || (! current_function_defines_vulnerable_string
++		      && is_array))
++		{
++		  rtx home = DECL_RTL (types);
++
++		  if (! (GET_CODE (home) == MEM
++			 && (GET_CODE (XEXP (home, 0)) == MEM
++			     || (GET_CODE (XEXP (home, 0)) == REG
++				 && XEXP (home, 0) != virtual_stack_vars_rtx
++				 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
++				 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
++#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
++				 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
++#endif
++				 ))))
++		    {
++		      /* found a string variable */
++		      HOST_WIDE_INT var_size =
++			((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
++			 / BITS_PER_UNIT);
++
++		      /* confirmed it is BLKmode.  */
++		      int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++		      var_size = CEIL_ROUND (var_size, alignment);
++
++		      /* skip the variable if it is top of the region
++			 specified by sweep_frame_offset */
++		      offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
++		      if (offset == sweep_frame_offset - var_size)
++			sweep_frame_offset -= var_size;
++		      
++		      else if (offset < sweep_frame_offset - var_size)
++			sweep_string_variable (DECL_RTL (types), var_size);
++		    }
++		}
++	    }
++
++	  types = TREE_CHAIN(types);
++	}
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++static void
++copy_args_for_protection (void)
++{
++  tree parms = DECL_ARGUMENTS (current_function_decl);
++  rtx temp_rtx;
++  int idx;
++
++  parms = DECL_ARGUMENTS (current_function_decl);
++  for (idx = 0; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    int string_p;
++
++	    /*
++	      skip argument protection if the last argument is used
++	      for the variable argument
++	    */
++	    /*
++	      tree fntype;
++	      if (TREE_CHAIN (parms) == 0)
++	      {
++	        fntype = TREE_TYPE (current_function_decl);
++
++	        if ((TYPE_ARG_TYPES (fntype) != 0 &&
++	             TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) != void_type_node)
++	             || current_function_varargs)
++	          continue;
++	      }
++	    */
++
++	    string_p = search_string_def (TREE_TYPE(parms));
++
++	    /* check if it is a candidate to move */
++	    if (string_p || search_pointer_def (TREE_TYPE (parms)))
++	      {
++		int arg_size
++		  = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
++		     / BITS_PER_UNIT);
++		
++		start_sequence ();
++
++		if (GET_CODE (DECL_RTL (parms)) == REG)
++		  {
++		    rtx movinsn;
++		    rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
++
++		    /* generate codes for copying the content */
++		    movinsn = emit_move_insn (safe, DECL_RTL (parms));
++		    PATTERN (movinsn)->volatil = 1;	/* avoid register elimination in gcse.c (COPY-PROP)*/
++
++		    change_arg_use_of_insns (prologue_insert_point, DECL_RTL (parms), safe, 0);
++
++		    /* save debugger info */
++		    DECL_INCOMING_RTL (parms) = safe;
++		  }
++
++		else if (GET_CODE (DECL_RTL (parms)) == MEM
++			 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
++		  {
++		    rtx movinsn;
++		    rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
++
++		    /* generate codes for copying the content */
++		    movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
++		    PATTERN (movinsn)->volatil = 1;	/* avoid register elimination in gcse.c (COPY-PROP)*/
++
++		    /* change the addressof information to the newly allocated pseudo register */
++		    emit_move_insn (DECL_RTL (parms), safe);
++
++		    /* save debugger info */
++		    DECL_INCOMING_RTL (parms) = safe;
++		  }
++			
++		else
++		  {
++		    /* declare temporary local variable DECL_NAME (parms) for it */
++		    temp_rtx
++		      = assign_stack_local (DECL_MODE (parms), arg_size,
++					    DECL_MODE (parms) == BLKmode ? -1 : 0);
++		    
++		    MEM_IN_STRUCT_P (temp_rtx) = AGGREGATE_TYPE_P (TREE_TYPE (parms));
++		    MEM_ALIAS_SET (temp_rtx) = get_alias_set (parms);
++
++		    /* move_arg_location may change the contents of
++		       DECL_RTL (parms). to avoid this, copies the contents */
++		    DECL_RTL (parms) = copy_rtx (DECL_RTL (parms));
++
++		    /* generate codes for copying the content */
++		    store_expr (parms, temp_rtx, 0);
++
++		    /* change the reference for each instructions */
++		    move_arg_location (prologue_insert_point, DECL_RTL (parms),
++				       temp_rtx, arg_size);
++
++		    /* change the location of parms variable */
++		    DECL_RTL (parms) = temp_rtx;
++
++		    /* change debugger info */
++		    DECL_INCOMING_RTL (parms) = temp_rtx;
++		  }
++
++		emit_insn_before (gen_sequence (), prologue_insert_point);
++		end_sequence ();
++
++#ifdef FRAME_GROWS_DOWNWARD
++		/* process the string argument */
++		if (string_p && DECL_MODE (parms) == BLKmode)
++		  {
++		    int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++		    arg_size = CEIL_ROUND (arg_size, alignment);
++			
++		    /* change the reference for each instructions */
++		    sweep_string_variable (DECL_RTL (parms), arg_size);
++		  }
++#endif
++	      }
++	  }
++      }
++}
++
++
++/*
++  sweep a string variable to the local variable addressed by sweep_frame_offset, that is
++  a last position of string variables.
++*/
++static void
++sweep_string_variable (sweep_var, var_size)
++     rtx sweep_var;
++     HOST_WIDE_INT var_size;
++{
++  HOST_WIDE_INT sweep_offset;
++
++  switch (GET_CODE (sweep_var))
++    {
++    case MEM:
++      if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
++	  && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
++	return;
++      sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
++      break;
++    case CONST_INT:
++      sweep_offset = INTVAL (sweep_var);
++      break;
++    default:
++      abort ();
++    }
++
++  /* scan all declarations of variables and fix the offset address of
++     the variable based on the frame pointer */
++  sweep_string_in_decls (DECL_INITIAL (current_function_decl), sweep_offset, var_size);
++
++  /* scan all argument variable and fix the offset address based on the frame pointer */
++  sweep_string_in_args (DECL_ARGUMENTS (current_function_decl), sweep_offset, var_size);
++
++  /* For making room for sweep variable, scan all insns and fix the offset address
++     of the variable that is based on frame pointer*/
++  sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
++
++
++  /* Clear all the USED bits in operands of all insns and declarations of local vars */
++  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
++  reset_used_flags_for_insns (function_first_insn);
++
++  sweep_frame_offset -= var_size;
++}
++
++
++
++/*
++  move an argument to the local variable addressed by frame_offset
++*/
++static void
++move_arg_location (insn, orig, new, var_size)
++     rtx  insn, orig, new;
++     HOST_WIDE_INT var_size;
++{
++  /* For making room for sweep variable, scan all insns and fix the offset address
++     of the variable that is based on frame pointer*/
++  change_arg_use_of_insns (insn, orig, new, var_size);
++
++
++  /* Clear all the USED bits in operands of all insns and declarations of local vars */
++  reset_used_flags_for_insns (insn);
++}
++
++
++static void
++sweep_string_in_decls (block, sweep_offset, sweep_size)
++     tree block;
++     HOST_WIDE_INT sweep_offset, sweep_size;
++{
++  tree types;
++  HOST_WIDE_INT offset;
++  rtx home;
++
++  while (block)
++    {
++      types = BLOCK_VARS(block);
++	
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable and
++	     also skip an global variable */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
++	    
++	    home = DECL_RTL (types);
++	    if (home == 0) goto next;
++
++	    /* process for static local variable */
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
++	      goto next;
++
++	    if (GET_CODE (home) == MEM
++		&& XEXP (home, 0) == virtual_stack_vars_rtx)
++	      {
++		offset = 0;
++		
++		/* the operand related to the sweep variable */
++		if (sweep_offset <= offset
++		    && offset < sweep_offset + sweep_size)
++		  {
++		    offset = sweep_frame_offset - sweep_size - sweep_offset;
++
++		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, offset);
++		    XEXP (home, 0)->used = 1;
++		  }
++		else if (sweep_offset <= offset
++			 && offset < sweep_frame_offset)
++		  {	/* the rest of variables under sweep_frame_offset, so shift the location */
++		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, -sweep_size);
++		    XEXP (home, 0)->used = 1;
++		  }
++	      }
++		
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == MEM)
++	      {
++		/* process for dynamically allocated array */
++		home = XEXP (home, 0);
++	      }
++		
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == PLUS
++		&& XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
++		&& GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++	      {
++		if (! XEXP (home, 0)->used)
++		  {
++		    offset = AUTO_OFFSET(XEXP (home, 0));
++
++		    /* the operand related to the sweep variable */
++		    if (sweep_offset <= offset
++			&& offset < sweep_offset + sweep_size)
++		      {
++
++			offset += sweep_frame_offset - sweep_size - sweep_offset;
++			XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++			/* mark */
++			XEXP (home, 0)->used = 1;
++		      }
++		    else if (sweep_offset <= offset
++			     && offset < sweep_frame_offset)
++		      {	/* the rest of variables under sweep_frame_offset,
++			   so shift the location */
++
++			XEXP (XEXP (home, 0), 1)
++			  = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++
++			/* mark */
++			XEXP (home, 0)->used = 1;
++		      }
++		  }
++	      }
++
++	  }
++	next:
++	  types = TREE_CHAIN(types);
++	}
++
++      sweep_string_in_decls (BLOCK_SUBBLOCKS (block), sweep_offset, sweep_size);
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++static void
++sweep_string_in_args (parms, sweep_offset, sweep_size)
++     tree parms;
++     HOST_WIDE_INT sweep_offset, sweep_size;
++{
++  rtx home;
++  HOST_WIDE_INT offset;
++    
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    home = DECL_INCOMING_RTL (parms);
++
++	    if (XEXP (home, 0)->used) continue;
++
++	    offset = AUTO_OFFSET(XEXP (home, 0));
++
++	    /* the operand related to the sweep variable */
++	    if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
++	      {
++		if (sweep_offset <= offset
++		    && offset < sweep_offset + sweep_size)
++		  {
++		    offset += sweep_frame_offset - sweep_size - sweep_offset;
++		    XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		    /* mark */
++		    XEXP (home, 0)->used = 1;
++		  }
++		else if (sweep_offset <= offset
++			 && offset < sweep_frame_offset)
++		  {	/* the rest of variables under sweep_frame_offset, so shift the location */
++		    XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++
++		    /* mark */
++		    XEXP (home, 0)->used = 1;
++		  }
++	      }
++	  }
++      }
++}
++
++
++static int has_virtual_reg;
++
++static void
++sweep_string_use_of_insns (insn, sweep_offset, sweep_size)
++     rtx insn;
++     HOST_WIDE_INT sweep_offset, sweep_size;
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	has_virtual_reg = FALSE;
++	sweep_string_in_operand (insn, &PATTERN (insn), sweep_offset, sweep_size);
++      }
++}
++
++
++static void
++sweep_string_in_operand (insn, loc, sweep_offset, sweep_size)
++     rtx insn, *loc;
++     HOST_WIDE_INT sweep_offset, sweep_size;
++{
++  register rtx x = *loc;
++  register enum rtx_code code;
++  int i, j, k = 0;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case ADDRESSOF:
++      return;
++	    
++    case REG:
++      if (x == virtual_incoming_args_rtx
++	  || x == virtual_stack_vars_rtx
++	  || x == virtual_stack_dynamic_rtx
++	  || x == virtual_outgoing_args_rtx
++	  || x == virtual_cfa_rtx)
++	has_virtual_reg = TRUE;
++      return;
++      
++    case SET:
++      /*
++	skip setjmp setup insn and setjmp restore insn
++	Example:
++	(set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
++	(set (virtual_stack_vars_rtx) (REG))
++      */
++      if (GET_CODE (XEXP (x, 0)) == MEM
++	  && XEXP (x, 1) == virtual_stack_vars_rtx)
++	return;
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == REG)
++	return;
++      break;
++	    
++    case PLUS:
++      /* Handle typical case of frame register plus constant.  */
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && CONSTANT_P (XEXP (x, 1)))
++	{
++	  if (x->used) goto single_use_of_virtual_reg;
++	  
++	  offset = AUTO_OFFSET(x);
++	  if (RTX_INTEGRATED_P (x)) k = -1; /* for inline base ptr */
++
++	  /* the operand related to the sweep variable */
++	  if (sweep_offset <= offset + k
++	      && offset + k < sweep_offset + sweep_size)
++	    {
++	      offset += sweep_frame_offset - sweep_size - sweep_offset;
++
++	      XEXP (x, 0) = virtual_stack_vars_rtx;
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	      x->used = 1;
++	    }
++	  else if (sweep_offset <= offset + k
++		   && offset + k < sweep_frame_offset)
++	    {	/* the rest of variables under sweep_frame_offset, so shift the location */
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++	      x->used = 1;
++	    }
++	  
++	single_use_of_virtual_reg:
++	  if (has_virtual_reg) {
++	    /* excerpt from insn_invalid_p in recog.c */
++	    int icode = recog_memoized (insn);
++
++	    if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
++	      {
++		rtx temp, seq;
++		
++		start_sequence ();
++		temp = force_operand (x, NULL_RTX);
++		seq = get_insns ();
++		end_sequence ();
++		
++		emit_insns_before (seq, insn);
++		if (! validate_change (insn, loc, temp, 0)
++		    && ! validate_replace_rtx (x, temp, insn))
++		  fatal_insn ("sweep_string_in_operand", insn);
++	      }
++	  }
++
++	  has_virtual_reg = TRUE;
++	  return;
++	}
++
++#ifdef FRAME_GROWS_DOWNWARD
++      /*
++	special case of frame register plus constant given by reg.
++	*/
++      else if (XEXP (x, 0) == virtual_stack_vars_rtx
++	       && GET_CODE (XEXP (x, 1)) == REG)
++	fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
++#endif
++
++      /*
++	process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      sweep_string_use_of_insns (XEXP (x, 0), sweep_offset, sweep_size);
++      sweep_string_use_of_insns (XEXP (x, 1), sweep_offset, sweep_size);
++      sweep_string_use_of_insns (XEXP (x, 2), sweep_offset, sweep_size);
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	/*
++	  virtual_stack_vars_rtx without offset
++	  Example:
++	    (set (reg:SI xx) (reg:SI 78))
++	    (set (reg:SI xx) (MEM (reg:SI 78)))
++	*/
++	if (XEXP (x, i) == virtual_stack_vars_rtx)
++	  fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
++	sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
++}   
++
++
++/*
++  change an argument variable to the local variable addressed by the "new" variable.
++*/
++static void
++change_arg_use_of_insns (insn, orig, new, size)
++     rtx insn, orig, new;
++     HOST_WIDE_INT size;
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	change_arg_use_in_operand (PATTERN (insn), orig, new, size);
++      }
++}
++
++
++static void
++change_arg_use_in_operand (x, orig, new, size)
++     rtx x, orig, new;
++     HOST_WIDE_INT size;
++{
++  register enum rtx_code code;
++  int i, j;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return;
++
++    case MEM:
++      /* Handle special case of MEM (incoming_args)  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 0) == virtual_incoming_args_rtx)
++	{
++	  offset = 0;
++
++	  /* the operand related to the sweep variable */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
++
++	    offset = AUTO_OFFSET(XEXP (new, 0))
++	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
++
++	    XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
++	    XEXP (x, 0)->used = 1;
++
++	    return;
++	  }
++	}
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (GET_CODE (orig) == MEM /* skip if orig is register variable in the optimization */
++	  && XEXP (x, 0) == virtual_incoming_args_rtx && CONSTANT_P (XEXP (x, 1))
++	  && ! x->used)
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  /* the operand related to the sweep variable */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
++
++	    offset = AUTO_OFFSET(XEXP (new, 0))
++	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
++
++	    XEXP (x, 0) = virtual_stack_vars_rtx;
++	    XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	    x->used = 1;
++
++	    return;
++	  }
++
++	  /*
++	    process further subtree:
++	    Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	    (const_int 5))
++	  */
++	}
++      break;
++
++    case SET:
++      /* Handle special case of "set (REG or MEM) (incoming_args)".
++	 It means that the the address of the 1st argument is stored. */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 1) == virtual_incoming_args_rtx)
++	{
++	  offset = 0;
++
++	  /* the operand related to the sweep variable */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
++
++	    offset = AUTO_OFFSET(XEXP (new, 0))
++	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
++
++	    XEXP (x, 1) = plus_constant (virtual_stack_vars_rtx, offset);
++	    XEXP (x, 1)->used = 1;
++
++	  return;
++	  }
++	}
++      break;
++
++    case CALL_PLACEHOLDER:
++      change_arg_use_of_insns (XEXP (x, 0), orig, new, size);
++      change_arg_use_of_insns (XEXP (x, 1), orig, new, size);
++      change_arg_use_of_insns (XEXP (x, 2), orig, new, size);
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (XEXP (x, i) == orig)
++	  {
++	    XEXP (x, i) = new;
++	    continue;
++	  }
++	change_arg_use_in_operand (XEXP (x, i), orig, new, size);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	{
++
++	  if (XVECEXP (x, i, j) == orig)
++	    {
++	      XVECEXP (x, i, j) = new;
++	      continue;
++	    }
++	  change_arg_use_in_operand (XVECEXP (x, i, j), orig, new, size);
++	}
++}   
++
++static int
++replace_return_reg (first, return_save)
++     rtx first, return_save;
++{
++  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
++  rtx insn;
++    
++  /* confirm that insn patterns are in the expected order */
++  for (insn = first; insn; insn = NEXT_INSN (insn))
++    {
++      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
++	{
++
++	  rtx prev;
++
++	  if (PREV_INSN (insn)) prev = PREV_INSN (insn);
++
++	  if (GET_CODE (PATTERN (insn)) == USE && XEXP (PATTERN (insn), 0) == return_reg)
++	    if (!(prev && GET_CODE (PATTERN (prev)) == SET && XEXP (PATTERN (prev), 0) == return_reg))
++	      return FALSE;
++	}
++    }
++
++  /* replace return register */
++  for (insn = first; insn; insn = NEXT_INSN (insn))
++    {
++      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
++	{
++	  rtx prev;
++
++	  if (PREV_INSN (insn)) prev = PREV_INSN (insn);
++	  if (GET_CODE (PATTERN (insn)) == USE
++	      && XEXP (PATTERN (insn), 0) == return_reg
++	      && prev
++	      && GET_CODE (PATTERN (prev)) == SET
++	      && XEXP (PATTERN (prev), 0) == return_reg)
++	    {
++	      XEXP (PATTERN (prev), 0) = return_save;
++		
++	      /* change use insn to NOTE_INSN_DELETED */
++	      PUT_CODE (insn, NOTE);
++	      NOTE_SOURCE_FILE (insn) = 0;
++	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
++	    }
++	}
++    }
++
++  return TRUE;
++}
++
++
++/*
++  Generate RTL to return from the current function, with value VAL.
++  It is copied and modified based on expand_value_return function of stmt.c
++*/
++
++static void
++expand_value_return (val)
++     rtx val;
++{
++  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
++
++  /* Copy the value to the return location
++     unless it's already there.  */
++
++  if (return_reg != val)
++    {
++#ifdef PROMOTE_FUNCTION_RETURN
++      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
++      int unsignedp = TREE_UNSIGNED (type);
++      enum machine_mode mode
++	= promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
++			&unsignedp, 1);
++
++      if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
++	convert_move (return_reg, val, unsignedp);
++      else
++#endif
++	emit_move_insn (return_reg, val);
++    }
++  if (GET_CODE (return_reg) == REG
++      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
++    emit_insn (gen_rtx_USE (VOIDmode, return_reg));
++  /* Handle calls that return values in multiple non-contiguous locations.
++     The Irix 6 ABI has examples of this.  */
++  else if (GET_CODE (return_reg) == PARALLEL)
++    {
++      int i;
++
++      for (i = 0; i < XVECLEN (return_reg, 0); i++)
++	{
++	  rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
++
++	  if (GET_CODE (x) == REG
++	      && REGNO (x) < FIRST_PSEUDO_REGISTER)
++	    emit_insn (gen_rtx_USE (VOIDmode, x));
++	}
++    }
++}
++
++
++static void
++validate_insns_of_varrefs (insn)
++     rtx insn;
++{
++  rtx next;
++
++  /* Initialize recognition, indicating that volatile is OK.  */
++  init_recog ();
++
++  for (; insn; insn = next)
++    {
++      next = NEXT_INSN (insn);
++      if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	  || GET_CODE (insn) == CALL_INSN)
++	{
++	  /* excerpt from insn_invalid_p in recog.c */
++	  int icode = recog_memoized (insn);
++
++	  if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
++	    validate_operand_of_varrefs (insn, &PATTERN (insn));
++	}
++    }
++
++  init_recog_no_volatile ();
++}
++
++
++static void
++validate_operand_of_varrefs (insn, loc)
++     rtx insn, *loc;
++{
++  register enum rtx_code code;
++  rtx x, temp, seq;
++  int i, j;
++  const char *fmt;
++
++  x = *loc;
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case USE:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return;
++
++    case PLUS:
++      /* validate insn of frame register plus constant.  */
++      if (GET_CODE (x) == PLUS
++	  && XEXP (x, 0) == virtual_stack_vars_rtx
++	  && CONSTANT_P (XEXP (x, 1)))
++	{
++	  start_sequence ();
++	  /* temp = force_operand (x, NULL_RTX); */
++	  { /* excerpt from expand_binop in optabs.c */
++	    optab binoptab = add_optab;
++	    enum machine_mode mode = GET_MODE (x);
++	    int icode = (int) binoptab->handlers[(int) mode].insn_code;
++	    enum machine_mode mode1 = insn_operand_mode[icode][2];
++	    rtx pat;
++	    rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
++	    temp = gen_reg_rtx (mode);
++
++	    /* Now, if insn's predicates don't allow offset operands, put them into
++	       pseudo regs.  */
++
++	    if (! (*insn_operand_predicate[icode][2]) (xop1, mode1)
++		&& mode1 != VOIDmode)
++	      xop1 = copy_to_mode_reg (mode1, xop1);
++
++	    pat = GEN_FCN (icode) (temp, xop0, xop1);
++	    if (pat)
++	      emit_insn (pat);
++	  }	      
++	  seq = get_insns ();
++	  end_sequence ();
++	  
++	  emit_insns_before (seq, insn);
++	  if (! validate_change (insn, loc, temp, 0))
++	    abort ();
++	  return;
++	}
++	break;
++      
++
++    case CALL_PLACEHOLDER:
++      validate_insns_of_varrefs (XEXP (x, 0));
++      validate_insns_of_varrefs (XEXP (x, 1));
++      validate_insns_of_varrefs (XEXP (x, 2));
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      validate_operand_of_varrefs (insn, &XEXP (x, i));
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
++}
++
++
++
++
++/*
++  The following code is invoked after the instantiation of pseudo registers.
++
++  Reorder local variables to place a pseudo register after buffers to avoid
++  the corruption of local variables that could be used to further corrupt
++  arbitrary memory locations.
++*/
++#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
++static void push_frame PARAMS ((HOST_WIDE_INT var_size, HOST_WIDE_INT boundary));
++static void push_frame_in_decls PARAMS ((tree block, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_in_args PARAMS ((tree parms, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_of_insns PARAMS ((rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_in_operand PARAMS ((rtx insn, rtx orig, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_of_reg_equiv_memory_loc PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_of_reg_equiv_constant PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void reset_used_flags_for_push_frame PARAMS ((void));
++static int check_out_of_frame_access PARAMS ((rtx insn, HOST_WIDE_INT boundary));
++static int check_out_of_frame_access_in_operand PARAMS ((rtx, HOST_WIDE_INT boundary));
++#endif
++
++rtx
++assign_stack_local_for_pseudo_reg (mode, size, align)
++     enum machine_mode mode;
++     HOST_WIDE_INT size;
++     int align;
++{
++#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
++  return assign_stack_local (mode, size, align);
++#else
++  tree blocks = DECL_INITIAL (current_function_decl);
++  rtx new;
++  HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
++  int first_call_from_purge_addressof, first_call_from_global_alloc;
++
++  if (! flag_propolice_protection
++      || size == 0
++      || ! blocks || TREE_CODE (blocks) != BLOCK
++      || current_function_is_inlinable
++      || ! search_string_from_argsandvars (1)
++      || current_function_contains_functions)
++    return assign_stack_local (mode, size, align);
++
++  first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
++  first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
++  saved_cse_not_expected = cse_not_expected;
++
++  starting_frame = (STARTING_FRAME_OFFSET)?STARTING_FRAME_OFFSET:BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++  units_per_push = MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT,
++		       GET_MODE_SIZE (mode));
++    
++  if (first_call_from_purge_addressof)
++    {
++      push_frame_offset = push_allocated_offset;
++      if (check_out_of_frame_access (get_insns (), starting_frame))
++	{
++	  /* if there is an access beyond frame, push dummy region to separate
++	     the address of instantiated variables */
++	  push_frame (GET_MODE_SIZE (DImode), 0);
++	  assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
++	}
++    }
++
++  if (first_call_from_global_alloc)
++    {
++      push_frame_offset = push_allocated_offset = 0;
++      if (check_out_of_frame_access (get_insns (), starting_frame))
++	{
++	  if (STARTING_FRAME_OFFSET)
++	    {
++	      /* if there is an access beyond frame, push dummy region 
++		 to separate the address of instantiated variables */
++	      push_frame (GET_MODE_SIZE (DImode), 0);
++	      assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
++	    }
++	  else
++	    push_allocated_offset = starting_frame;
++	}
++    }
++
++  saved_frame_offset = frame_offset;
++  frame_offset = push_frame_offset;
++
++  new = assign_stack_local (mode, size, align);
++
++  push_frame_offset = frame_offset;
++  frame_offset = saved_frame_offset;
++  
++  if (push_frame_offset > push_allocated_offset)
++    {
++      push_frame (units_per_push, push_allocated_offset + STARTING_FRAME_OFFSET);
++
++      assign_stack_local (BLKmode, units_per_push, -1);
++      push_allocated_offset += units_per_push;
++    }
++
++  /* At the second call from global alloc, alpha push frame and assign
++     a local variable to the top of the stack */
++  if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
++    push_frame_offset = push_allocated_offset = 0;
++
++  return new;
++#endif
++}
++
++
++#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
++/*
++  push frame information for instantiating pseudo register at the top of stack.
++  This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is 
++  not defined.
++
++  It is called by purge_addressof function and global_alloc (or reload)
++  function.
++*/
++static void
++push_frame (var_size, boundary)
++     HOST_WIDE_INT var_size, boundary;
++{
++  reset_used_flags_for_push_frame();
++
++  /* scan all declarations of variables and fix the offset address of the variable based on the frame pointer */
++  push_frame_in_decls (DECL_INITIAL (current_function_decl), var_size, boundary);
++
++  /* scan all argument variable and fix the offset address based on the frame pointer */
++  push_frame_in_args (DECL_ARGUMENTS (current_function_decl), var_size, boundary);
++
++  /* scan all operands of all insns and fix the offset address based on the frame pointer */
++  push_frame_of_insns (get_insns (), var_size, boundary);
++
++  /* scan all reg_equiv_memory_loc and reg_equiv_constant*/
++  push_frame_of_reg_equiv_memory_loc (var_size, boundary);
++  push_frame_of_reg_equiv_constant (var_size, boundary);
++
++  reset_used_flags_for_push_frame();
++}
++
++static void
++reset_used_flags_for_push_frame()
++{
++  int i;
++  extern rtx *reg_equiv_memory_loc;
++  extern rtx *reg_equiv_constant;
++
++  /* Clear all the USED bits in operands of all insns and declarations of local vars */
++  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
++  reset_used_flags_for_insns (get_insns ());
++
++
++  /* The following codes are processed if the push_frame is called from 
++     global_alloc (or reload) function */
++  if (reg_equiv_memory_loc == 0) return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_memory_loc[i])
++      {
++	rtx x = reg_equiv_memory_loc[i];
++
++	if (GET_CODE (x) == MEM
++	    && GET_CODE (XEXP (x, 0)) == PLUS
++	    && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
++	  {
++	    /* reset */
++	    XEXP (x, 0)->used = 0;
++	  }
++      }
++
++  
++  if (reg_equiv_constant == 0) return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_constant[i])
++      {
++	rtx x = reg_equiv_constant[i];
++
++	if (GET_CODE (x) == PLUS
++	    && AUTO_BASEPTR (x) == frame_pointer_rtx)
++	  {
++	    /* reset */
++	    x->used = 0;
++	  }
++      }
++}
++
++static void
++push_frame_in_decls (block, push_size, boundary)
++     tree block;
++     HOST_WIDE_INT push_size, boundary;
++{
++  tree types;
++  HOST_WIDE_INT offset;
++  rtx home;
++
++  while (block)
++    {
++      types = BLOCK_VARS(block);
++	
++      while (types)
++	{
++	  /* skip declarations that refer to an external variable and
++	     also skip global variables */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
++	    {
++	    
++	      home = DECL_RTL (types);
++	      if (home == 0) goto next;
++
++	      /* process for static local variable */
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
++		goto next;
++
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == REG)
++		{
++		  if (XEXP (home, 0) != frame_pointer_rtx
++		      || boundary != 0)
++		    goto next;
++
++		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
++						  push_size);
++
++		  /* mark */
++		  XEXP (home, 0)->used = 1;
++		}
++		
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == MEM)
++		{
++
++		  /* process for dynamically allocated array */
++		  home = XEXP (home, 0);
++		}
++		
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == PLUS
++		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++		{
++		  offset = AUTO_OFFSET(XEXP (home, 0));
++
++		  if (! XEXP (home, 0)->used
++		      && offset >= boundary)
++		    {
++		      offset += push_size;
++		      XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++		      
++		      /* mark */
++		      XEXP (home, 0)->used = 1;
++		    }
++		}
++
++	    }
++	next:
++	  types = TREE_CHAIN(types);
++	}
++
++      push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++static void
++push_frame_in_args (parms, push_size, boundary)
++     tree parms;
++     HOST_WIDE_INT push_size, boundary;
++{
++  rtx home;
++  HOST_WIDE_INT offset;
++    
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    home = DECL_INCOMING_RTL (parms);
++	    offset = AUTO_OFFSET(XEXP (home, 0));
++
++	    if (XEXP (home, 0)->used || offset < boundary) continue;
++
++	    /* the operand related to the sweep variable */
++	    if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
++	      {
++		if (XEXP (home, 0) == frame_pointer_rtx)
++		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
++						  push_size);
++		else {
++		  offset += push_size;
++		  XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								offset);
++		}
++
++		/* mark */
++		XEXP (home, 0)->used = 1;
++	      }
++	  }
++      }
++}
++
++
++static int insn_pushed;
++static int *fp_equiv = 0;
++
++static void
++push_frame_of_insns (insn, push_size, boundary)
++     rtx insn;
++     HOST_WIDE_INT push_size, boundary;
++{
++  /* init fp_equiv */
++  fp_equiv = (int *) alloca (max_reg_num () * sizeof (int));
++  bzero ((char *) fp_equiv, max_reg_num () * sizeof (int));
++		
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	insn_pushed = FALSE; debuginsn = insn;
++	push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
++
++	if (insn_pushed)
++	  {
++	    rtx trial = insn;
++	    rtx before = PREV_INSN (trial);
++	    rtx after = NEXT_INSN (trial);
++	    int has_barrier = 0;
++	    rtx tem;
++	    rtx seq = split_insns (PATTERN (insn), insn);
++
++	    /* If we are splitting a JUMP_INSN, it might be followed by a 
++	       BARRIER. We may need to handle this specially.  */
++	    if (after && GET_CODE (after) == BARRIER)
++	      {
++		has_barrier = 1;
++		after = NEXT_INSN (after);
++	      }
++
++	    if (seq && GET_CODE (seq) == SEQUENCE)
++	      {
++		if (XVECLEN (seq, 0) == 2)
++		  {
++		    rtx pattern = PATTERN (XVECEXP (seq, 0, 1));
++
++		    if (GET_CODE (pattern) == SET
++			&& GET_CODE (XEXP (pattern, 0)) == REG
++			&& GET_CODE (XEXP (pattern, 1)) == PLUS
++			&& XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
++			&& CONSTANT_P (XEXP (XEXP (pattern, 1), 1)))
++		      {
++			rtx offset = XEXP (XEXP (pattern, 1), 1);
++			fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
++
++			/* replace the pattern of the insn */
++			add_insn_after (XVECEXP (seq, 0, 0), before);
++			delete_insn (trial);
++			goto next;
++		      }
++		  }
++
++		/* excerpt from emit-rtl.c: L3320 */
++		tem = emit_insn_after (seq, trial);
++
++		delete_insn (trial);
++		if (has_barrier)
++		  emit_barrier_after (tem);
++
++		/* Recursively call try_split for each new insn created */
++		for (tem = NEXT_INSN (before); tem != after;
++		     tem = NEXT_INSN (tem))
++		  if (! INSN_DELETED_P (tem)
++		      && GET_RTX_CLASS (GET_CODE(tem)) == 'i')
++		    tem = try_split (PATTERN (tem), tem, 1);
++	      }
++	  }
++
++      next:
++	/* push frame in NOTE */
++	push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
++
++	/* push frame in CALL EXPR_LIST */
++	if (GET_CODE (insn) == CALL_INSN)
++	  push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn), push_size, boundary);
++      }
++}
++
++
++static void
++push_frame_in_operand (insn, orig, push_size, boundary)
++     rtx insn, orig;
++     HOST_WIDE_INT push_size, boundary;
++{
++  register rtx x = orig;
++  register enum rtx_code code;
++  int i, j;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++    case USE:
++      return;
++	    
++    case SET:
++      /*
++	skip setjmp setup insn and setjmp restore insn
++	alpha case:
++	(set (MEM (reg:SI xx)) (frame_pointer_rtx)))
++	(set (frame_pointer_rtx) (REG))
++      */
++      if (GET_CODE (XEXP (x, 0)) == MEM
++	  && XEXP (x, 1) == frame_pointer_rtx)
++	return;
++      if (XEXP (x, 0) == frame_pointer_rtx
++	  && GET_CODE (XEXP (x, 1)) == REG)
++	return;
++
++      /*
++	powerpc case: restores setjmp address
++	(set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
++	or
++	(set (reg) (plus frame_pointer_rtx const_int -n))
++	(set (frame_pointer_rtx) (reg))
++      */
++      if (GET_CODE (XEXP (x, 0)) == REG
++	  && GET_CODE (XEXP (x, 1)) == PLUS
++	  && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
++	  && CONSTANT_P (XEXP (XEXP (x, 1), 1))
++	  && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
++	{
++	  x = XEXP (x, 1);
++	  offset = AUTO_OFFSET(x);
++	  if (x->used || abs (offset) < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
++	  x->used = 1; insn_pushed = TRUE;
++	  return;
++	}
++
++      /* reset fp_equiv register */
++      else if (GET_CODE (XEXP (x, 0)) == REG
++	  && fp_equiv[REGNO (XEXP (x, 0))])
++	fp_equiv[REGNO (XEXP (x, 0))] = 0;
++
++      /* propagate fp_equiv register */
++      else if (GET_CODE (XEXP (x, 0)) == REG
++	       && GET_CODE (XEXP (x, 1)) == REG
++	       && fp_equiv[REGNO (XEXP (x, 1))])
++	if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
++	    || reg_renumber[REGNO (XEXP (x, 0))] > 0)
++	  fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
++      break;
++
++    case MEM:
++      if (XEXP (x, 0) == frame_pointer_rtx
++	  && boundary == 0)
++	{
++	  XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
++	  XEXP (x, 0)->used = 1; insn_pushed = TRUE;
++	  return;
++	}
++      break;
++      
++    case PLUS:
++      offset = AUTO_OFFSET(x);
++
++      /* Handle special case of frame register plus constant.  */
++      if (CONSTANT_P (XEXP (x, 1))
++	  && XEXP (x, 0) == frame_pointer_rtx)
++	{
++	  if (x->used || offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle alpha case:
++	 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
++      */
++      if (CONSTANT_P (XEXP (x, 1))
++	  && GET_CODE (XEXP (x, 0)) == SUBREG
++	  && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
++	{
++	  if (x->used || offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle powerpc case:
++	 (set (reg x) (plus fp const))
++	 (set (.....) (... (plus (reg x) (const B))))
++      */
++      else if (CONSTANT_P (XEXP (x, 1))
++	       && GET_CODE (XEXP (x, 0)) == REG
++	       && fp_equiv[REGNO (XEXP (x, 0))])
++	{
++	  if (x->used) return;
++
++	  offset += fp_equiv[REGNO (XEXP (x, 0))];
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle special case of frame register plus reg (constant).
++	 (set (reg x) (const B))
++	 (set (....) (...(plus fp (reg x))))
++      */
++      else if (XEXP (x, 0) == frame_pointer_rtx
++	       && GET_CODE (XEXP (x, 1)) == REG
++	       && PREV_INSN (insn)
++	       && PATTERN (PREV_INSN (insn))
++	       && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
++	       && CONSTANT_P (SET_SRC (PATTERN (PREV_INSN (insn)))))
++	{
++	  HOST_WIDE_INT offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
++
++	  if (x->used || offset < boundary)
++	    return;
++	  
++	  SET_SRC (PATTERN (PREV_INSN (insn)))
++	    = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1;
++	  XEXP (x, 1)->used = 1;
++
++	  return;
++	}
++      /* Handle special case of frame register plus reg (used).  */
++      else if (XEXP (x, 0) == frame_pointer_rtx
++	       && XEXP (x, 1)->used)
++	{
++	  x->used = 1;
++	  return;
++	}
++      /*
++	process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      push_frame_of_insns (XEXP (x, 0), push_size, boundary);
++      push_frame_of_insns (XEXP (x, 1), push_size, boundary);
++      push_frame_of_insns (XEXP (x, 2), push_size, boundary);
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
++	  fatal_insn ("push_frame_in_operand", insn);
++	push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
++}   
++
++static void
++push_frame_of_reg_equiv_memory_loc (push_size, boundary)
++     HOST_WIDE_INT push_size, boundary;
++{
++  int i;
++  extern rtx *reg_equiv_memory_loc;
++
++  /* This function is processed if the push_frame is called from 
++     global_alloc (or reload) function */
++  if (reg_equiv_memory_loc == 0) return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_memory_loc[i])
++      {
++	rtx x = reg_equiv_memory_loc[i];
++	int offset;
++
++	if (GET_CODE (x) == MEM
++	    && GET_CODE (XEXP (x, 0)) == PLUS
++	    && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
++	  {
++	    offset = AUTO_OFFSET(XEXP (x, 0));
++	    
++	    if (! XEXP (x, 0)->used
++		&& offset >= boundary)
++	      {
++		offset += push_size;
++		XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		/* mark */
++		XEXP (x, 0)->used = 1;
++	      }
++	  }
++	else if (GET_CODE (x) == MEM
++		 && XEXP (x, 0) == frame_pointer_rtx
++		 && boundary == 0)
++	  {
++	    XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
++	    XEXP (x, 0)->used = 1; insn_pushed = TRUE;
++	  }
++      }
++}
++
++static void
++push_frame_of_reg_equiv_constant (push_size, boundary)
++     HOST_WIDE_INT push_size, boundary;
++{
++  int i;
++  extern rtx *reg_equiv_constant;
++
++  /* This function is processed if the push_frame is called from 
++     global_alloc (or reload) function */
++  if (reg_equiv_constant == 0) return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_constant[i])
++      {
++	rtx x = reg_equiv_constant[i];
++	int offset;
++
++	if (GET_CODE (x) == PLUS
++	    && XEXP (x, 0) == frame_pointer_rtx)
++	  {
++	    offset = AUTO_OFFSET(x);
++	    
++	    if (! x->used
++		&& offset >= boundary)
++	      {
++		offset += push_size;
++		XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		/* mark */
++		x->used = 1;
++	      }
++	  }
++	else if (x == frame_pointer_rtx
++		 && boundary == 0)
++	  {
++	    reg_equiv_constant[i]
++	      = plus_constant (frame_pointer_rtx, push_size);
++	    reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
++	  }
++      }
++}
++
++static int
++check_out_of_frame_access (insn, boundary)
++     rtx insn;
++     HOST_WIDE_INT boundary;
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
++	  return TRUE;
++      }
++  return FALSE;
++}
++
++
++static int
++check_out_of_frame_access_in_operand (orig, boundary)
++     rtx orig;
++     HOST_WIDE_INT boundary;
++{
++  register rtx x = orig;
++  register enum rtx_code code;
++  int i, j;
++  const char *fmt;
++
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return FALSE;
++	    
++    case MEM:
++      if (XEXP (x, 0) == frame_pointer_rtx)
++	if (0 < boundary) return TRUE;
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (CONSTANT_P (XEXP (x, 1))
++	  && XEXP (x, 0) == frame_pointer_rtx)
++	{
++	  if (0 <= AUTO_OFFSET(x)
++	      && AUTO_OFFSET(x) < boundary) return TRUE;
++	  return FALSE;
++	}
++      /*
++	process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      if (check_out_of_frame_access (XEXP (x, 0), boundary)) return TRUE;
++      if (check_out_of_frame_access (XEXP (x, 1), boundary)) return TRUE;
++      if (check_out_of_frame_access (XEXP (x, 2), boundary)) return TRUE;
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
++	  return TRUE;
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
++	  return TRUE;
++
++  return FALSE;
++}
++#endif
+diff -Naur gcc-2.95.3.orig/gcc/protector.h gcc-2.95.3.ssp/gcc/protector.h
+--- gcc-2.95.3.orig/gcc/protector.h	1970-01-01 00:00:00.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/protector.h	2004-05-17 00:30:52.000000000 +0000
+@@ -0,0 +1,49 @@
++/* RTL buffer overflow protection function for GNU C compiler
++   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
++
++This file is part of GNU CC.
++
++GNU CC is free software; you can redistribute it and/or modify
++it under the terms of the GNU General Public License as published by
++the Free Software Foundation; either version 2, or (at your option)
++any later version.
++
++GNU CC is distributed in the hope that it will be useful,
++but WITHOUT ANY WARRANTY; without even the implied warranty of
++MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
++GNU General Public License for more details.
++
++You should have received a copy of the GNU General Public License
++along with GNU CC; see the file COPYING.  If not, write to
++the Free Software Foundation, 59 Temple Place - Suite 330,
++Boston, MA 02111-1307, USA.  */
++
++
++/* declaration of GUARD variable */
++#define GUARD_m		Pmode
++#define UNITS_PER_GUARD MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
++#define SSP_DUMMY_FILE	"_ssp_"
++
++#ifndef L_stack_smash_handler
++
++/* insert a guard variable before a character buffer and change the order
++ of pointer variables, character buffers and pointer arguments */
++
++extern void prepare_stack_protection  PARAMS ((int inlinable));
++
++#ifdef TREE_CODE
++/* search a character array from the specified type tree */
++
++extern int search_string_def PARAMS ((tree names));
++#endif
++
++/* examine whether the input contains frame pointer addressing */
++
++extern int contains_fp PARAMS ((rtx op));
++
++/* allocate a local variable in the stack area before character buffers
++   to avoid the corruption of it */
++
++extern rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int));
++
++#endif
+diff -Naur gcc-2.95.3.orig/gcc/reload1.c gcc-2.95.3.ssp/gcc/reload1.c
+--- gcc-2.95.3.orig/gcc/reload1.c	2001-01-25 14:03:21.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/reload1.c	2004-08-30 01:29:22.000000000 +0000
+@@ -39,6 +39,7 @@
+ #include "output.h"
+ #include "real.h"
+ #include "toplev.h"
++#include "protector.h"
+ 
+ #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
+ #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
+@@ -2424,7 +2425,7 @@
+       if (from_reg == -1)
+ 	{
+ 	  /* No known place to spill from => no slot to reuse.  */
+-	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
++	  x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
+ 				  inherent_size == total_size ? 0 : -1);
+ 	  if (BYTES_BIG_ENDIAN)
+ 	    /* Cancel the  big-endian correction done in assign_stack_local.
+diff -Naur gcc-2.95.3.orig/gcc/toplev.c gcc-2.95.3.ssp/gcc/toplev.c
+--- gcc-2.95.3.orig/gcc/toplev.c	2001-01-25 14:03:23.000000000 +0000
++++ gcc-2.95.3.ssp/gcc/toplev.c	2004-08-30 01:29:22.000000000 +0000
+@@ -772,6 +772,15 @@
+ 
+ int flag_no_ident = 0;
+ 
++#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
++/* Nonzero means use propolice as a stack protection method */
++int flag_propolice_protection = 1;
++int flag_stack_protection = 0;
++#else
++int flag_propolice_protection = 0;
++int flag_stack_protection = 0;
++#endif
++
+ /* Table of supported debugging formats.  */
+ static struct
+ {
+@@ -979,7 +988,11 @@
+   {"leading-underscore", &flag_leading_underscore, 1,
+    "External symbols have a leading underscore" },
+   {"ident", &flag_no_ident, 0,
+-   "Process #ident directives"}
++   "Process #ident directives"},
++  {"stack-protector", &flag_propolice_protection, 1,
++   "Enables stack protection" },
++  {"stack-protector-all", &flag_stack_protection, 1,
++   "Enables stack protection of every function" },
+ };
+ 
+ #define NUM_ELEM(a)  (sizeof (a) / sizeof ((a)[0]))
+@@ -1258,7 +1271,9 @@
+   {"uninitialized", &warn_uninitialized, 1,
+    "Warn about unitialized automatic variables"},
+   {"inline", &warn_inline, 1,
+-   "Warn when an inlined function cannot be inlined"}
++   "Warn when an inlined function cannot be inlined"},
++  {"stack-protector", &warn_stack_protector, 1,
++   "Warn when disabling stack protector for some reason"}
+ };
+ 

+ /* Output files for assembler code (real compiler output)
+@@ -3608,6 +3623,10 @@
+   int failure = 0;
+   int rebuild_label_notes_after_reload;
+ 
++  /* When processing delayed functions, init_function_start() won't
++     have been run to re-initialize it.  */
++  cse_not_expected = ! optimize;
++
+   /* If we are reconsidering an inline function
+      at the end of compilation, skip the stuff for making it inline.  */
+ 
+@@ -3645,6 +3664,8 @@
+ 
+       insns = get_insns ();
+ 
++      if (flag_propolice_protection) prepare_stack_protection (inlinable);
++  
+       /* Dump the rtl code if we are dumping rtl.  */
+ 
+       if (rtl_dump)
+@@ -5447,6 +5468,13 @@
+ 	print_switch_values (stderr, 0, MAX_LINE, "", " ", "\n");
+     }
+ 
++
++  /* This combination makes optimized frame addressings and causes
++     an internal compilation error at prepare_stack_protection.
++     So don't allow it.  */
++  if (flag_stack_protection && !flag_propolice_protection)
++    flag_propolice_protection = TRUE;
++    
+   compile_file (filename);
+ 
+ #if !defined(OS2) && !defined(VMS) && (!defined(_WIN32) || defined (__CYGWIN__)) && !defined(__INTERIX)

Added: trunk/gcc/gcc-3.3-ssp-3.patch
===================================================================
--- trunk/gcc/gcc-3.3-ssp-3.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/gcc/gcc-3.3-ssp-3.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,3591 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2004-08-29
+Initial Package Version: 3.3
+Upstream Status: Rejected Upstream
+Origin: http://www.research.ibm.com/trl/projects/security/ssp/
+Description: Smashing Stack Protector - protector-3.3.2-2.tar.gz
+This patch is made specifically to work with the Glibc SSP patch. All guard
+functions have been removed. Developers are encouraged to check the
+differences between this patch, the original from ibm, and the Glibc patch.
+
+You might also want to change the version after applying this patch:
+sed -e 's/3.3*/3.x ssp/' -i gcc/version.c
+Replace 3.x with your gcc version (3.3, 3.3.1, 3.3.2, 3.3.3).
+
+This patch, and Glibc's patch, depends on erandom sysctl from:
+http://frandom.sourceforge.net/
+Thanks to Eli Billauer.
+
+This patch will also work on *BSD with their ssp libc patch.
+
+Also see:
+http://www.linuxfromscratch.org/hlfs/
+http://www.linuxfromscratch.org/hints/downloads/files/ssp.txt
+http://www.linuxfromscratch.org/hints/downloads/files/entropy.txt
+http://www.linuxfromscratch.org/~robert/winter/winter.txt
+
+These patches may get updated, check for newest version:
+http://www.linuxfromscratch.org/patches/downloads/linux/\
+        linux-2.4.26-frandom-1.patch
+http://www.linuxfromscratch.org/patches/downloads/glibc/\
+        glibc-2.3.3-ssp-frandom-2.patch
+
+diff -Naur gcc-3.3.orig/gcc/Makefile.in gcc-3.3.ssp/gcc/Makefile.in
+--- gcc-3.3.orig/gcc/Makefile.in	2003-04-22 15:50:48.000000000 +0000
++++ gcc-3.3.ssp/gcc/Makefile.in	2004-08-30 01:16:18.000000000 +0000
+@@ -761,7 +761,7 @@
+  sibcall.o simplify-rtx.o ssa.o ssa-ccp.o ssa-dce.o stmt.o		   \
+  stor-layout.o stringpool.o timevar.o toplev.o tracer.o tree.o tree-dump.o \
+  tree-inline.o unroll.o varasm.o varray.o version.o vmsdbgout.o xcoffout.o \
+- et-forest.o $(GGC) $(out_object_file) $(EXTRA_OBJS)
++ et-forest.o protector.o $(GGC) $(out_object_file) $(EXTRA_OBJS)
+ 
+ BACKEND = main.o libbackend.a
+ 
+@@ -795,7 +795,7 @@
+ 
+ LIB2FUNCS_2 = _floatdixf _fixunsxfsi _fixtfdi _fixunstfdi _floatditf \
+     _clear_cache _trampoline __main _exit _absvsi2 _absvdi2 _addvsi3 \
+-    _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors
++    _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors _stack_smash_handler
+ 
+ # Defined in libgcc2.c, included only in the static library.
+ LIB2FUNCS_ST = _eprintf _bb __gcc_bcmp
+@@ -1662,6 +1662,7 @@
+    output.h except.h $(TM_P_H) real.h
+ params.o : params.c $(CONFIG_H) $(SYSTEM_H) $(PARAMS_H) toplev.h
+ hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) $(HOOKS_H)
++protector.o: protector.c $(CONFIG_H)
+ 
+ $(out_object_file): $(out_file) $(CONFIG_H) $(TREE_H) $(GGC_H) \
+    $(RTL_H) $(REGS_H) hard-reg-set.h real.h insn-config.h conditions.h \
+diff -Naur gcc-3.3.orig/gcc/calls.c gcc-3.3.ssp/gcc/calls.c
+--- gcc-3.3.orig/gcc/calls.c	2003-04-22 15:50:54.000000000 +0000
++++ gcc-3.3.ssp/gcc/calls.c	2004-08-30 01:16:18.000000000 +0000
+@@ -2324,7 +2324,7 @@
+ 	    /* For variable-sized objects, we must be called with a target
+ 	       specified.  If we were to allocate space on the stack here,
+ 	       we would have no way of knowing when to free it.  */
+-	    rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
++	    rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
+ 
+ 	    mark_temp_addr_taken (d);
+ 	    structure_value_addr = XEXP (d, 0);
+diff -Naur gcc-3.3.orig/gcc/combine.c gcc-3.3.ssp/gcc/combine.c
+--- gcc-3.3.orig/gcc/combine.c	2003-03-24 11:37:32.000000000 +0000
++++ gcc-3.3.ssp/gcc/combine.c	2004-08-30 01:16:18.000000000 +0000
+@@ -3859,7 +3859,17 @@
+ 	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
+ 	  rtx inner_op1 = XEXP (x, 1);
+ 	  rtx inner;
+-
++	  
++#ifndef FRAME_GROWS_DOWNWARD
++	  if (flag_propolice_protection
++	      && code == PLUS
++	      && other == frame_pointer_rtx
++	      && GET_CODE (inner_op0) == CONST_INT
++	      && GET_CODE (inner_op1) == CONST_INT
++	      && INTVAL (inner_op0) > 0
++	      && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
++	    return x;
++#endif
+ 	  /* Make sure we pass the constant operand if any as the second
+ 	     one if this is a commutative operation.  */
+ 	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
+@@ -4272,6 +4282,11 @@
+ 	 they are now checked elsewhere.  */
+       if (GET_CODE (XEXP (x, 0)) == PLUS
+ 	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
++#ifndef FRAME_GROWS_DOWNWARD
++	if (! (flag_propolice_protection
++	       && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
++	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
++#endif
+ 	return gen_binary (PLUS, mode,
+ 			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
+ 				       XEXP (x, 1)),
+@@ -4400,7 +4415,10 @@
+ 
+       /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
+ 	 integers.  */
+-      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
++      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
++	  && (! (flag_propolice_protection
++		 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
++		 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
+ 	return gen_binary (MINUS, mode,
+ 			   gen_binary (MINUS, mode, XEXP (x, 0),
+ 				       XEXP (XEXP (x, 1), 0)),
+diff -Naur gcc-3.3.orig/gcc/config/arm/arm.md gcc-3.3.ssp/gcc/config/arm/arm.md
+--- gcc-3.3.orig/gcc/config/arm/arm.md	2003-03-20 22:03:17.000000000 +0000
++++ gcc-3.3.ssp/gcc/config/arm/arm.md	2004-08-30 01:16:19.000000000 +0000
+@@ -3948,7 +3948,13 @@
+ 	(match_operand:DI 1 "general_operand" ""))]
+   "TARGET_EITHER"
+   "
+-  if (TARGET_THUMB)
++  if (TARGET_ARM)
++    {
++      /* Everything except mem = const or mem = mem can be done easily */
++      if (GET_CODE (operands[0]) == MEM)
++        operands[1] = force_reg (DImode, operands[1]);
++    }
++  else /* TARGET_THUMB.... */
+     {
+       if (!no_new_pseudos)
+         {
+diff -Naur gcc-3.3.orig/gcc/config/t-linux gcc-3.3.ssp/gcc/config/t-linux
+--- gcc-3.3.orig/gcc/config/t-linux	2002-11-28 14:47:01.000000000 +0000
++++ gcc-3.3.ssp/gcc/config/t-linux	2004-08-30 01:16:19.000000000 +0000
+@@ -4,7 +4,7 @@
+ # Compile crtbeginS.o and crtendS.o with pic.
+ CRTSTUFF_T_CFLAGS_S = $(CRTSTUFF_T_CFLAGS) -fPIC
+ # Compile libgcc2.a with pic.
+-TARGET_LIBGCC2_CFLAGS = -fPIC
++TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG
+ 
+ # Override t-slibgcc-elf-ver to export some libgcc symbols with
+ # the symbol versions that glibc used.
+diff -Naur gcc-3.3.orig/gcc/cse.c gcc-3.3.ssp/gcc/cse.c
+--- gcc-3.3.orig/gcc/cse.c	2003-04-29 19:16:40.000000000 +0000
++++ gcc-3.3.ssp/gcc/cse.c	2004-08-30 01:16:19.000000000 +0000
+@@ -4288,7 +4288,14 @@
+ 
+ 	      if (new_const == 0)
+ 		break;
+-
++#ifndef FRAME_GROWS_DOWNWARD
++	      if (flag_propolice_protection
++		  && GET_CODE (y) == PLUS
++		  && XEXP (y, 0) == frame_pointer_rtx
++		  && INTVAL (inner_const) > 0
++		  && INTVAL (new_const) <= 0)
++		break;
++#endif
+ 	      /* If we are associating shift operations, don't let this
+ 		 produce a shift of the size of the object or larger.
+ 		 This could occur when we follow a sign-extend by a right
+@@ -4823,6 +4830,14 @@
+       if (SET_DEST (x) == pc_rtx
+ 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
+ 	;
++      /* cut the reg propagation of stack-protected argument */
++      else if (x->volatil) {
++	rtx x1 = SET_DEST (x);
++	if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
++	  x1 = SUBREG_REG (x1);
++	if (! REGNO_QTY_VALID_P(REGNO (x1)))
++	  make_new_qty (REGNO (x1), GET_MODE (x1));
++      }
+ 
+       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
+ 	 The hard function value register is used only once, to copy to
+diff -Naur gcc-3.3.orig/gcc/explow.c gcc-3.3.ssp/gcc/explow.c
+--- gcc-3.3.orig/gcc/explow.c	2003-04-07 22:58:12.000000000 +0000
++++ gcc-3.3.ssp/gcc/explow.c	2004-08-30 01:16:19.000000000 +0000
+@@ -86,7 +86,8 @@
+   rtx tem;
+   int all_constant = 0;
+ 
+-  if (c == 0)
++  if (c == 0
++      && !(flag_propolice_protection && x == virtual_stack_vars_rtx))
+     return x;
+ 
+  restart:
+@@ -187,7 +188,8 @@
+       break;
+     }
+ 
+-  if (c != 0)
++  if (c != 0
++      || (flag_propolice_protection && x == virtual_stack_vars_rtx))
+     x = gen_rtx_PLUS (mode, x, GEN_INT (c));
+ 
+   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
+@@ -531,6 +533,21 @@
+ 	 in certain cases.  This is not necessary since the code
+ 	 below can handle all possible cases, but machine-dependent
+ 	 transformations can make better code.  */
++      if (flag_propolice_protection)
++	{
++#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS				\
++			&& XEXP (X, 0) == virtual_stack_vars_rtx	\
++			&& GET_CODE (XEXP (X, 1)) == CONST_INT)
++	  rtx y;
++	  if (FRAMEADDR_P (x)) goto win;
++	  for (y=x; y!=0 && GET_CODE (y)==PLUS; y = XEXP (y, 0))
++	    {
++	      if (FRAMEADDR_P (XEXP (y, 0)))
++		XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
++	      if (FRAMEADDR_P (XEXP (y, 1)))
++		XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
++	    }
++	}
+       LEGITIMIZE_ADDRESS (x, oldx, mode, win);
+ 
+       /* PLUS and MULT can appear in special ways
+diff -Naur gcc-3.3.orig/gcc/expr.c gcc-3.3.ssp/gcc/expr.c
+--- gcc-3.3.orig/gcc/expr.c	2003-04-22 23:08:15.000000000 +0000
++++ gcc-3.3.ssp/gcc/expr.c	2004-08-30 01:16:19.000000000 +0000
+@@ -45,6 +45,7 @@
+ #include "langhooks.h"
+ #include "intl.h"
+ #include "tm_p.h"
++#include "protector.h"
+ 
+ /* Decide whether a function's arguments should be processed
+    from first to last or from last to first.
+@@ -1517,7 +1518,7 @@
+ 
+       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
+ 	{
+-	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
++	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len-GET_MODE_SIZE (mode)));
+ 	  data.autinc_from = 1;
+ 	  data.explicit_inc_from = -1;
+ 	}
+@@ -1531,7 +1532,7 @@
+ 	data.from_addr = copy_addr_to_reg (from_addr);
+       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
+ 	{
+-	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
++	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len-GET_MODE_SIZE (mode)));
+ 	  data.autinc_to = 1;
+ 	  data.explicit_inc_to = -1;
+ 	}
+@@ -1648,11 +1649,13 @@
+ 	from1 = adjust_address (data->from, mode, data->offset);
+ 
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
+-	emit_insn (gen_add2_insn (data->to_addr,
+-				  GEN_INT (-(HOST_WIDE_INT)size)));
++	if (data->explicit_inc_to < -1)
++	  emit_insn (gen_add2_insn (data->to_addr,
++				    GEN_INT (-(HOST_WIDE_INT)size)));
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
+-	emit_insn (gen_add2_insn (data->from_addr,
+-				  GEN_INT (-(HOST_WIDE_INT)size)));
++	if (data->explicit_inc_from < -1)
++	  emit_insn (gen_add2_insn (data->from_addr,
++				    GEN_INT (-(HOST_WIDE_INT)size)));
+ 
+       if (data->to)
+ 	emit_insn ((*genfun) (to1, from1));
+@@ -2816,7 +2819,7 @@
+ 
+       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
+ 	{
+-	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
++	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len-GET_MODE_SIZE (mode)));
+ 	  data->autinc_to = 1;
+ 	  data->explicit_inc_to = -1;
+ 	}
+@@ -2887,8 +2890,9 @@
+ 	to1 = adjust_address (data->to, mode, data->offset);
+ 
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
+-	emit_insn (gen_add2_insn (data->to_addr,
+-				  GEN_INT (-(HOST_WIDE_INT) size)));
++	if (data->explicit_inc_to < -1)
++	  emit_insn (gen_add2_insn (data->to_addr,
++				    GEN_INT (-(HOST_WIDE_INT) size)));
+ 
+       cst = (*data->constfun) (data->constfundata, data->offset, mode);
+       emit_insn ((*genfun) (to1, cst));
+@@ -5882,7 +5886,9 @@
+ 	  && GET_CODE (XEXP (value, 0)) == PLUS
+ 	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
+ 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
+-	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
++	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
++	  && (!flag_propolice_protection
++	      || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
+ 	{
+ 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
+ 					  XEXP (XEXP (value, 0), 0), op2,
+@@ -8049,7 +8055,8 @@
+       /* If adding to a sum including a constant,
+ 	 associate it to put the constant outside.  */
+       if (GET_CODE (op1) == PLUS
+-	  && CONSTANT_P (XEXP (op1, 1)))
++	  && CONSTANT_P (XEXP (op1, 1))
++	  && !(flag_propolice_protection && (contains_fp (op0) || contains_fp (op1))))
+ 	{
+ 	  rtx constant_term = const0_rtx;
+ 
+diff -Naur gcc-3.3.orig/gcc/flags.h gcc-3.3.ssp/gcc/flags.h
+--- gcc-3.3.orig/gcc/flags.h	2003-05-02 21:01:17.000000000 +0000
++++ gcc-3.3.ssp/gcc/flags.h	2004-08-30 01:16:19.000000000 +0000
+@@ -685,4 +685,13 @@
+ #define HONOR_SIGN_DEPENDENT_ROUNDING(MODE) \
+   (MODE_HAS_SIGN_DEPENDENT_ROUNDING (MODE) && !flag_unsafe_math_optimizations)
+ 
++/* Nonzero means use propolice as a stack protection method */
++
++extern int flag_propolice_protection;
++extern int flag_stack_protection;
++
++/* Warn when not issuing stack smashing protection for some reason */
++
++extern int warn_stack_protector;
++
+ #endif /* ! GCC_FLAGS_H */
+diff -Naur gcc-3.3.orig/gcc/function.c gcc-3.3.ssp/gcc/function.c
+--- gcc-3.3.orig/gcc/function.c	2003-04-10 22:26:04.000000000 +0000
++++ gcc-3.3.ssp/gcc/function.c	2004-08-30 01:16:19.000000000 +0000
+@@ -59,6 +59,7 @@
+ #include "tm_p.h"
+ #include "integrate.h"
+ #include "langhooks.h"
++#include "protector.h"
+ 
+ #ifndef TRAMPOLINE_ALIGNMENT
+ #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
+@@ -142,6 +143,10 @@
+ /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
+    in this function.  */
+ static GTY(()) varray_type sibcall_epilogue;
++
++/* Current boundary mark for character arrays.  */
++int temp_boundary_mark = 0;
++
+ 

+ /* In order to evaluate some expressions, such as function calls returning
+    structures in memory, we need to temporarily allocate stack locations.
+@@ -195,6 +200,8 @@
+   /* The size of the slot, including extra space for alignment.  This
+      info is for combine_temp_slots.  */
+   HOST_WIDE_INT full_size;
++  /* Boundary mark of a character array and the others. This info is for propolice */
++  int boundary_mark;
+ };
+ 

+ /* This structure is used to record MEMs or pseudos used to replace VAR, any
+@@ -629,6 +636,7 @@
+    whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
+    if we are to allocate something at an inner level to be treated as
+    a variable in the block (e.g., a SAVE_EXPR).
++   KEEP is 5 if we allocate a place to return structure.
+ 
+    TYPE is the type that will be used for the stack slot.  */
+ 
+@@ -642,6 +650,8 @@
+   unsigned int align;
+   struct temp_slot *p, *best_p = 0;
+   rtx slot;
++  int char_array = (flag_propolice_protection
++		    && keep == 1 && search_string_def (type));
+ 
+   /* If SIZE is -1 it means that somebody tried to allocate a temporary
+      of a variable size.  */
+@@ -667,7 +677,8 @@
+ 	&& ! p->in_use
+ 	&& objects_must_conflict_p (p->type, type)
+ 	&& (best_p == 0 || best_p->size > p->size
+-	    || (best_p->size == p->size && best_p->align > p->align)))
++	    || (best_p->size == p->size && best_p->align > p->align))
++	&& (! char_array || p->boundary_mark != 0))
+       {
+ 	if (p->align == align && p->size == size)
+ 	  {
+@@ -702,6 +713,7 @@
+ 	      p->address = 0;
+ 	      p->rtl_expr = 0;
+ 	      p->type = best_p->type;
++	      p->boundary_mark = best_p->boundary_mark;
+ 	      p->next = temp_slots;
+ 	      temp_slots = p;
+ 
+@@ -762,6 +774,7 @@
+       p->full_size = frame_offset - frame_offset_old;
+ #endif
+       p->address = 0;
++      p->boundary_mark = char_array?++temp_boundary_mark:0;
+       p->next = temp_slots;
+       temp_slots = p;
+     }
+@@ -932,14 +945,16 @@
+ 	    int delete_q = 0;
+ 	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
+ 	      {
+-		if (p->base_offset + p->full_size == q->base_offset)
++		if (p->base_offset + p->full_size == q->base_offset &&
++		    p->boundary_mark == q->boundary_mark)
+ 		  {
+ 		    /* Q comes after P; combine Q into P.  */
+ 		    p->size += q->size;
+ 		    p->full_size += q->full_size;
+ 		    delete_q = 1;
+ 		  }
+-		else if (q->base_offset + q->full_size == p->base_offset)
++		else if (q->base_offset + q->full_size == p->base_offset &&
++			 p->boundary_mark == q->boundary_mark)
+ 		  {
+ 		    /* P comes after Q; combine P into Q.  */
+ 		    q->size += p->size;
+@@ -1497,7 +1512,9 @@
+     new = func->x_parm_reg_stack_loc[regno];
+ 
+   if (new == 0)
+-    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
++    new = function ?
++	assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func):
++	assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0);
+ 
+   PUT_CODE (reg, MEM);
+   PUT_MODE (reg, decl_mode);
+@@ -3961,7 +3978,8 @@
+ 		 constant with that register.  */
+ 	      temp = gen_reg_rtx (Pmode);
+ 	      XEXP (x, 0) = new;
+-	      if (validate_change (object, &XEXP (x, 1), temp, 0))
++	      if (validate_change (object, &XEXP (x, 1), temp, 0)
++		  && ! flag_propolice_protection)
+ 		emit_insn_before (gen_move_insn (temp, new_offset), object);
+ 	      else
+ 		{
+diff -Naur gcc-3.3.orig/gcc/gcse.c gcc-3.3.ssp/gcc/gcse.c
+--- gcc-3.3.orig/gcc/gcse.c	2003-05-01 18:19:39.000000000 +0000
++++ gcc-3.3.ssp/gcc/gcse.c	2004-08-30 01:16:19.000000000 +0000
+@@ -4208,7 +4208,7 @@
+       /* Find an assignment that sets reg_used and is available
+ 	 at the start of the block.  */
+       set = find_avail_set (regno, insn);
+-      if (! set)
++      if (! set || set->expr->volatil)
+ 	continue;
+ 
+       pat = set->expr;
+diff -Naur gcc-3.3.orig/gcc/integrate.c gcc-3.3.ssp/gcc/integrate.c
+--- gcc-3.3.orig/gcc/integrate.c	2003-03-20 23:20:02.000000000 +0000
++++ gcc-3.3.ssp/gcc/integrate.c	2004-08-30 01:16:19.000000000 +0000
+@@ -399,6 +399,10 @@
+   /* These args would always appear unused, if not for this.  */
+   TREE_USED (copy) = 1;
+ 
++  /* The inlined variable is marked as INLINE not to sweep by propolice */
++  if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL)
++    DECL_VAR_INLINE (copy) = 1;
++
+   /* Set the context for the new declaration.  */
+   if (!DECL_CONTEXT (decl))
+     /* Globals stay global.  */
+@@ -1963,6 +1967,10 @@
+ 
+ 	      seq = get_insns ();
+ 	      end_sequence ();
++#ifdef FRAME_GROWS_DOWNWARD
++	      if (flag_propolice_protection && GET_CODE (seq) == SET)
++		RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
++#endif
+ 	      emit_insn_after (seq, map->insns_at_start);
+ 	      return temp;
+ 	    }
+diff -Naur gcc-3.3.orig/gcc/loop.c gcc-3.3.ssp/gcc/loop.c
+--- gcc-3.3.orig/gcc/loop.c	2003-04-25 00:36:00.000000000 +0000
++++ gcc-3.3.ssp/gcc/loop.c	2004-08-30 01:16:19.000000000 +0000
+@@ -6516,6 +6516,14 @@
+   if (GET_CODE (*mult_val) == USE)
+     *mult_val = XEXP (*mult_val, 0);
+ 
++#ifndef FRAME_GROWS_DOWNWARD
++  if (flag_propolice_protection
++      && GET_CODE (*add_val) == PLUS
++      && (XEXP (*add_val, 0) == frame_pointer_rtx
++	  || XEXP (*add_val, 1) == frame_pointer_rtx))
++    return 0;
++#endif
++
+   if (is_addr)
+     *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost;
+   else
+diff -Naur gcc-3.3.orig/gcc/optabs.c gcc-3.3.ssp/gcc/optabs.c
+--- gcc-3.3.orig/gcc/optabs.c	2003-02-07 19:43:50.000000000 +0000
++++ gcc-3.3.ssp/gcc/optabs.c	2004-08-30 01:16:19.000000000 +0000
+@@ -696,6 +696,26 @@
+   if (target)
+     target = protect_from_queue (target, 1);
+ 
++  if (flag_propolice_protection
++      && binoptab->code == PLUS
++      && op0 == virtual_stack_vars_rtx
++      && GET_CODE(op1) == CONST_INT)
++    {
++      int icode = (int) binoptab->handlers[(int) mode].insn_code;
++      if (target)
++	temp = target;
++      else
++	temp = gen_reg_rtx (mode);
++
++      if (! (*insn_data[icode].operand[0].predicate) (temp, mode)
++	  || GET_CODE (temp) != REG)
++	temp = gen_reg_rtx (mode);
++
++      emit_insn (gen_rtx_SET (VOIDmode, temp,
++			      gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
++      return temp;
++    }
++
+   if (flag_force_mem)
+     {
+       op0 = force_not_mem (op0);
+diff -Naur gcc-3.3.orig/gcc/protector.c gcc-3.3.ssp/gcc/protector.c
+--- gcc-3.3.orig/gcc/protector.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.3.ssp/gcc/protector.c	2004-05-11 10:43:58.000000000 +0000
+@@ -0,0 +1,2725 @@
++/* RTL buffer overflow protection function for GNU C compiler
++   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
++
++This file is part of GCC.
++
++GCC is free software; you can redistribute it and/or modify it under
++the terms of the GNU General Public License as published by the Free
++Software Foundation; either version 2, or (at your option) any later
++version.
++
++GCC is distributed in the hope that it will be useful, but WITHOUT ANY
++WARRANTY; without even the implied warranty of MERCHANTABILITY or
++FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
++for more details.
++
++You should have received a copy of the GNU General Public License
++along with GCC; see the file COPYING.  If not, write to the Free
++Software Foundation, 59 Temple Place - Suite 330, Boston, MA
++02111-1307, USA.  */
++
++#include "config.h"
++#include "system.h"
++#include "machmode.h"
++
++#include "rtl.h"
++#include "tree.h"
++#include "regs.h"
++#include "flags.h"
++#include "insn-config.h"
++#include "insn-flags.h"
++#include "expr.h"
++#include "output.h"
++#include "recog.h"
++#include "hard-reg-set.h"
++#include "real.h"
++#include "except.h"
++#include "function.h"
++#include "toplev.h"
++#include "conditions.h"
++#include "insn-attr.h"
++#include "c-tree.h"
++#include "optabs.h"
++#include "reload.h"
++#include "protector.h"
++
++
++/* Warn when not issuing stack smashing protection for some reason */
++int warn_stack_protector;
++
++/* Round a value to the lowest integer less than it that is a multiple of
++   the required alignment.  Avoid using division in case the value is
++   negative.  Assume the alignment is a power of two.  */
++#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
++
++/* Similar, but round to the next highest integer that meets the
++   alignment.  */
++#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
++
++
++/* Nonzero means use propolice as a stack protection method */
++extern int flag_propolice_protection;
++
++/* This file contains several memory arrangement functions to protect
++   the return address and the frame pointer of the stack
++   from a stack-smashing attack. It also
++   provides the function that protects pointer variables. */
++
++/* Nonzero if function being compiled can define string buffers that may be
++   damaged by the stack-smash attack */
++static int current_function_defines_vulnerable_string;
++static int current_function_defines_short_string;
++static int current_function_has_variable_string;
++static int current_function_defines_vsized_array;
++static int current_function_is_inlinable;
++static int is_array;
++
++static rtx guard_area, _guard;
++static rtx function_first_insn, prologue_insert_point;
++
++/* state used while sweeping string variables within the stack frame */
++static HOST_WIDE_INT sweep_frame_offset;
++static HOST_WIDE_INT push_allocated_offset = 0;
++static HOST_WIDE_INT push_frame_offset = 0;
++static int saved_cse_not_expected = 0;
++
++static int search_string_from_argsandvars PARAMS ((int caller));
++static int search_string_from_local_vars PARAMS ((tree block));
++static int search_pointer_def PARAMS ((tree names));
++static int search_func_pointer PARAMS ((tree type));
++static int check_used_flag PARAMS ((rtx x));
++static void reset_used_flags_for_insns PARAMS ((rtx insn));
++static void reset_used_flags_for_decls PARAMS ((tree block));
++static void reset_used_flags_of_plus PARAMS ((rtx x));
++static void rtl_prologue PARAMS ((rtx insn));
++static void rtl_epilogue PARAMS ((rtx fnlastinsn));
++static void arrange_var_order PARAMS ((tree blocks));
++static void copy_args_for_protection PARAMS ((void));
++static void sweep_string_variable
++	PARAMS ((rtx sweep_var, HOST_WIDE_INT var_size));
++static void sweep_string_in_decls
++	PARAMS ((tree block, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
++static void sweep_string_in_args
++	PARAMS ((tree parms, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
++static void sweep_string_use_of_insns
++	PARAMS ((rtx insn, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
++static void sweep_string_in_operand
++	PARAMS ((rtx insn, rtx *loc,
++		 HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
++static void move_arg_location
++	PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size));
++static void change_arg_use_of_insns
++	PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
++static void change_arg_use_of_insns_2
++	PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
++static void change_arg_use_in_operand
++	PARAMS ((rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size));
++static void validate_insns_of_varrefs PARAMS ((rtx insn));
++static void validate_operand_of_varrefs PARAMS ((rtx insn, rtx *loc));
++
++#ifndef SUSPICIOUS_BUF_SIZE
++#define SUSPICIOUS_BUF_SIZE 8
++#endif
++
++#define AUTO_BASEPTR(X) \
++  (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
++#define AUTO_OFFSET(X) \
++  (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
++#undef PARM_PASSED_IN_MEMORY
++#define PARM_PASSED_IN_MEMORY(PARM) \
++ (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
++#define VIRTUAL_STACK_VARS_P(X) \
++ ((X) == virtual_stack_vars_rtx || (GET_CODE (X) == REG && (X)->used))
++#define TREE_VISITED(NODE) ((NODE)->common.unused_0)
++
++
++
++void
++prepare_stack_protection (inlinable)
++     int inlinable;
++{
++  tree blocks = DECL_INITIAL (current_function_decl);
++  current_function_is_inlinable = inlinable && !flag_no_inline;
++  push_frame_offset = push_allocated_offset = 0;
++  saved_cse_not_expected = 0;
++
++  /*
++    skip the protection if the function has no block
++    or it is an inline function
++  */
++  if (current_function_is_inlinable) validate_insns_of_varrefs (get_insns ());
++  if (! blocks || current_function_is_inlinable) return;
++
++  current_function_defines_vulnerable_string
++    = search_string_from_argsandvars (0);
++
++  if (current_function_defines_vulnerable_string
++      || flag_stack_protection)
++    {
++      HOST_WIDE_INT offset;
++      function_first_insn = get_insns ();
++
++      if (current_function_contains_functions) {
++	  if (warn_stack_protector)
++             warning ("not protecting function: it contains functions");
++	  return;
++      }
++
++      /* Initialize recognition, indicating that volatile is OK.  */
++      init_recog ();
++
++      sweep_frame_offset = 0;
++	
++#ifdef STACK_GROWS_DOWNWARD
++      /*
++	frame_offset: offset to end of allocated area of stack frame.
++	 It is defined in the function.c
++      */
++
++      /* the location must be before buffers */
++      guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
++      PUT_MODE (guard_area, GUARD_m);
++      MEM_VOLATILE_P (guard_area) = 1;
++
++#ifndef FRAME_GROWS_DOWNWARD
++      sweep_frame_offset = frame_offset;
++#endif
++
++      /* For making room for guard value, scan all insns and fix the offset
++	 address of the variable that is based on frame pointer.
++	 Scan all declarations of variables and fix the offset address
++	 of the variable that is based on the frame pointer */
++      sweep_string_variable (guard_area, UNITS_PER_GUARD);
++
++	
++      /* the location of guard area moves to the beginning of stack frame */
++      if ((offset = AUTO_OFFSET(XEXP (guard_area, 0))))
++	XEXP (XEXP (guard_area, 0), 1)
++	  = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
++
++
++      /* Insert prologue rtl instructions */
++      rtl_prologue (function_first_insn);
++
++      if (! current_function_has_variable_string)
++	{
++	  /* Generate argument saving instruction */
++	  copy_args_for_protection ();
++
++#ifndef FRAME_GROWS_DOWNWARD
++	  /* If frame grows upward, character string copied from an arg
++	     stays top of the guard variable.
++	     So sweep the guard variable again */
++	  sweep_frame_offset = CEIL_ROUND (frame_offset,
++					   BIGGEST_ALIGNMENT / BITS_PER_UNIT);
++	  sweep_string_variable (guard_area, UNITS_PER_GUARD);
++#endif
++	}
++      else if (warn_stack_protector)
++	warning ("not protecting variables: it has a variable length buffer");
++#endif
++#ifndef FRAME_GROWS_DOWNWARD
++      if (STARTING_FRAME_OFFSET == 0)
++	{
++	  /* this may be only for alpha */
++	  push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++	  assign_stack_local (BLKmode, push_allocated_offset, -1);
++	  sweep_frame_offset = frame_offset;
++	  sweep_string_variable (const0_rtx, -push_allocated_offset);
++	  sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
++	}
++#endif
++
++      /* Arrange the order of local variables */
++      arrange_var_order (blocks);
++
++#ifdef STACK_GROWS_DOWNWARD
++      /* Insert epilogue rtl instructions */
++      rtl_epilogue (get_last_insn ());
++#endif
++      init_recog_no_volatile ();
++    }
++  else if (current_function_defines_short_string
++	   && warn_stack_protector)
++    warning ("not protecting function: buffer is less than %d bytes long",
++	     SUSPICIOUS_BUF_SIZE);
++}
++
++/*
++  search string from arguments and local variables
++  caller: 0 means call from protector_stack_protection
++          1 means call from push_frame
++*/
++static int
++search_string_from_argsandvars (caller)
++     int caller;
++{
++  tree blocks, parms;
++  int string_p;
++
++  /* saves the latest search result as cached information */
++  static tree __latest_search_decl = 0;
++  static int  __latest_search_result = FALSE;
++
++  if (__latest_search_decl == current_function_decl)
++    return __latest_search_result;
++  else if (caller) return FALSE;
++  __latest_search_decl = current_function_decl;
++  __latest_search_result = TRUE;
++  
++  current_function_defines_short_string = FALSE;
++  current_function_has_variable_string = FALSE;
++  current_function_defines_vsized_array = FALSE;
++
++  /*
++    search a string variable from local variables
++  */
++  blocks = DECL_INITIAL (current_function_decl);
++  string_p = search_string_from_local_vars (blocks);
++
++  if (!current_function_defines_vsized_array && current_function_calls_alloca)
++    {
++      current_function_has_variable_string = TRUE;
++      return TRUE;
++    }
++
++  if (string_p) return TRUE;
++
++#ifdef STACK_GROWS_DOWNWARD
++  /*
++    search a string variable from arguments
++  */
++  parms = DECL_ARGUMENTS (current_function_decl);
++
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    string_p = search_string_def (TREE_TYPE(parms));
++	    if (string_p) return TRUE;
++	  }
++      }
++#endif
++
++  __latest_search_result = FALSE;
++  return FALSE;
++}
++
++
++static int
++search_string_from_local_vars (block)
++     tree block;
++{
++  tree types;
++  int found = FALSE;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      types = BLOCK_VARS(block);
++
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable */
++	  /* name: types.decl.name.identifier.id                   */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
++	      && TREE_CODE (types) == VAR_DECL
++	      && ! DECL_ARTIFICIAL (types)
++	      && DECL_RTL_SET_P (types)
++	      && GET_CODE (DECL_RTL (types)) == MEM
++
++	      && search_string_def (TREE_TYPE (types)))
++	    {
++	      rtx home = DECL_RTL (types);
++
++	      if (GET_CODE (home) == MEM
++		  && (GET_CODE (XEXP (home, 0)) == MEM
++		      ||
++		      (GET_CODE (XEXP (home, 0)) == REG
++		       && XEXP (home, 0) != virtual_stack_vars_rtx
++		       && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
++		       && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
++#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
++		       && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
++#endif
++		       )))
++		/* If the value is indirect by memory or by a register
++		   that isn't the frame pointer then it means the object is
++		   variable-sized and address through
++		   that register or stack slot.
++		   The protection has no way to hide pointer variables
++		   behind the array, so all we can do is keeping
++		   the order of variables and arguments. */
++		{
++		  current_function_has_variable_string = TRUE;
++		}
++	    
++	      /* found character array */
++	      found = TRUE;
++	    }
++
++	  types = TREE_CHAIN(types);
++	}
++
++      if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
++	{
++	  found = TRUE;
++	}
++
++      block = BLOCK_CHAIN (block);
++    }
++    
++  return found;
++}
++
++
++/*
++ * search a character array from the specified type tree
++ */
++int
++search_string_def (type)
++     tree type;
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case ARRAY_TYPE:
++      /* Check if the array is a variable-sized array */
++      if (TYPE_DOMAIN (type) == 0
++	  || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++	      && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
++	current_function_defines_vsized_array = TRUE;
++
++      /* TREE_CODE( TREE_TYPE(type) ) == INTEGER_TYPE */
++      if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
++	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
++	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
++	{
++	  /* Check if the string is a variable string */
++	  if (TYPE_DOMAIN (type) == 0
++	      ||
++	      (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++	       && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
++	    return TRUE;
++
++#if SUSPICIOUS_BUF_SIZE > 0
++	  /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE */
++	  if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++	      &&
++	      TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
++	      >= SUSPICIOUS_BUF_SIZE)
++	    return TRUE;
++
++	  current_function_defines_short_string = TRUE;
++#else
++	  return TRUE;
++#endif
++	}
++      
++      /* to protect every function, sweep any arrays to the frame top */
++      is_array = TRUE;
++
++      return search_string_def(TREE_TYPE(type));
++	
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++      /* Output the name, type, position (in bits), size (in bits) of each
++	 field.  */
++      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	{
++	  /* Omit here local type decls until we know how to support them. */
++	  if ((TREE_CODE (tem) == TYPE_DECL)
++	      || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
++	    continue;
++
++	  if (search_string_def(TREE_TYPE(tem))) return TRUE;
++	}
++      break;
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++      /* I'm not sure whether OFFSET_TYPE needs this treatment,
++	 so I'll play safe and return 1.  */
++    case OFFSET_TYPE:
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++/*
++ * examine whether the input contains frame pointer addressing
++ */
++int
++contains_fp (op)
++     rtx op;
++{
++  register enum rtx_code code;
++  rtx x;
++  int i, j;
++  const char *fmt;
++
++  x = op;
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case REG:
++    case ADDRESSOF:
++      return FALSE;
++
++    case PLUS:
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && CONSTANT_P (XEXP (x, 1)))
++	return TRUE;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (contains_fp (XEXP (x, i))) return TRUE;
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	if (contains_fp (XVECEXP (x, i, j))) return TRUE;
++
++  return FALSE;
++}
++
++
++static int
++search_pointer_def (type)
++     tree type;
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++      /* Output the name, type, position (in bits), size (in bits) of each
++	 field.  */
++      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	{
++	  /* Omit here local type decls until we know how to support them. */
++	  if ((TREE_CODE (tem) == TYPE_DECL)
++	      || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
++	    continue;
++
++	  if (search_pointer_def (TREE_TYPE(tem))) return TRUE;
++	}
++      break;
++
++    case ARRAY_TYPE:
++      return search_pointer_def (TREE_TYPE(type));
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++    case OFFSET_TYPE:
++      if (TYPE_READONLY (TREE_TYPE (type)))
++	{
++	  /* unless this pointer contains function pointer,
++	     it should be protected */
++	  return search_func_pointer (TREE_TYPE (type));
++	}
++      return TRUE;
++	
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++static int
++search_func_pointer (type)
++     tree type;
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++	if (! TREE_VISITED (type))
++	  {
++	    /* mark the type as having been visited already */
++	    TREE_VISITED (type) = 1;
++
++	    /* Output the name, type, position (in bits), size (in bits) of
++	       each field.  */
++	    for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	      {
++		if (TREE_CODE (tem) == FIELD_DECL
++		    && search_func_pointer (TREE_TYPE(tem))) {
++		  TREE_VISITED (type) = 0;
++		  return TRUE;
++		}
++	      }
++	    
++	    TREE_VISITED (type) = 0;
++	  }
++	break;
++
++    case ARRAY_TYPE:
++      return search_func_pointer (TREE_TYPE(type));
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++      /* I'm not sure whether OFFSET_TYPE needs this treatment,
++	 so I'll play safe and return 1.  */
++    case OFFSET_TYPE:
++      if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
++	return TRUE;
++      return search_func_pointer (TREE_TYPE(type));
++	
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++/*
++ * check whether the specified rtx contains PLUS rtx with used flag.
++ */
++static int
++check_used_flag (x)
++     rtx x;
++{
++  register int i, j;
++  register enum rtx_code code;
++  register const char *format_ptr;
++
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case REG:
++    case QUEUED:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++      return FALSE;
++
++    case PLUS:
++      if (x->used)
++	return TRUE;
++
++    default:
++      break;
++    }
++
++  format_ptr = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++)
++    {
++      switch (*format_ptr++)
++	{
++	case 'e':
++	  if (check_used_flag (XEXP (x, i)))
++	    return TRUE;
++	  break;
++
++	case 'E':
++	  for (j = 0; j < XVECLEN (x, i); j++)
++	    if (check_used_flag (XVECEXP (x, i, j)))
++	      return TRUE;
++	  break;
++	}
++    }
++
++  return FALSE;
++}
++
++
++static void
++reset_used_flags_for_insns (insn)
++     rtx insn;
++{
++  register int i, j;
++  register enum rtx_code code;
++  register const char *format_ptr;
++
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	code = GET_CODE (insn);
++	insn->used = 0;
++	format_ptr = GET_RTX_FORMAT (code);
++
++	for (i = 0; i < GET_RTX_LENGTH (code); i++)
++	  {
++	    switch (*format_ptr++) {
++	    case 'e':
++	      reset_used_flags_of_plus (XEXP (insn, i));
++	      break;
++			
++	    case 'E':
++	      for (j = 0; j < XVECLEN (insn, i); j++)
++		reset_used_flags_of_plus (XVECEXP (insn, i, j));
++	      break;
++	    }
++	  }
++      }
++}
++
++static void
++reset_used_flags_for_decls (block)
++     tree block;
++{
++  tree types;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      types = BLOCK_VARS(block);
++	
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable and
++	     also skip an global variable */
++	  if (! DECL_EXTERNAL (types))
++	    {
++	      if (!DECL_RTL_SET_P (types)) goto next;
++	      home = DECL_RTL (types);
++
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == PLUS
++		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++		{
++		  XEXP (home, 0)->used = 0;
++		}
++	    }
++	next:
++	  types = TREE_CHAIN(types);
++	}
++
++      reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++/* Clear the USED bits only of type PLUS in X */
++
++static void
++reset_used_flags_of_plus (x)
++     rtx x;
++{
++  register int i, j;
++  register enum rtx_code code;
++  register const char *format_ptr;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  /* These types may be freely shared so we needn't do any resetting
++     for them.  */
++
++  switch (code)
++    {
++    case REG:
++    case QUEUED:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++      return;
++
++    case INSN:
++    case JUMP_INSN:
++    case CALL_INSN:
++    case NOTE:
++    case LABEL_REF:
++    case BARRIER:
++      /* The chain of insns is not being copied.  */
++      return;
++      
++    case PLUS:
++      x->used = 0;
++      break;
++
++    case CALL_PLACEHOLDER:
++      reset_used_flags_for_insns (XEXP (x, 0));
++      reset_used_flags_for_insns (XEXP (x, 1));
++      reset_used_flags_for_insns (XEXP (x, 2));
++      break;
++
++    default:
++      break;
++    }
++
++  format_ptr = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++)
++    {
++      switch (*format_ptr++)
++	{
++	case 'e':
++	  reset_used_flags_of_plus (XEXP (x, i));
++	  break;
++
++	case 'E':
++	  for (j = 0; j < XVECLEN (x, i); j++)
++	    reset_used_flags_of_plus (XVECEXP (x, i, j));
++	  break;
++	}
++    }
++}
++
++
++static void
++rtl_prologue (insn)
++     rtx insn;
++{
++#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
++#undef HAS_INIT_SECTION
++#define HAS_INIT_SECTION
++#endif
++
++  rtx _val;
++
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == NOTE
++	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
++      break;
++  
++#if !defined (HAS_INIT_SECTION)
++  /* If this function is `main', skip a call to `__main'
++     to run guard instruments after global initializers, etc.  */
++  if (DECL_NAME (current_function_decl)
++      && MAIN_NAME_P (DECL_NAME (current_function_decl))
++      && DECL_CONTEXT (current_function_decl) == NULL_TREE)
++    {
++      rtx fbinsn = insn;
++      for (; insn; insn = NEXT_INSN (insn))
++	if (GET_CODE (insn) == NOTE
++	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
++	  break;
++      if (insn == 0) insn = fbinsn;
++    }
++#endif
++
++  /* mark the next insn of FUNCTION_BEG insn */
++  prologue_insert_point = NEXT_INSN (insn);
++		
++  start_sequence ();
++
++  _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
++  emit_move_insn ( guard_area, _guard);
++
++  _val = get_insns ();
++  end_sequence ();
++
++  emit_insn_before (_val, prologue_insert_point);
++}
++
++static void
++rtl_epilogue (insn)
++     rtx insn;
++{
++  rtx if_false_label;
++  rtx _val;
++  rtx funcname;
++  tree funcstr;
++  int  flag_have_return = FALSE;
++		
++  start_sequence ();
++
++#ifdef HAVE_return
++  if (HAVE_return)
++    {
++      rtx insn;
++      return_label = gen_label_rtx ();
++      
++      for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
++	if (GET_CODE (insn) == JUMP_INSN
++	    && GET_CODE (PATTERN (insn)) == RETURN
++	    && GET_MODE (PATTERN (insn)) == VOIDmode)
++	  {
++	    rtx pat = gen_rtx_SET (VOIDmode,
++				   pc_rtx,
++				   gen_rtx_LABEL_REF (VOIDmode,
++						      return_label));
++	    PATTERN (insn) = pat;
++	    flag_have_return = TRUE;
++	  }
++
++
++      emit_label (return_label);
++    }
++#endif
++
++  /*                                          if (guard_area != _guard) */
++  compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
++
++  if_false_label = gen_label_rtx ();		/* { */
++  emit_jump_insn ( gen_beq(if_false_label));
++
++  /* generate string for the current function name */
++  funcstr = build_string (strlen(current_function_name)+1,
++			  current_function_name);
++  TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
++  funcname = output_constant_def (funcstr, 1);
++
++  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__stack_smash_handler"),
++		     0, VOIDmode, 2,
++                     XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
++
++  /* generate RTL to return from the current function */
++		
++  emit_barrier ();				/* } */
++  emit_label (if_false_label);
++
++  /* generate RTL to return from the current function */
++  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
++    use_return_register ();
++
++#ifdef HAVE_return
++  if (HAVE_return && flag_have_return)
++    {
++      emit_jump_insn (gen_return ());
++      emit_barrier ();
++    }
++#endif
++  
++  _val = get_insns ();
++  end_sequence ();
++
++  emit_insn_after (_val, insn);
++}
++
++
++static void
++arrange_var_order (block)
++     tree block;
++{
++  tree types;
++  HOST_WIDE_INT offset;
++    
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      /* arrange the location of character arrays in depth first.  */
++      arrange_var_order (BLOCK_SUBBLOCKS (block));
++
++      types = BLOCK_VARS (block);
++
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable */
++	  /* name: types.decl.assembler_name.id			   */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
++	      && TREE_CODE (types) == VAR_DECL
++	      && ! DECL_ARTIFICIAL (types)
++	      && ! DECL_VAR_INLINE (types)	/* don't sweep inlined string.  */
++	      && DECL_RTL_SET_P (types)
++	      && GET_CODE (DECL_RTL (types)) == MEM
++	      && GET_MODE (DECL_RTL (types)) == BLKmode
++
++	      && (is_array=0, search_string_def (TREE_TYPE (types))
++		  || (! current_function_defines_vulnerable_string
++		      && is_array)))
++	    {
++	      rtx home = DECL_RTL (types);
++
++	      if (!(GET_CODE (home) == MEM
++		    && (GET_CODE (XEXP (home, 0)) == MEM
++			||
++			(GET_CODE (XEXP (home, 0)) == REG
++			 && XEXP (home, 0) != virtual_stack_vars_rtx
++			 && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
++			 && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
++#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
++			 && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
++#endif
++			 ))))
++		{
++		  /* found a string variable */
++		  HOST_WIDE_INT var_size =
++		    ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
++		     / BITS_PER_UNIT);
++
++		  /* confirmed it is BLKmode.  */
++		  int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++		  var_size = CEIL_ROUND (var_size, alignment);
++
++		  /* skip the variable if it is top of the region
++		     specified by sweep_frame_offset */
++		  offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
++		  if (offset == sweep_frame_offset - var_size)
++		    sweep_frame_offset -= var_size;
++		      
++		  else if (offset < sweep_frame_offset - var_size)
++		    sweep_string_variable (DECL_RTL (types), var_size);
++		}
++	    }
++
++	  types = TREE_CHAIN(types);
++	}
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++static void
++copy_args_for_protection ()
++{
++  tree parms = DECL_ARGUMENTS (current_function_decl);
++  rtx temp_rtx;
++
++  parms = DECL_ARGUMENTS (current_function_decl);
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    int string_p;
++
++	    /*
++	      skip argument protection if the last argument is used
++	      for the variable argument
++	    */
++	    /*
++	      tree fntype;
++	      if (TREE_CHAIN (parms) == 0)
++	      {
++	        fntype = TREE_TYPE (current_function_decl);
++
++	        if ((TYPE_ARG_TYPES (fntype) != 0
++		     && TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) 
++		          != void_type_node)
++	             || current_function_varargs)
++	          continue;
++	      }
++	    */
++
++	    string_p = search_string_def (TREE_TYPE(parms));
++
++	    /* check if it is a candidate to move */
++	    if (string_p || search_pointer_def (TREE_TYPE (parms)))
++	      {
++		int arg_size
++		  = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
++		     / BITS_PER_UNIT);
++		tree passed_type = DECL_ARG_TYPE (parms);
++		tree nominal_type = TREE_TYPE (parms);
++		
++		start_sequence ();
++
++		if (GET_CODE (DECL_RTL (parms)) == REG)
++		  {
++		    rtx safe = 0;
++		    
++		    change_arg_use_of_insns (prologue_insert_point,
++					     DECL_RTL (parms), &safe, 0);
++		    if (safe)
++		      {
++			/* generate codes for copying the content */
++			rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
++		    
++			/* avoid register elimination in gcse.c (COPY-PROP)*/
++			PATTERN (movinsn)->volatil = 1;
++			
++			/* save debugger info */
++			DECL_INCOMING_RTL (parms) = safe;
++		      }
++		  }
++		else if (GET_CODE (DECL_RTL (parms)) == MEM
++			 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
++		  {
++		    rtx movinsn;
++		    rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
++
++		    /* generate codes for copying the content */
++		    movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
++		    /* avoid register elimination in gcse.c (COPY-PROP)*/
++		    PATTERN (movinsn)->volatil = 1;
++
++		    /* change the addressof information to the newly
++		       allocated pseudo register */
++		    emit_move_insn (DECL_RTL (parms), safe);
++
++		    /* save debugger info */
++		    DECL_INCOMING_RTL (parms) = safe;
++		  }
++			
++		/* See if the frontend wants to pass this by invisible
++		   reference.  */
++		else if (passed_type != nominal_type
++			 && POINTER_TYPE_P (passed_type)
++			 && TREE_TYPE (passed_type) == nominal_type)
++		  {
++		    rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
++
++		    change_arg_use_of_insns (prologue_insert_point,
++					     orig, &safe, 0);
++		    if (safe)
++		      {
++			/* generate codes for copying the content */
++			rtx movinsn = emit_move_insn (safe, orig);
++		    
++			/* avoid register elimination in gcse.c (COPY-PROP)*/
++			PATTERN (movinsn)->volatil = 1;
++			
++			/* save debugger info */
++			DECL_INCOMING_RTL (parms) = safe;
++		      }
++		  }
++
++		else
++		  {
++		    /* declare temporary local variable DECL_NAME (parms) */
++		    temp_rtx
++		      = assign_stack_local (DECL_MODE (parms), arg_size,
++					    DECL_MODE (parms) == BLKmode ?
++					    -1 : 0);
++		    
++		    MEM_IN_STRUCT_P (temp_rtx)
++		      = AGGREGATE_TYPE_P (TREE_TYPE (parms));
++		    set_mem_alias_set (temp_rtx, get_alias_set (parms));
++
++		    /* move_arg_location may change the contents of
++		       DECL_RTL (parms). to avoid this, copies the contents */
++		    /* SET_DECL_RTL (parms, copy_rtx (DECL_RTL (parms))); */
++
++		    /* generate codes for copying the content */
++		    store_expr (parms, temp_rtx, 0);
++
++		    /* change the reference for each instructions */
++		    move_arg_location (prologue_insert_point, DECL_RTL (parms),
++				       temp_rtx, arg_size);
++
++		    /* change the location of parms variable */
++		    SET_DECL_RTL (parms, temp_rtx);
++
++		    /* change debugger info */
++		    DECL_INCOMING_RTL (parms) = temp_rtx;
++		  }
++
++		emit_insn_before (get_insns (), prologue_insert_point);
++		end_sequence ();
++
++#ifdef FRAME_GROWS_DOWNWARD
++		/* process the string argument */
++		if (string_p && DECL_MODE (parms) == BLKmode)
++		  {
++		    int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++		    arg_size = CEIL_ROUND (arg_size, alignment);
++			
++		    /* change the reference for each instructions */
++		    sweep_string_variable (DECL_RTL (parms), arg_size);
++		  }
++#endif
++	      }
++	  }
++      }
++}
++
++
++/*
++  sweep a string variable to the local variable addressed
++  by sweep_frame_offset, that is a last position of string variables.
++*/
++static void
++sweep_string_variable (sweep_var, var_size)
++     rtx sweep_var;
++     HOST_WIDE_INT var_size;
++{
++  HOST_WIDE_INT sweep_offset;
++
++  switch (GET_CODE (sweep_var))
++    {
++    case MEM:
++      if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
++	  && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
++	return;
++      sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
++      break;
++    case CONST_INT:
++      sweep_offset = INTVAL (sweep_var);
++      break;
++    default:
++      abort ();
++    }
++
++  /* scan all declarations of variables and fix the offset address of
++     the variable based on the frame pointer */
++  sweep_string_in_decls (DECL_INITIAL (current_function_decl),
++			 sweep_offset, var_size);
++
++  /* scan all argument variable and fix the offset address based on
++     the frame pointer */
++  sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
++			sweep_offset, var_size);
++
++  /* For making room for sweep variable, scan all insns and
++     fix the offset address of the variable that is based on frame pointer */
++  sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
++
++
++  /* Clear all the USED bits in operands of all insns and declarations of
++     local vars */
++  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
++  reset_used_flags_for_insns (function_first_insn);
++
++  sweep_frame_offset -= var_size;
++}
++
++
++
++/*
++  move an argument to the local variable addressed by frame_offset
++*/
++static void
++move_arg_location (insn, orig, new, var_size)
++     rtx  insn, orig, new;
++     HOST_WIDE_INT var_size;
++{
++  /* For making room for sweep variable, scan all insns and
++     fix the offset address of the variable that is based on frame pointer */
++  change_arg_use_of_insns (insn, orig, &new, var_size);
++
++
++  /* Clear all the USED bits in operands of all insns and declarations
++     of local vars */
++  reset_used_flags_for_insns (insn);
++}
++
++
++static void
++sweep_string_in_decls (block, sweep_offset, sweep_size)
++     tree block;
++     HOST_WIDE_INT sweep_offset, sweep_size;
++{
++  tree types;
++  HOST_WIDE_INT offset;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      types = BLOCK_VARS(block);
++	
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable and
++	     also skip an global variable */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
++	    
++	    if (!DECL_RTL_SET_P (types)) goto next;
++	    home = DECL_RTL (types);
++
++	    /* process for static local variable */
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
++	      goto next;
++
++	    if (GET_CODE (home) == MEM
++		&& XEXP (home, 0) == virtual_stack_vars_rtx)
++	      {
++		offset = 0;
++		
++		/* the operand related to the sweep variable */
++		if (sweep_offset <= offset
++		    && offset < sweep_offset + sweep_size)
++		  {
++		    offset = sweep_frame_offset - sweep_size - sweep_offset;
++
++		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
++						    offset);
++		    XEXP (home, 0)->used = 1;
++		  }
++		else if (sweep_offset <= offset
++			 && offset < sweep_frame_offset)
++		  {
++		    /* the rest of variables under sweep_frame_offset,
++		       shift the location */
++		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
++						    -sweep_size);
++		    XEXP (home, 0)->used = 1;
++		  }
++	      }
++		
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == MEM)
++	      {
++		/* process for dynamically allocated array */
++		home = XEXP (home, 0);
++	      }
++		
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == PLUS
++		&& XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
++		&& GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++	      {
++		if (! XEXP (home, 0)->used)
++		  {
++		    offset = AUTO_OFFSET(XEXP (home, 0));
++
++		    /* the operand related to the sweep variable */
++		    if (sweep_offset <= offset
++			&& offset < sweep_offset + sweep_size)
++		      {
++
++			offset
++			  += sweep_frame_offset - sweep_size - sweep_offset;
++			XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								      offset);
++
++			/* mark */
++			XEXP (home, 0)->used = 1;
++		      }
++		    else if (sweep_offset <= offset
++			     && offset < sweep_frame_offset)
++		      {	/* the rest of variables under sweep_frame_offset,
++			   so shift the location */
++
++			XEXP (XEXP (home, 0), 1)
++			  = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++
++			/* mark */
++			XEXP (home, 0)->used = 1;
++		      }
++		  }
++	      }
++
++	  }
++	next:
++	  types = TREE_CHAIN(types);
++	}
++
++      sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
++			     sweep_offset, sweep_size);
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++static void
++sweep_string_in_args (parms, sweep_offset, sweep_size)
++     tree parms;
++     HOST_WIDE_INT sweep_offset, sweep_size;
++{
++  rtx home;
++  HOST_WIDE_INT offset;
++    
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    home = DECL_INCOMING_RTL (parms);
++
++	    if (XEXP (home, 0)->used) continue;
++
++	    offset = AUTO_OFFSET(XEXP (home, 0));
++
++	    /* the operand related to the sweep variable */
++	    if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
++	      {
++		if (sweep_offset <= offset
++		    && offset < sweep_offset + sweep_size)
++		  {
++		    offset += sweep_frame_offset - sweep_size - sweep_offset;
++		    XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								  offset);
++
++		    /* mark */
++		    XEXP (home, 0)->used = 1;
++		  }
++		else if (sweep_offset <= offset
++			 && offset < sweep_frame_offset)
++		  {
++		    /* the rest of variables under sweep_frame_offset,
++		       shift the location */
++		    XEXP (XEXP (home, 0), 1)
++		      = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++
++		    /* mark */
++		    XEXP (home, 0)->used = 1;
++		  }
++	      }
++	  }
++      }
++}
++
++
++static int has_virtual_reg;
++
++static void
++sweep_string_use_of_insns (insn, sweep_offset, sweep_size)
++     rtx insn;
++     HOST_WIDE_INT sweep_offset, sweep_size;
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	has_virtual_reg = FALSE;
++	sweep_string_in_operand (insn, &PATTERN (insn),
++				 sweep_offset, sweep_size);
++	sweep_string_in_operand (insn, &REG_NOTES (insn),
++				 sweep_offset, sweep_size);
++      }
++}
++
++
++static void
++sweep_string_in_operand (insn, loc, sweep_offset, sweep_size)
++     rtx insn, *loc;
++     HOST_WIDE_INT sweep_offset, sweep_size;
++{
++  register rtx x = *loc;
++  register enum rtx_code code;
++  int i, j, k = 0;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case ADDRESSOF:
++      return;
++	    
++    case REG:
++      if (x == virtual_incoming_args_rtx
++	  || x == virtual_stack_vars_rtx
++	  || x == virtual_stack_dynamic_rtx
++	  || x == virtual_outgoing_args_rtx
++	  || x == virtual_cfa_rtx)
++	has_virtual_reg = TRUE;
++      return;
++      
++    case SET:
++      /*
++	skip setjmp setup insn and setjmp restore insn
++	Example:
++	(set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
++	(set (virtual_stack_vars_rtx) (REG))
++      */
++      if (GET_CODE (XEXP (x, 0)) == MEM
++	  && XEXP (x, 1) == virtual_stack_vars_rtx)
++	return;
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == REG)
++	return;
++      break;
++	    
++    case PLUS:
++      /* Handle typical case of frame register plus constant.  */
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && CONSTANT_P (XEXP (x, 1)))
++	{
++	  if (x->used) goto single_use_of_virtual_reg;
++	  
++	  offset = AUTO_OFFSET(x);
++	  if (RTX_INTEGRATED_P (x)) k = -1; /* for inline base ptr */
++
++	  /* the operand related to the sweep variable */
++	  if (sweep_offset <= offset + k
++	      && offset + k < sweep_offset + sweep_size)
++	    {
++	      offset += sweep_frame_offset - sweep_size - sweep_offset;
++
++	      XEXP (x, 0) = virtual_stack_vars_rtx;
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	      x->used = 1;
++	    }
++	  else if (sweep_offset <= offset + k
++		   && offset + k < sweep_frame_offset)
++	    {
++	      /* the rest of variables under sweep_frame_offset,
++		 shift the location */
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++	      x->used = 1;
++	    }
++	  
++	single_use_of_virtual_reg:
++	  if (has_virtual_reg) {
++	    /* excerpt from insn_invalid_p in recog.c */
++	    int icode = recog_memoized (insn);
++
++	    if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
++	      {
++		rtx temp, seq;
++		
++		start_sequence ();
++		temp = force_operand (x, NULL_RTX);
++		seq = get_insns ();
++		end_sequence ();
++		
++		emit_insn_before (seq, insn);
++		if (! validate_change (insn, loc, temp, 0)
++		    && ! validate_replace_rtx (x, temp, insn))
++		  fatal_insn ("sweep_string_in_operand", insn);
++	      }
++	  }
++
++	  has_virtual_reg = TRUE;
++	  return;
++	}
++
++#ifdef FRAME_GROWS_DOWNWARD
++      /*
++	alert the case of frame register plus constant given by reg.
++	*/
++      else if (XEXP (x, 0) == virtual_stack_vars_rtx
++	       && GET_CODE (XEXP (x, 1)) == REG)
++	fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
++#endif
++
++      /*
++	process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      sweep_string_use_of_insns (XEXP (x, i),
++					 sweep_offset, sweep_size);
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	/*
++	  virtual_stack_vars_rtx without offset
++	  Example:
++	    (set (reg:SI xx) (reg:SI 78))
++	    (set (reg:SI xx) (MEM (reg:SI 78)))
++	*/
++	if (XEXP (x, i) == virtual_stack_vars_rtx)
++	  fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
++	sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
++}   
++
++
++/*
++  change an argument variable to the local variable addressed
++  by the "new" variable.
++*/
++static void
++change_arg_use_of_insns (insn, orig, new, size)
++     rtx insn, orig, *new;
++     HOST_WIDE_INT size;
++{
++  change_arg_use_of_insns_2 (insn, orig, new, size);
++}
++
++static void
++change_arg_use_of_insns_2 (insn, orig, new, size)
++     rtx insn, orig, *new;
++     HOST_WIDE_INT size;
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	rtx seq;
++	
++	start_sequence ();
++	change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
++
++	seq = get_insns ();
++	end_sequence ();
++	emit_insn_before (seq, insn);
++
++	/* load_multiple insn from virtual_incoming_args_rtx have several
++	   load insns. If every insn change the load address of arg
++	   to frame region, those insns are moved before the PARALLEL insn
++	   and remove the PARALLEL insn.  */
++	if (GET_CODE (PATTERN (insn)) == PARALLEL
++	    && XVECLEN (PATTERN (insn), 0) == 0)
++	  delete_insn (insn);
++      }
++}
++
++
++
++static void
++change_arg_use_in_operand (insn, x, orig, new, size)
++     rtx insn, x, orig, *new;
++     HOST_WIDE_INT size;
++{
++  register enum rtx_code code;
++  int i, j;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return;
++
++    case MEM:
++      /* Handle special case of MEM (incoming_args)  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 0) == virtual_incoming_args_rtx)
++	{
++	  offset = 0;
++
++	  /* the operand related to the sweep variable */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
++
++	    offset = AUTO_OFFSET(XEXP (*new, 0))
++	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
++
++	    XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
++	    XEXP (x, 0)->used = 1;
++
++	    return;
++	  }
++	}
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 0) == virtual_incoming_args_rtx
++	  && CONSTANT_P (XEXP (x, 1))
++	  && ! x->used)
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  /* the operand related to the sweep variable */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
++
++	    offset = AUTO_OFFSET(XEXP (*new, 0))
++	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
++
++	    XEXP (x, 0) = virtual_stack_vars_rtx;
++	    XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	    x->used = 1;
++
++	    return;
++	  }
++
++	  /*
++	    process further subtree:
++	    Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	    (const_int 5))
++	  */
++	}
++      break;
++
++    case SET:
++      /* Handle special case of "set (REG or MEM) (incoming_args)".
++	 It means that the address of the 1st argument is stored. */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 1) == virtual_incoming_args_rtx)
++	{
++	  offset = 0;
++
++	  /* the operand related to the sweep variable */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
++
++	    offset = AUTO_OFFSET(XEXP (*new, 0))
++	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
++
++	    XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
++							offset), NULL_RTX);
++	    XEXP (x, 1)->used = 1;
++
++	    return;
++	  }
++	}
++      break;
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      change_arg_use_of_insns_2 (XEXP (x, i), orig, new, size);
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    case PARALLEL:
++      for (j = 0; j < XVECLEN (x, 0); j++)
++	{
++	  change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
++	}
++      if (recog_memoized (insn) < 0)
++	{
++	  for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
++	    {
++	      /* if parallel insn has an insn using virtual_incoming_args_rtx,
++		 the insn is removed from this PARALLEL insn.  */
++	      if (check_used_flag (XVECEXP (x, 0, j)))
++		{
++		  emit_insn (XVECEXP (x, 0, j));
++		  XVECEXP (x, 0, j) = NULL;
++		}
++	      else
++		XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
++	    }
++	  PUT_NUM_ELEM (XVEC (x, 0), i);
++	}
++      return;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (XEXP (x, i) == orig)
++	  {
++	    if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
++	    XEXP (x, i) = *new;
++	    continue;
++	  }
++	change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	{
++
++	  if (XVECEXP (x, i, j) == orig)
++	    {
++	      if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
++	      XVECEXP (x, i, j) = *new;
++	      continue;
++	    }
++	  change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
++	}
++}   
++
++
++static void
++validate_insns_of_varrefs (insn)
++     rtx insn;
++{
++  rtx next;
++
++  /* Initialize recognition, indicating that volatile is OK.  */
++  init_recog ();
++
++  for (; insn; insn = next)
++    {
++      next = NEXT_INSN (insn);
++      if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	  || GET_CODE (insn) == CALL_INSN)
++	{
++	  /* excerpt from insn_invalid_p in recog.c */
++	  int icode = recog_memoized (insn);
++
++	  if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
++	    validate_operand_of_varrefs (insn, &PATTERN (insn));
++	}
++    }
++
++  init_recog_no_volatile ();
++}
++
++
++static void
++validate_operand_of_varrefs (insn, loc)
++     rtx insn, *loc;
++{
++  register enum rtx_code code;
++  rtx x, temp, seq;
++  int i, j;
++  const char *fmt;
++
++  x = *loc;
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case USE:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return;
++
++    case PLUS:
++      /* validate insn of frame register plus constant.  */
++      if (GET_CODE (x) == PLUS
++	  && XEXP (x, 0) == virtual_stack_vars_rtx
++	  && CONSTANT_P (XEXP (x, 1)))
++	{
++	  start_sequence ();
++
++	  { /* excerpt from expand_binop in optabs.c */
++	    optab binoptab = add_optab;
++	    enum machine_mode mode = GET_MODE (x);
++	    int icode = (int) binoptab->handlers[(int) mode].insn_code;
++	    enum machine_mode mode1 = insn_data[icode].operand[2].mode;
++	    rtx pat;
++	    rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
++	    temp = gen_reg_rtx (mode);
++
++	    /* Now, if insn's predicates don't allow offset operands,
++	       put them into pseudo regs.  */
++
++	    if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
++		&& mode1 != VOIDmode)
++	      xop1 = copy_to_mode_reg (mode1, xop1);
++
++	    pat = GEN_FCN (icode) (temp, xop0, xop1);
++	    if (pat)
++	      emit_insn (pat);
++	  }	      
++	  seq = get_insns ();
++	  end_sequence ();
++	  
++	  emit_insn_before (seq, insn);
++	  if (! validate_change (insn, loc, temp, 0))
++	    abort ();
++	  return;
++	}
++	break;
++      
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      validate_insns_of_varrefs (XEXP (x, i));
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      validate_operand_of_varrefs (insn, &XEXP (x, i));
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
++}
++
++
++
++/* Return size that is not allocated for stack frame. It will be allocated
++   to modify the home of pseudo registers called from global_alloc.  */
++
++HOST_WIDE_INT
++get_frame_free_size ()
++{
++  if (! flag_propolice_protection)
++    return 0;
++
++  return push_allocated_offset - push_frame_offset;
++}
++
++
++/*
++  The following codes are invoked after the instantiation of pseudo registers.
++
++  Reorder local variables to place a pseudo register after buffers to avoid
++  the corruption of local variables that could be used to further corrupt
++  arbitrary memory locations.
++*/
++#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
++static void push_frame
++	PARAMS ((HOST_WIDE_INT var_size, HOST_WIDE_INT boundary));
++static void push_frame_in_decls
++	PARAMS ((tree block, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_in_args
++	PARAMS ((tree parms, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_of_insns
++	PARAMS ((rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_in_operand
++	PARAMS ((rtx insn, rtx orig,
++		 HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_of_reg_equiv_memory_loc
++	PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void push_frame_of_reg_equiv_constant
++	PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
++static void reset_used_flags_for_push_frame PARAMS ((void));
++static int check_out_of_frame_access
++	PARAMS ((rtx insn, HOST_WIDE_INT boundary));
++static int check_out_of_frame_access_in_operand
++	PARAMS ((rtx, HOST_WIDE_INT boundary));
++#endif
++
++rtx
++assign_stack_local_for_pseudo_reg (mode, size, align)
++     enum machine_mode mode;
++     HOST_WIDE_INT size;
++     int align;
++{
++#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
++  return assign_stack_local (mode, size, align);
++#else
++  tree blocks = DECL_INITIAL (current_function_decl);
++  rtx new;
++  HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
++  int first_call_from_purge_addressof, first_call_from_global_alloc;
++
++  if (! flag_propolice_protection
++      || size == 0
++      || ! blocks
++      || current_function_is_inlinable
++      || ! search_string_from_argsandvars (1)
++      || current_function_contains_functions)
++    return assign_stack_local (mode, size, align);
++
++  first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
++  first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
++  saved_cse_not_expected = cse_not_expected;
++
++  starting_frame = (STARTING_FRAME_OFFSET)?
++    STARTING_FRAME_OFFSET:BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++  units_per_push = MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT,
++		       GET_MODE_SIZE (mode));
++    
++  if (first_call_from_purge_addressof)
++    {
++      push_frame_offset = push_allocated_offset;
++      if (check_out_of_frame_access (get_insns (), starting_frame))
++	{
++	  /* if there is an access beyond frame, push dummy region to separate
++	     the address of instantiated variables */
++	  push_frame (GET_MODE_SIZE (DImode), 0);
++	  assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
++	}
++    }
++
++  if (first_call_from_global_alloc)
++    {
++      push_frame_offset = push_allocated_offset = 0;
++      if (check_out_of_frame_access (get_insns (), starting_frame))
++	{
++	  if (STARTING_FRAME_OFFSET)
++	    {
++	      /* if there is an access beyond frame, push dummy region 
++		 to seperate the address of instantiated variables */
++	      push_frame (GET_MODE_SIZE (DImode), 0);
++	      assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
++	    }
++	  else
++	    push_allocated_offset = starting_frame;
++	}
++    }
++
++  saved_frame_offset = frame_offset;
++  frame_offset = push_frame_offset;
++
++  new = assign_stack_local (mode, size, align);
++
++  push_frame_offset = frame_offset;
++  frame_offset = saved_frame_offset;
++  
++  if (push_frame_offset > push_allocated_offset)
++    {
++      push_frame (units_per_push,
++		  push_allocated_offset + STARTING_FRAME_OFFSET);
++
++      assign_stack_local (BLKmode, units_per_push, -1);
++      push_allocated_offset += units_per_push;
++    }
++
++  /* At the second call from global alloc, alpha push frame and assign
++     a local variable to the top of the stack */
++  if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
++    push_frame_offset = push_allocated_offset = 0;
++
++  return new;
++#endif
++}
++
++
++#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
++/*
++  push frame information for instantiating pseudo register at the top of stack.
++  This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is 
++  not defined.
++
++  It is called by purge_addressof function and global_alloc (or reload)
++  function.
++*/
++static void
++push_frame (var_size, boundary)
++     HOST_WIDE_INT var_size, boundary;
++{
++  reset_used_flags_for_push_frame();
++
++  /* scan all declarations of variables and fix the offset address of
++     the variable based on the frame pointer */
++  push_frame_in_decls (DECL_INITIAL (current_function_decl),
++		       var_size, boundary);
++
++  /* scan all argument variable and fix the offset address based on
++     the frame pointer */
++  push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
++		      var_size, boundary);
++
++  /* scan all operands of all insns and fix the offset address
++     based on the frame pointer */
++  push_frame_of_insns (get_insns (), var_size, boundary);
++
++  /* scan all reg_equiv_memory_loc and reg_equiv_constant*/
++  push_frame_of_reg_equiv_memory_loc (var_size, boundary);
++  push_frame_of_reg_equiv_constant (var_size, boundary);
++
++  reset_used_flags_for_push_frame();
++}
++
++static void
++reset_used_flags_for_push_frame()
++{
++  int i;
++  extern rtx *reg_equiv_memory_loc;
++  extern rtx *reg_equiv_constant;
++
++  /* Clear all the USED bits in operands of all insns and declarations of
++     local vars */
++  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
++  reset_used_flags_for_insns (get_insns ());
++
++
++  /* The following codes are processed if the push_frame is called from 
++     global_alloc (or reload) function */
++  if (reg_equiv_memory_loc == 0) return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_memory_loc[i])
++      {
++	rtx x = reg_equiv_memory_loc[i];
++
++	if (GET_CODE (x) == MEM
++	    && GET_CODE (XEXP (x, 0)) == PLUS
++	    && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
++	  {
++	    /* reset */
++	    XEXP (x, 0)->used = 0;
++	  }
++      }
++
++  
++  if (reg_equiv_constant == 0) return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_constant[i])
++      {
++	rtx x = reg_equiv_constant[i];
++
++	if (GET_CODE (x) == PLUS
++	    && AUTO_BASEPTR (x) == frame_pointer_rtx)
++	  {
++	    /* reset */
++	    x->used = 0;
++	  }
++      }
++}
++
++static void
++push_frame_in_decls (block, push_size, boundary)
++     tree block;
++     HOST_WIDE_INT push_size, boundary;
++{
++  tree types;
++  HOST_WIDE_INT offset;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      types = BLOCK_VARS(block);
++	
++      while (types)
++	{
++	  /* skip the declaration that refers an external variable and
++	     also skip an global variable */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
++	    {
++	    
++	      if (!DECL_RTL_SET_P (types)) goto next;
++	      home = DECL_RTL (types);
++
++	      /* process for static local variable */
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
++		goto next;
++
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == REG)
++		{
++		  if (XEXP (home, 0) != frame_pointer_rtx
++		      || boundary != 0)
++		    goto next;
++
++		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
++						  push_size);
++
++		  /* mark */
++		  XEXP (home, 0)->used = 1;
++		}
++		
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == MEM)
++		{
++
++		  /* process for dynamically allocated array */
++		  home = XEXP (home, 0);
++		}
++		
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == PLUS
++		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++		{
++		  offset = AUTO_OFFSET(XEXP (home, 0));
++
++		  if (! XEXP (home, 0)->used
++		      && offset >= boundary)
++		    {
++		      offset += push_size;
++		      XEXP (XEXP (home, 0), 1)
++			= gen_rtx_CONST_INT (VOIDmode, offset);
++		      
++		      /* mark */
++		      XEXP (home, 0)->used = 1;
++		    }
++		}
++
++	    }
++	next:
++	  types = TREE_CHAIN(types);
++	}
++
++      push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++static void
++push_frame_in_args (parms, push_size, boundary)
++     tree parms;
++     HOST_WIDE_INT push_size, boundary;
++{
++  rtx home;
++  HOST_WIDE_INT offset;
++    
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    home = DECL_INCOMING_RTL (parms);
++	    offset = AUTO_OFFSET(XEXP (home, 0));
++
++	    if (XEXP (home, 0)->used || offset < boundary) continue;
++
++	    /* the operand related to the sweep variable */
++	    if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
++	      {
++		if (XEXP (home, 0) == frame_pointer_rtx)
++		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
++						  push_size);
++		else {
++		  offset += push_size;
++		  XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								offset);
++		}
++
++		/* mark */
++		XEXP (home, 0)->used = 1;
++	      }
++	  }
++      }
++}
++
++
++static int insn_pushed;
++static int *fp_equiv = 0;
++
++static void
++push_frame_of_insns (insn, push_size, boundary)
++     rtx insn;
++     HOST_WIDE_INT push_size, boundary;
++{
++  /* init fp_equiv */
++  fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
++		
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	rtx last;
++	
++	insn_pushed = FALSE;
++
++	/* push frame in INSN operation */
++	push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
++
++	/* push frame in NOTE */
++	push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
++
++	/* push frame in CALL EXPR_LIST */
++	if (GET_CODE (insn) == CALL_INSN)
++	  push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
++				 push_size, boundary);
++
++	if (insn_pushed
++	    && (last = try_split (PATTERN (insn), insn, 1)) != insn)
++	  {
++	    rtx first = NEXT_INSN (insn);
++	    rtx trial = NEXT_INSN (first);
++	    rtx pattern = PATTERN (trial);
++	    rtx set;
++
++	    /* update REG_EQUIV info to the first splitted insn */
++	    if ((set = single_set (insn))
++		&& find_reg_note (insn, REG_EQUIV, SET_SRC (set))
++		&& GET_CODE (PATTERN (first)) == SET)
++	      {
++		REG_NOTES (first)
++		  = gen_rtx_EXPR_LIST (REG_EQUIV,
++				       SET_SRC (PATTERN (first)),
++				       REG_NOTES (first));
++	      }
++
++	    /* copy the first insn of splitted insns to the original insn and
++	       delete the first insn,
++	       because the original insn is pointed from records:
++	       insn_chain, reg_equiv_init, used for global_alloc.  */
++	    if (cse_not_expected)
++	      {
++		add_insn_before (insn, first);
++		
++		/* Copy the various flags, and other information.  */
++		memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
++		PATTERN (insn) = PATTERN (first);
++		REG_NOTES (insn) = REG_NOTES (first);
++
++		/* then remove the first insn of splitted insns.  */
++		remove_insn (first);
++		INSN_DELETED_P (first) = 1;
++	      }
++
++	    if (GET_CODE (pattern) == SET
++		&& GET_CODE (XEXP (pattern, 0)) == REG
++		&& GET_CODE (XEXP (pattern, 1)) == PLUS
++		&& XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
++		&& CONSTANT_P (XEXP (XEXP (pattern, 1), 1)))
++	      {
++		rtx offset = XEXP (XEXP (pattern, 1), 1);
++		fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
++
++		delete_insn (trial);
++	      }
++
++	    insn = last;
++	  }
++      }
++
++  /* Clean up.  */
++  free (fp_equiv);
++}
++
++
++static void
++push_frame_in_operand (insn, orig, push_size, boundary)
++     rtx insn, orig;
++     HOST_WIDE_INT push_size, boundary;
++{
++  register rtx x = orig;
++  register enum rtx_code code;
++  int i, j;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++    case USE:
++      return;
++	    
++    case SET:
++      /*
++	skip setjmp setup insn and setjmp restore insn
++	alpha case:
++	(set (MEM (reg:SI xx)) (frame_pointer_rtx)))
++	(set (frame_pointer_rtx) (REG))
++      */
++      if (GET_CODE (XEXP (x, 0)) == MEM
++	  && XEXP (x, 1) == frame_pointer_rtx)
++	return;
++      if (XEXP (x, 0) == frame_pointer_rtx
++	  && GET_CODE (XEXP (x, 1)) == REG)
++	return;
++
++      /*
++	powerpc case: restores setjmp address
++	(set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
++	or
++	(set (reg) (plus frame_pointer_rtx const_int -n))
++	(set (frame_pointer_rtx) (reg))
++      */
++      if (GET_CODE (XEXP (x, 0)) == REG
++	  && GET_CODE (XEXP (x, 1)) == PLUS
++	  && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
++	  && CONSTANT_P (XEXP (XEXP (x, 1), 1))
++	  && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
++	{
++	  x = XEXP (x, 1);
++	  offset = AUTO_OFFSET(x);
++	  if (x->used || abs (offset) < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
++	  x->used = 1; insn_pushed = TRUE;
++	  return;
++	}
++
++      /* reset fp_equiv register */
++      else if (GET_CODE (XEXP (x, 0)) == REG
++	  && fp_equiv[REGNO (XEXP (x, 0))])
++	fp_equiv[REGNO (XEXP (x, 0))] = 0;
++
++      /* propagate fp_equiv register */
++      else if (GET_CODE (XEXP (x, 0)) == REG
++	       && GET_CODE (XEXP (x, 1)) == REG
++	       && fp_equiv[REGNO (XEXP (x, 1))])
++	if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
++	    || reg_renumber[REGNO (XEXP (x, 0))] > 0)
++	  fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
++      break;
++
++    case MEM:
++      if (XEXP (x, 0) == frame_pointer_rtx
++	  && boundary == 0)
++	{
++	  XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
++	  XEXP (x, 0)->used = 1; insn_pushed = TRUE;
++	  return;
++	}
++      break;
++      
++    case PLUS:
++      offset = AUTO_OFFSET(x);
++
++      /* Handle special case of frame register plus constant.  */
++      if (CONSTANT_P (XEXP (x, 1))
++	  && XEXP (x, 0) == frame_pointer_rtx)
++	{
++	  if (x->used || offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle alpha case:
++	 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
++      */
++      if (CONSTANT_P (XEXP (x, 1))
++	  && GET_CODE (XEXP (x, 0)) == SUBREG
++	  && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
++	{
++	  if (x->used || offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle powerpc case:
++	 (set (reg x) (plus fp const))
++	 (set (.....) (... (plus (reg x) (const B))))
++      */
++      else if (CONSTANT_P (XEXP (x, 1))
++	       && GET_CODE (XEXP (x, 0)) == REG
++	       && fp_equiv[REGNO (XEXP (x, 0))])
++	{
++	  if (x->used) return;
++
++	  offset += fp_equiv[REGNO (XEXP (x, 0))];
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle special case of frame register plus reg (constant).
++	 (set (reg x) (const B))
++	 (set (....) (...(plus fp (reg x))))
++      */
++      else if (XEXP (x, 0) == frame_pointer_rtx
++	       && GET_CODE (XEXP (x, 1)) == REG
++	       && PREV_INSN (insn)
++	       && PATTERN (PREV_INSN (insn))
++	       && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
++	       && CONSTANT_P (SET_SRC (PATTERN (PREV_INSN (insn)))))
++	{
++	  HOST_WIDE_INT offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
++
++	  if (x->used || offset < boundary)
++	    return;
++	  
++	  SET_SRC (PATTERN (PREV_INSN (insn)))
++	    = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1;
++	  XEXP (x, 1)->used = 1;
++
++	  return;
++	}
++      /* Handle special case of frame register plus reg (used).  */
++      else if (XEXP (x, 0) == frame_pointer_rtx
++	       && XEXP (x, 1)->used)
++	{
++	  x->used = 1;
++	  return;
++	}
++      /*
++	process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      push_frame_of_insns (XEXP (x, 0), push_size, boundary);
++      push_frame_of_insns (XEXP (x, 1), push_size, boundary);
++      push_frame_of_insns (XEXP (x, 2), push_size, boundary);
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
++	  fatal_insn ("push_frame_in_operand", insn);
++	push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
++}   
++
++static void
++push_frame_of_reg_equiv_memory_loc (push_size, boundary)
++     HOST_WIDE_INT push_size, boundary;
++{
++  int i;
++  extern rtx *reg_equiv_memory_loc;
++
++  /* This function is processed if the push_frame is called from 
++     global_alloc (or reload) function */
++  if (reg_equiv_memory_loc == 0) return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_memory_loc[i])
++      {
++	rtx x = reg_equiv_memory_loc[i];
++	int offset;
++
++	if (GET_CODE (x) == MEM
++	    && GET_CODE (XEXP (x, 0)) == PLUS
++	    && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
++	  {
++	    offset = AUTO_OFFSET(XEXP (x, 0));
++	    
++	    if (! XEXP (x, 0)->used
++		&& offset >= boundary)
++	      {
++		offset += push_size;
++		XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		/* mark */
++		XEXP (x, 0)->used = 1;
++	      }
++	  }
++	else if (GET_CODE (x) == MEM
++		 && XEXP (x, 0) == frame_pointer_rtx
++		 && boundary == 0)
++	  {
++	    XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
++	    XEXP (x, 0)->used = 1; insn_pushed = TRUE;
++	  }
++      }
++}
++
++static void
++push_frame_of_reg_equiv_constant (push_size, boundary)
++     HOST_WIDE_INT push_size, boundary;
++{
++  int i;
++  extern rtx *reg_equiv_constant;
++
++  /* This function is processed if the push_frame is called from 
++     global_alloc (or reload) function */
++  if (reg_equiv_constant == 0) return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_constant[i])
++      {
++	rtx x = reg_equiv_constant[i];
++	int offset;
++
++	if (GET_CODE (x) == PLUS
++	    && XEXP (x, 0) == frame_pointer_rtx)
++	  {
++	    offset = AUTO_OFFSET(x);
++	    
++	    if (! x->used
++		&& offset >= boundary)
++	      {
++		offset += push_size;
++		XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		/* mark */
++		x->used = 1;
++	      }
++	  }
++	else if (x == frame_pointer_rtx
++		 && boundary == 0)
++	  {
++	    reg_equiv_constant[i]
++	      = plus_constant (frame_pointer_rtx, push_size);
++	    reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
++	  }
++      }
++}
++
++static int
++check_out_of_frame_access (insn, boundary)
++     rtx insn;
++     HOST_WIDE_INT boundary;
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
++	  return TRUE;
++      }
++  return FALSE;
++}
++
++
++static int
++check_out_of_frame_access_in_operand (orig, boundary)
++     rtx orig;
++     HOST_WIDE_INT boundary;
++{
++  register rtx x = orig;
++  register enum rtx_code code;
++  int i, j;
++  const char *fmt;
++
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return FALSE;
++	    
++    case MEM:
++      if (XEXP (x, 0) == frame_pointer_rtx)
++	if (0 < boundary) return TRUE;
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (CONSTANT_P (XEXP (x, 1))
++	  && XEXP (x, 0) == frame_pointer_rtx)
++	{
++	  if (0 <= AUTO_OFFSET(x)
++	      && AUTO_OFFSET(x) < boundary) return TRUE;
++	  return FALSE;
++	}
++      /*
++	process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      if (check_out_of_frame_access (XEXP (x, 0), boundary)) return TRUE;
++      if (check_out_of_frame_access (XEXP (x, 1), boundary)) return TRUE;
++      if (check_out_of_frame_access (XEXP (x, 2), boundary)) return TRUE;
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
++	  return TRUE;
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
++	  return TRUE;
++
++  return FALSE;
++}
++#endif
+diff -Naur gcc-3.3.orig/gcc/protector.h gcc-3.3.ssp/gcc/protector.h
+--- gcc-3.3.orig/gcc/protector.h	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.3.ssp/gcc/protector.h	2004-05-11 10:43:58.000000000 +0000
+@@ -0,0 +1,53 @@
++/* RTL buffer overflow protection function for GNU C compiler
++   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
++
++This file is part of GCC.
++
++GCC is free software; you can redistribute it and/or modify it under
++the terms of the GNU General Public License as published by the Free
++Software Foundation; either version 2, or (at your option) any later
++version.
++
++GCC is distributed in the hope that it will be useful, but WITHOUT ANY
++WARRANTY; without even the implied warranty of MERCHANTABILITY or
++FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
++for more details.
++
++You should have received a copy of the GNU General Public License
++along with GCC; see the file COPYING.  If not, write to the Free
++Software Foundation, 59 Temple Place - Suite 330, Boston, MA
++02111-1307, USA.  */
++
++
++/* declaration of GUARD variable */
++#define GUARD_m		Pmode
++#define UNITS_PER_GUARD MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
++
++#ifndef L_stack_smash_handler
++
++/* insert a guard variable before a character buffer and change the order
++ of pointer variables, character buffers and pointer arguments */
++
++extern void prepare_stack_protection  PARAMS ((int inlinable));
++
++#ifdef TREE_CODE
++/* search a character array from the specified type tree */
++
++extern int search_string_def PARAMS ((tree names));
++#endif
++
++/* examine whether the input contains frame pointer addressing */
++
++extern int contains_fp PARAMS ((rtx op));
++
++/* Return size that is not allocated for stack frame. It will be allocated
++   to modify the home of pseudo registers called from global_alloc.  */
++
++extern HOST_WIDE_INT get_frame_free_size PARAMS ((void));
++
++/* allocate a local variable in the stack area before character buffers
++   to avoid the corruption of it */
++
++extern rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int));
++
++#endif
+diff -Naur gcc-3.3.orig/gcc/reload1.c gcc-3.3.ssp/gcc/reload1.c
+--- gcc-3.3.orig/gcc/reload1.c	2003-03-28 23:22:05.000000000 +0000
++++ gcc-3.3.ssp/gcc/reload1.c	2004-08-30 01:16:19.000000000 +0000
+@@ -42,6 +42,7 @@
+ #include "toplev.h"
+ #include "except.h"
+ #include "tree.h"
++#include "protector.h"
+ 
+ /* This file contains the reload pass of the compiler, which is
+    run after register allocation has been done.  It checks that
+@@ -925,7 +926,7 @@
+       if (cfun->stack_alignment_needed)
+         assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
+ 
+-      starting_frame_size = get_frame_size ();
++      starting_frame_size = get_frame_size () - get_frame_free_size ();
+ 
+       set_initial_elim_offsets ();
+       set_initial_label_offsets ();
+@@ -989,7 +990,7 @@
+ 	setup_save_areas ();
+ 
+       /* If we allocated another stack slot, redo elimination bookkeeping.  */
+-      if (starting_frame_size != get_frame_size ())
++      if (starting_frame_size != get_frame_size () - get_frame_free_size ())
+ 	continue;
+ 
+       if (caller_save_needed)
+@@ -1008,7 +1009,7 @@
+ 
+       /* If we allocated any new memory locations, make another pass
+ 	 since it might have changed elimination offsets.  */
+-      if (starting_frame_size != get_frame_size ())
++      if (starting_frame_size != get_frame_size () - get_frame_free_size ())
+ 	something_changed = 1;
+ 
+       {
+@@ -1100,11 +1101,11 @@
+   if (insns_need_reload != 0 || something_needs_elimination
+       || something_needs_operands_changed)
+     {
+-      HOST_WIDE_INT old_frame_size = get_frame_size ();
++      HOST_WIDE_INT old_frame_size = get_frame_size () - get_frame_free_size ();
+ 
+       reload_as_needed (global);
+ 
+-      if (old_frame_size != get_frame_size ())
++      if (old_frame_size != get_frame_size () - get_frame_free_size ())
+ 	abort ();
+ 
+       if (num_eliminable)
+@@ -1992,7 +1993,7 @@
+       if (from_reg == -1)
+ 	{
+ 	  /* No known place to spill from => no slot to reuse.  */
+-	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
++	  x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
+ 				  inherent_size == total_size ? 0 : -1);
+ 	  if (BYTES_BIG_ENDIAN)
+ 	    /* Cancel the  big-endian correction done in assign_stack_local.
+diff -Naur gcc-3.3.orig/gcc/rtl.h gcc-3.3.ssp/gcc/rtl.h
+--- gcc-3.3.orig/gcc/rtl.h	2003-05-02 01:21:07.000000000 +0000
++++ gcc-3.3.ssp/gcc/rtl.h	2004-08-30 01:16:19.000000000 +0000
+@@ -440,6 +440,18 @@
+        			     __FUNCTION__);				\
+    _rtx; })
+ 
++#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9)	\
++  __extension__		 		 		 		\
++({ rtx const _rtx = (RTX);						\
++   if (GET_CODE(_rtx) != C1 && GET_CODE(_rtx) != C2	 		\
++       && GET_CODE(_rtx) != C3 && GET_CODE(_rtx) != C4			\
++       && GET_CODE(_rtx) != C5 && GET_CODE(_rtx) != C6			\
++       && GET_CODE(_rtx) != C7 && GET_CODE(_rtx) != C8			\
++       && GET_CODE(_rtx) != C9)						\
++     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
++       			     __FUNCTION__);				\
++   _rtx; })
++
+ extern void rtl_check_failed_flag PARAMS ((const char *, rtx, const char *,
+       					   int, const char *))
+     ATTRIBUTE_NORETURN
+@@ -455,6 +467,7 @@
+ #define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)		(RTX)
+ #define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)		(RTX)
+ #define RTL_FLAG_CHECK8(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8)	(RTX)
++#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9)	(RTX)
+ #endif
+ 
+ #define CLEAR_RTX_FLAGS(RTX)	\
+@@ -549,9 +562,9 @@
+ #define LOG_LINKS(INSN)	XEXP(INSN, 7)
+ 
+ #define RTX_INTEGRATED_P(RTX)						\
+-  (RTL_FLAG_CHECK8("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN,		\
++  (RTL_FLAG_CHECK9("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN,		\
+ 		   JUMP_INSN, INSN_LIST, BARRIER, CODE_LABEL, CONST,	\
+-		   NOTE)->integrated)
++		   NOTE, PLUS)->integrated)
+ #define RTX_UNCHANGING_P(RTX)						\
+   (RTL_FLAG_CHECK3("RTX_UNCHANGING_P", (RTX), REG, MEM, CONCAT)->unchanging)
+ #define RTX_FRAME_RELATED_P(RTX)					\
+diff -Naur gcc-3.3.orig/gcc/simplify-rtx.c gcc-3.3.ssp/gcc/simplify-rtx.c
+--- gcc-3.3.orig/gcc/simplify-rtx.c	2003-02-11 22:26:30.000000000 +0000
++++ gcc-3.3.ssp/gcc/simplify-rtx.c	2004-08-30 01:16:19.000000000 +0000
+@@ -1670,7 +1670,8 @@
+   int n_ops = 2, input_ops = 2, input_consts = 0, n_consts;
+   int first, negate, changed;
+   int i, j;
+-
++  HOST_WIDE_INT fp_offset = 0;
++  
+   memset ((char *) ops, 0, sizeof ops);
+ 
+   /* Set up the two operands and then expand them until nothing has been
+@@ -1695,6 +1696,10 @@
+ 	  switch (this_code)
+ 	    {
+ 	    case PLUS:
++	    if (flag_propolice_protection
++		&& XEXP (this_op, 0) == virtual_stack_vars_rtx
++		&& GET_CODE (XEXP (this_op, 1)) == CONST_INT)
++	      fp_offset = INTVAL (XEXP (this_op, 1));
+ 	    case MINUS:
+ 	      if (n_ops == 7)
+ 		return NULL_RTX;
+@@ -1849,10 +1854,10 @@
+       && GET_CODE (ops[n_ops - 1].op) == CONST_INT
+       && CONSTANT_P (ops[n_ops - 2].op))
+     {
+-      rtx value = ops[n_ops - 1].op;
++      int value = INTVAL (ops[n_ops - 1].op);
+       if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
+-	value = neg_const_int (mode, value);
+-      ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
++	value = -value;
++      ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value);
+       n_ops--;
+     }
+ 
+@@ -1871,6 +1876,54 @@
+ 	  || (n_ops + n_consts == input_ops && n_consts <= input_consts)))
+     return NULL_RTX;
+ 
++  if (flag_propolice_protection)
++    {
++      /* keep the addressing style of local variables
++	 as (plus (virtual_stack_vars_rtx) (CONST_int x))
++	 (1) inline function is expanded, (+ (+VFP c1) -c2)=>(+ VFP c1-c2)
++	 (2) the case ary[r-1], (+ (+VFP c1) (+r -1))=>(+ R (+r -1))
++      */
++      for (i = 0; i < n_ops; i++)
++#ifdef FRAME_GROWS_DOWNWARD
++	if (ops[i].op == virtual_stack_vars_rtx)
++#else
++	if (ops[i].op == virtual_stack_vars_rtx
++	    || ops[i].op == frame_pointer_rtx)
++#endif
++	  {
++	    if (GET_CODE (ops[n_ops - 1].op) == CONST_INT)
++	      {
++		HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
++		if (n_ops < 3 || value >= fp_offset)
++		  {
++		    ops[i].op = plus_constant (ops[i].op, value);
++		    n_ops--;
++		  }
++		else
++		  {
++		    if (!force
++			&& (n_ops+1 + n_consts > input_ops
++			    || (n_ops+1 + n_consts == input_ops && n_consts <= input_consts)))
++		      return NULL_RTX;
++		    ops[n_ops - 1].op = GEN_INT (value-fp_offset);
++		    ops[i].op = plus_constant (ops[i].op, fp_offset);
++		  }
++	      }
++	    /* buf[BUFSIZE]: buf is the first local variable (+ (+ fp -S) S) 
++	       or (+ (fp 0) r) ==> ((+ (+fp 1) r) -1) */
++	    else if (fp_offset != 0)
++	      return NULL_RTX;
++#ifndef FRAME_GROWS_DOWNWARD
++	    /*
++	     * For the case of buf[i], i: REG, buf: (plus fp 0),
++	     */
++	    else if (fp_offset == 0)
++	      return NULL_RTX;
++#endif
++	    break;
++	  }
++    }
++
+   /* Put a non-negated operand first.  If there aren't any, make all
+      operands positive and negate the whole thing later.  */
+ 
+diff -Naur gcc-3.3.orig/gcc/toplev.c gcc-3.3.ssp/gcc/toplev.c
+--- gcc-3.3.orig/gcc/toplev.c	2003-05-05 21:55:26.000000000 +0000
++++ gcc-3.3.ssp/gcc/toplev.c	2004-08-30 01:16:19.000000000 +0000
+@@ -904,6 +904,15 @@
+    minimum function alignment.  Zero means no alignment is forced.  */
+ int force_align_functions_log;
+ 
++#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
++/* Nonzero means use propolice as a stack protection method */
++int flag_propolice_protection = 1;
++int flag_stack_protection = 0;
++#else
++int flag_propolice_protection = 0;
++int flag_stack_protection = 0;
++#endif
++
+ /* Table of supported debugging formats.  */
+ static const struct
+ {
+@@ -1188,6 +1197,10 @@
+    N_("Trap for signed overflow in addition / subtraction / multiplication") },
+   { "new-ra", &flag_new_regalloc, 1,
+    N_("Use graph coloring register allocation.") },
++  {"stack-protector", &flag_propolice_protection, 1,
++   N_("Enables stack protection") },
++  {"stack-protector-all", &flag_stack_protection, 1,
++   N_("Enables stack protection of every function") } ,
+ };
+ 
+ /* Table of language-specific options.  */
+@@ -1547,7 +1560,9 @@
+   {"missing-noreturn", &warn_missing_noreturn, 1,
+    N_("Warn about functions which might be candidates for attribute noreturn") },
+   {"strict-aliasing", &warn_strict_aliasing, 1,
+-   N_ ("Warn about code which might break the strict aliasing rules") }
++   N_ ("Warn about code which might break the strict aliasing rules") },
++  {"stack-protector", &warn_stack_protector, 1,
++   N_("Warn when disabling stack protector for some reason")}
+ };
+ 
+ void
+@@ -2449,6 +2464,8 @@
+ 
+       insns = get_insns ();
+ 
++      if (flag_propolice_protection) prepare_stack_protection (inlinable);
++  
+       /* Dump the rtl code if we are dumping rtl.  */
+ 
+       if (open_dump_file (DFI_rtl, decl))
+@@ -5186,6 +5203,12 @@
+     /* The presence of IEEE signaling NaNs, implies all math can trap.  */
+     if (flag_signaling_nans)
+       flag_trapping_math = 1;
++
++    /* This combination makes optimized frame addressings and causes
++       an internal compilation error at prepare_stack_protection.
++       so don't allow it.  */
++    if (flag_stack_protection && !flag_propolice_protection)
++      flag_propolice_protection = TRUE;
+ }
+ 

+ /* Initialize the compiler back end.  */
+diff -Naur gcc-3.3.orig/gcc/tree.h gcc-3.3.ssp/gcc/tree.h
+--- gcc-3.3.orig/gcc/tree.h	2003-03-24 17:59:37.000000000 +0000
++++ gcc-3.3.ssp/gcc/tree.h	2004-08-30 01:16:19.000000000 +0000
+@@ -1642,6 +1642,9 @@
+    where it is called.  */
+ #define DECL_INLINE(NODE) (FUNCTION_DECL_CHECK (NODE)->decl.inline_flag)
+ 
++/* Nonzero in a VAR_DECL means this variable is skipped by propolice. */
++#define DECL_VAR_INLINE(NODE) (VAR_DECL_CHECK (NODE)->decl.inline_flag)
++
+ /* Nonzero in a FUNCTION_DECL means this function has been found inlinable
+    only by virtue of -finline-functions  */
+ #define DID_INLINE_FUNC(NODE) \

Added: trunk/gcc/gcc-3.3.4-no_fixincludes-1.patch
===================================================================
--- trunk/gcc/gcc-3.3.4-no_fixincludes-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/gcc/gcc-3.3.4-no_fixincludes-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,30 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org>
+Date: 2004-08-30
+Initial Package Version: 3.3.4
+Upstream Status: Not submitted - LFS Specific
+Origin: Originally developed for GCC 3.2 by
+	Greg Schafer <gschafer at zip dot com dot au>
+Description: Prevent fixincludes script from running.
+
+diff -Naur gcc-3.3.4.orig/gcc/Makefile.in gcc-3.3.4.nofixincludes/gcc/Makefile.in
+--- gcc-3.3.4.orig/gcc/Makefile.in	2004-04-01 16:55:23.000000000 +0000
++++ gcc-3.3.4.nofixincludes/gcc/Makefile.in	2004-08-30 20:01:21.000000000 +0000
+@@ -2341,10 +2341,6 @@
+ 	rm -f include/limits.h
+ 	cp xlimits.h include/limits.h
+ 	chmod a+r include/limits.h
+-# Install the README
+-	rm -f include/README
+-	cp $(srcdir)/README-fixinc include/README
+-	chmod a+r include/README
+ 	$(STAMP) $@
+ 
+ # fixinc.sh depends on this, not on specs directly.
+@@ -2386,7 +2382,6 @@
+ 	(TARGET_MACHINE='$(target)'; srcdir=`cd $(srcdir); ${PWD_COMMAND}`; \
+ 	SHELL='$(SHELL)' ;\
+ 	export TARGET_MACHINE srcdir SHELL ; \
+-	$(SHELL) ./fixinc.sh `${PWD_COMMAND}`/include $(SYSTEM_HEADER_DIR) $(OTHER_FIXINCLUDES_DIRS); \
+ 	rm -f include/syslimits.h; \
+ 	if [ -f include/limits.h ]; then \
+ 	  mv include/limits.h include/syslimits.h; \

Added: trunk/gcc/gcc-3.3.4-specs-1.patch
===================================================================
--- trunk/gcc/gcc-3.3.4-specs-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/gcc/gcc-3.3.4-specs-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,262 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org>
+Date: 2004-08-30
+Initial Package Version: 3.3.4
+Upstream Status: Not submitted - LFS Specific
+Origin: Idea originally developed by Ryan Oliver and Greg Schafer for
+the Pure LFS project.
+
+More architectures added by Zack Winkles.
+Further fine tunings by Greg Schafer.
+Modified for gcc 3.3.2 by Oliver Brakmann
+Description: This patch modifies the location of the dynamic linker for the GCC
+Pass 2 build in LFS Chapter 5. It also removes /usr/include from the include
+search path.
+
+NOTE - !defined(USE_GNULIBC_1) is assumed i.e. libc5 is not supported.
+
+WARNING - Not all architectures addressed by this patch have been properly
+tested due to lack of access to those architectures. If you notice any
+problems with this patch on your architecture, please report them to 
+lfs-dev at linuxfromscratch dot org
+
+diff -Naur gcc-3.3.4.orig/gcc/config/alpha/linux-elf.h gcc-3.3.4.specs/gcc/config/alpha/linux-elf.h
+--- gcc-3.3.4.orig/gcc/config/alpha/linux-elf.h	2003-11-14 06:46:13.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/alpha/linux-elf.h	2004-08-30 20:04:36.000000000 +0000
+@@ -30,7 +30,7 @@
+ #ifdef USE_GNULIBC_1
+ #define ELF_DYNAMIC_LINKER	"/lib/ld.so.1"
+ #else
+-#define ELF_DYNAMIC_LINKER	"/lib/ld-linux.so.2"
++#define ELF_DYNAMIC_LINKER	"/tools/lib/ld-linux.so.2"
+ #endif
+ 
+ #define LINK_SPEC "-m elf64alpha %{G*} %{relax:-relax}		\
+diff -Naur gcc-3.3.4.orig/gcc/config/arm/linux-elf.h gcc-3.3.4.specs/gcc/config/arm/linux-elf.h
+--- gcc-3.3.4.orig/gcc/config/arm/linux-elf.h	2004-03-30 20:43:45.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/arm/linux-elf.h	2004-08-30 20:04:36.000000000 +0000
+@@ -86,7 +86,7 @@
+    %{shared:-shared} \
+    %{symbolic:-Bsymbolic} \
+    %{rdynamic:-export-dynamic} \
+-   %{!dynamic-linker:-dynamic-linker /lib/ld-linux.so.2} \
++   %{!dynamic-linker:-dynamic-linker /tools/lib/ld-linux.so.2} \
+    -X \
+    %{mbig-endian:-EB}" \
+    SUBTARGET_EXTRA_LINK_SPEC
+diff -Naur gcc-3.3.4.orig/gcc/config/i386/linux.h gcc-3.3.4.specs/gcc/config/i386/linux.h
+--- gcc-3.3.4.orig/gcc/config/i386/linux.h	2003-11-14 06:46:12.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/i386/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -141,7 +141,7 @@
+     %{!ibcs: \
+       %{!static: \
+ 	%{rdynamic:-export-dynamic} \
+-	%{!dynamic-linker:-dynamic-linker /lib/ld-linux.so.2}} \
++	%{!dynamic-linker:-dynamic-linker /tools/lib/ld-linux.so.2}} \
+ 	%{static:-static}}}"
+ #endif
+ 
+diff -Naur gcc-3.3.4.orig/gcc/config/i386/linux64.h gcc-3.3.4.specs/gcc/config/i386/linux64.h
+--- gcc-3.3.4.orig/gcc/config/i386/linux64.h	2003-11-14 06:46:12.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/i386/linux64.h	2004-08-30 20:04:36.000000000 +0000
+@@ -67,8 +67,8 @@
+   %{!shared: \
+     %{!static: \
+       %{rdynamic:-export-dynamic} \
+-      %{m32:%{!dynamic-linker:-dynamic-linker /lib/ld-linux.so.2}} \
+-      %{!m32:%{!dynamic-linker:-dynamic-linker /lib64/ld-linux-x86-64.so.2}}} \
++      %{m32:%{!dynamic-linker:-dynamic-linker /tools/lib/ld-linux.so.2}} \
++      %{!m32:%{!dynamic-linker:-dynamic-linker /tools/lib64/ld-linux-x86-64.so.2}}} \
+     %{static:-static}}"
+ 
+ #undef  STARTFILE_SPEC
+diff -Naur gcc-3.3.4.orig/gcc/config/ia64/linux.h gcc-3.3.4.specs/gcc/config/ia64/linux.h
+--- gcc-3.3.4.orig/gcc/config/ia64/linux.h	2003-12-12 16:10:09.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/ia64/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -43,7 +43,7 @@
+   %{!shared: \
+     %{!static: \
+       %{rdynamic:-export-dynamic} \
+-      %{!dynamic-linker:-dynamic-linker /lib/ld-linux-ia64.so.2}} \
++      %{!dynamic-linker:-dynamic-linker /tools/lib/ld-linux-ia64.so.2}} \
+       %{static:-static}}"
+ 
+ 
+diff -Naur gcc-3.3.4.orig/gcc/config/linux.h gcc-3.3.4.specs/gcc/config/linux.h
+--- gcc-3.3.4.orig/gcc/config/linux.h	2003-09-16 15:39:22.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -115,3 +115,7 @@
+ #define HANDLE_PRAGMA_PACK_PUSH_POP
+ 
+ #define TARGET_HAS_F_SETLKW
++
++/* Remove /usr/include from the end of the include search path.  */
++#undef STANDARD_INCLUDE_DIR
++#define STANDARD_INCLUDE_DIR 0
+diff -Naur gcc-3.3.4.orig/gcc/config/m68k/linux.h gcc-3.3.4.specs/gcc/config/m68k/linux.h
+--- gcc-3.3.4.orig/gcc/config/m68k/linux.h	2003-11-14 06:46:12.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/m68k/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -179,7 +179,7 @@
+   %{!shared: \
+     %{!static: \
+       %{rdynamic:-export-dynamic} \
+-      %{!dynamic-linker*:-dynamic-linker /lib/ld.so.1}} \
++      %{!dynamic-linker*:-dynamic-linker /tools/lib/ld.so.1}} \
+     %{static}}"
+ #endif
+ 
+diff -Naur gcc-3.3.4.orig/gcc/config/mips/linux.h gcc-3.3.4.specs/gcc/config/mips/linux.h
+--- gcc-3.3.4.orig/gcc/config/mips/linux.h	2003-12-23 08:58:00.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/mips/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -182,7 +182,7 @@
+     %{!ibcs: \
+       %{!static: \
+         %{rdynamic:-export-dynamic} \
+-        %{!dynamic-linker:-dynamic-linker /lib/ld.so.1}} \
++        %{!dynamic-linker:-dynamic-linker /tools/lib/ld.so.1}} \
+         %{static:-static}}}"
+ 
+ #undef SUBTARGET_ASM_SPEC
+diff -Naur gcc-3.3.4.orig/gcc/config/pa/pa-linux.h gcc-3.3.4.specs/gcc/config/pa/pa-linux.h
+--- gcc-3.3.4.orig/gcc/config/pa/pa-linux.h	2002-12-10 10:55:31.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/pa/pa-linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -88,7 +88,7 @@
+   %{!shared: \
+     %{!static: \
+       %{rdynamic:-export-dynamic} \
+-      %{!dynamic-linker:-dynamic-linker /lib/ld.so.1}} \
++      %{!dynamic-linker:-dynamic-linker /tools/lib/ld.so.1}} \
+       %{static:-static}}"
+ 
+ /* glibc's profiling functions don't need gcc to allocate counters.  */
+diff -Naur gcc-3.3.4.orig/gcc/config/rs6000/linux64.h gcc-3.3.4.specs/gcc/config/rs6000/linux64.h
+--- gcc-3.3.4.orig/gcc/config/rs6000/linux64.h	2003-11-14 06:46:10.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/rs6000/linux64.h	2004-08-30 20:04:36.000000000 +0000
+@@ -158,7 +158,7 @@
+ #undef  LINK_OS_LINUX_SPEC
+ #define LINK_OS_LINUX_SPEC "-m elf64ppc %{!shared: %{!static: \
+   %{rdynamic:-export-dynamic} \
+-  %{!dynamic-linker:-dynamic-linker /lib64/ld64.so.1}}}"
++  %{!dynamic-linker:-dynamic-linker /tools/lib64/ld64.so.1}}}"
+ 
+ #ifdef NATIVE_CROSS
+ #define STARTFILE_PREFIX_SPEC "/usr/local/lib64/ /lib64/ /usr/lib64/"
+diff -Naur gcc-3.3.4.orig/gcc/config/rs6000/sysv4.h gcc-3.3.4.specs/gcc/config/rs6000/sysv4.h
+--- gcc-3.3.4.orig/gcc/config/rs6000/sysv4.h	2004-03-29 19:16:43.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/rs6000/sysv4.h	2004-08-30 20:04:36.000000000 +0000
+@@ -1161,7 +1161,7 @@
+ 
+ #define LINK_OS_LINUX_SPEC "-m elf32ppclinux %{!shared: %{!static: \
+   %{rdynamic:-export-dynamic} \
+-  %{!dynamic-linker:-dynamic-linker /lib/ld.so.1}}}"
++  %{!dynamic-linker:-dynamic-linker /tools/lib/ld.so.1}}}"
+ 
+ #if !defined(USE_GNULIBC_1) && defined(HAVE_LD_EH_FRAME_HDR)
+ # define LINK_EH_SPEC "%{!static:--eh-frame-hdr} "
+diff -Naur gcc-3.3.4.orig/gcc/config/s390/linux.h gcc-3.3.4.specs/gcc/config/s390/linux.h
+--- gcc-3.3.4.orig/gcc/config/s390/linux.h	2003-11-14 06:46:10.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/s390/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -94,7 +94,7 @@
+       %{static:-static} \
+       %{!static: \
+ 	%{rdynamic:-export-dynamic} \
+-	%{!dynamic-linker:-dynamic-linker /lib/ld.so.1}}}"
++	%{!dynamic-linker:-dynamic-linker /tools/lib/ld.so.1}}}"
+ 
+ #define LINK_ARCH64_SPEC \
+   "-m elf64_s390 \
+@@ -103,7 +103,7 @@
+       %{static:-static} \
+       %{!static: \
+ 	%{rdynamic:-export-dynamic} \
+-	%{!dynamic-linker:-dynamic-linker /lib/ld64.so.1}}}"
++	%{!dynamic-linker:-dynamic-linker /tools/lib/ld64.so.1}}}"
+ 
+ #ifdef DEFAULT_TARGET_64BIT
+ #undef  LINK_SPEC
+diff -Naur gcc-3.3.4.orig/gcc/config/sh/linux.h gcc-3.3.4.specs/gcc/config/sh/linux.h
+--- gcc-3.3.4.orig/gcc/config/sh/linux.h	2003-11-06 23:13:33.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/sh/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -48,7 +48,7 @@
+   "%{shared:-shared} \
+    %{!static: \
+      %{rdynamic:-export-dynamic} \
+-     %{!dynamic-linker:-dynamic-linker /lib/ld-linux.so.2}} \
++     %{!dynamic-linker:-dynamic-linker /tools/lib/ld-linux.so.2}} \
+    %{static:-static}"
+ 
+ /* The GNU C++ standard library requires that these macros be defined.  */
+diff -Naur gcc-3.3.4.orig/gcc/config/sparc/linux.h gcc-3.3.4.specs/gcc/config/sparc/linux.h
+--- gcc-3.3.4.orig/gcc/config/sparc/linux.h	2004-04-29 04:42:52.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/sparc/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -170,13 +170,13 @@
+         %{static:-static}}}"
+ #endif
+ #else
+-#define LINK_SPEC "-m elf32_sparc -Y P,/usr/lib %{shared:-shared} \
++#define LINK_SPEC "-m elf32_sparc -Y P,/tools/lib %{shared:-shared} \
+   %{!mno-relax:%{!r:-relax}} \
+   %{!shared: \
+     %{!ibcs: \
+       %{!static: \
+         %{rdynamic:-export-dynamic} \
+-        %{!dynamic-linker:-dynamic-linker /lib/ld-linux.so.2}} \
++        %{!dynamic-linker:-dynamic-linker /tools/lib/ld-linux.so.2}} \
+         %{static:-static}}}"
+ #endif
+ 
+diff -Naur gcc-3.3.4.orig/gcc/config/sparc/linux64.h gcc-3.3.4.specs/gcc/config/sparc/linux64.h
+--- gcc-3.3.4.orig/gcc/config/sparc/linux64.h	2004-04-29 04:42:52.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/sparc/linux64.h	2004-08-30 20:04:36.000000000 +0000
+@@ -153,21 +153,21 @@
+   { "link_arch_default", LINK_ARCH_DEFAULT_SPEC },	  \
+   { "link_arch",	 LINK_ARCH_SPEC },
+     
+-#define LINK_ARCH32_SPEC "-m elf32_sparc -Y P,/usr/lib %{shared:-shared} \
++#define LINK_ARCH32_SPEC "-m elf32_sparc -Y P,/tools/lib %{shared:-shared} \
+   %{!shared: \
+     %{!ibcs: \
+       %{!static: \
+         %{rdynamic:-export-dynamic} \
+-        %{!dynamic-linker:-dynamic-linker /lib/ld-linux.so.2}} \
++        %{!dynamic-linker:-dynamic-linker /tools/lib/ld-linux.so.2}} \
+         %{static:-static}}} \
+ "
+ 
+-#define LINK_ARCH64_SPEC "-m elf64_sparc -Y P,/usr/lib64 %{shared:-shared} \
++#define LINK_ARCH64_SPEC "-m elf64_sparc -Y P,/tools/lib64 %{shared:-shared} \
+   %{!shared: \
+     %{!ibcs: \
+       %{!static: \
+         %{rdynamic:-export-dynamic} \
+-        %{!dynamic-linker:-dynamic-linker /lib64/ld-linux.so.2}} \
++        %{!dynamic-linker:-dynamic-linker /tools/lib64/ld-linux.so.2}} \
+         %{static:-static}}} \
+ "
+ 
+@@ -222,12 +222,12 @@
+ #else /* !SPARC_BI_ARCH */
+ 
+ #undef LINK_SPEC
+-#define LINK_SPEC "-m elf64_sparc -Y P,/usr/lib64 %{shared:-shared} \
++#define LINK_SPEC "-m elf64_sparc -Y P,/tools/lib64 %{shared:-shared} \
+   %{!shared: \
+     %{!ibcs: \
+       %{!static: \
+         %{rdynamic:-export-dynamic} \
+-        %{!dynamic-linker:-dynamic-linker /lib64/ld-linux.so.2}} \
++        %{!dynamic-linker:-dynamic-linker /tools/lib64/ld-linux.so.2}} \
+         %{static:-static}}} \
+ %{mlittle-endian:-EL} \
+ %{!mno-relax:%{!r:-relax}} \
+diff -Naur gcc-3.3.4.orig/gcc/config/xtensa/linux.h gcc-3.3.4.specs/gcc/config/xtensa/linux.h
+--- gcc-3.3.4.orig/gcc/config/xtensa/linux.h	2003-04-26 00:43:44.000000000 +0000
++++ gcc-3.3.4.specs/gcc/config/xtensa/linux.h	2004-08-30 20:04:36.000000000 +0000
+@@ -52,7 +52,7 @@
+     %{!ibcs: \
+       %{!static: \
+         %{rdynamic:-export-dynamic} \
+-        %{!dynamic-linker:-dynamic-linker /lib/ld.so.1}} \
++        %{!dynamic-linker:-dynamic-linker /tools/lib/ld.so.1}} \
+       %{static:-static}}}"
+ 
+ #undef LOCAL_LABEL_PREFIX

Added: trunk/gcc/gcc-3.4-ssp-1.patch
===================================================================
--- trunk/gcc/gcc-3.4-ssp-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/gcc/gcc-3.4-ssp-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,3929 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2004-08-29
+Initial Package Version: 3.4
+Upstream Status: Rejected Upstream
+Origin: http://www.research.ibm.com/trl/projects/security/ssp/
+Description: Smashing Stack Protector - protector-3.4-2.tar.gz
+This patch is made specifically to work with the Glibc SSP patch. All guard
+functions have been removed. Developers are encouraged to check the
+differences between this patch, the original from ibm, and the Glibc patch.
+
+You might also want to change the version after applying this patch:
+sed -e 's/3.4.1/3.4.1 ssp/' -i gcc/version.c
+
+This patch, and Glibc's patch, depends on erandom sysctl from:
+http://frandom.sourceforge.net/
+Thanks to Eli Billauer.
+
+Also see:
+http://www.linuxfromscratch.org/hlfs/
+http://www.linuxfromscratch.org/hints/downloads/files/ssp.txt
+http://www.linuxfromscratch.org/hints/downloads/files/entropy.txt
+
+diff -Naur gcc-3.4.1.orig/gcc/Makefile.in gcc-3.4.1.ssp/gcc/Makefile.in
+--- gcc-3.4.1.orig/gcc/Makefile.in	2004-06-17 21:56:58.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/Makefile.in	2004-08-30 00:57:21.000000000 +0000
+@@ -859,7 +859,7 @@
+  sibcall.o simplify-rtx.o sreal.o stmt.o stor-layout.o stringpool.o 	   \
+  targhooks.o timevar.o toplev.o tracer.o tree.o tree-dump.o unroll.o	   \
+  varasm.o varray.o version.o vmsdbgout.o xcoffout.o alloc-pool.o	   \
+- et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o
++ et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o protector.o
+ 
+ OBJS-md = $(out_object_file)
+ OBJS-archive = $(EXTRA_OBJS) $(host_hook_obj) hashtable.o tree-inline.o	   \
+@@ -1835,6 +1835,10 @@
+ params.o : params.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(PARAMS_H) toplev.h
+ hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(HOOKS_H)
+ pretty-print.o: $(CONFIG_H) $(SYSTEM_H) pretty-print.c $(PRETTY_PRINT_H)
++protector.o : protector.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
++   flags.h function.h $(EXPR_H) $(OPTABS_H) $(REGS_H) toplev.h hard-reg-set.h \
++   insn-config.h insn-flags.h $(RECOG_H) output.h toplev.h except.h reload.h \
++   $(TM_P_H) conditions.h $(INSN_ATTR_H) real.h protector.h
+ 
+ $(out_object_file): $(out_file) $(CONFIG_H) coretypes.h $(TM_H) $(TREE_H) $(GGC_H) \
+    $(RTL_H) $(REGS_H) hard-reg-set.h real.h insn-config.h conditions.h \
+diff -Naur gcc-3.4.1.orig/gcc/calls.c gcc-3.4.1.ssp/gcc/calls.c
+--- gcc-3.4.1.orig/gcc/calls.c	2004-06-24 07:26:50.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/calls.c	2004-08-30 00:57:21.000000000 +0000
+@@ -2321,8 +2321,12 @@
+ 	  {
+ 	    /* For variable-sized objects, we must be called with a target
+ 	       specified.  If we were to allocate space on the stack here,
+-	       we would have no way of knowing when to free it.  */
+-	    rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
++	       we would have no way of knowing when to free it.
++
++	       This is the structure of a function return object and it isn't
++	       a character array for the stack protection, so it is
++	       marked using the assignment of the KEEP argument to 5.  */
++	    rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
+ 
+ 	    mark_temp_addr_taken (d);
+ 	    structure_value_addr = XEXP (d, 0);
+diff -Naur gcc-3.4.1.orig/gcc/combine.c gcc-3.4.1.ssp/gcc/combine.c
+--- gcc-3.4.1.orig/gcc/combine.c	2004-05-18 10:51:30.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/combine.c	2004-08-30 00:57:22.000000000 +0000
+@@ -1401,6 +1401,10 @@
+ 	      && ! fixed_regs[REGNO (dest)]
+ 	      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
+     return 1;
++  /* Never combine loads and stores protecting argument that use set insn
++     with used flag on.  */
++  if (SET_VOLATILE_P (set))
++    return 1;
+ 
+   return 0;
+ }
+@@ -3780,7 +3784,20 @@
+ 	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
+ 	  rtx inner_op1 = XEXP (x, 1);
+ 	  rtx inner;
+-
++	  
++#ifndef FRAME_GROWS_DOWNWARD
++	  /* For the case where the frame grows upward,
++	     the stack protector keeps the offset of the frame pointer
++	     positive integer.  */
++	  if (flag_propolice_protection
++	      && code == PLUS
++	      && other == frame_pointer_rtx
++	      && GET_CODE (inner_op0) == CONST_INT
++	      && GET_CODE (inner_op1) == CONST_INT
++	      && INTVAL (inner_op0) > 0
++	      && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
++	    return x;
++#endif
+ 	  /* Make sure we pass the constant operand if any as the second
+ 	     one if this is a commutative operation.  */
+ 	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
+@@ -4145,6 +4162,13 @@
+ 	 they are now checked elsewhere.  */
+       if (GET_CODE (XEXP (x, 0)) == PLUS
+ 	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
++#ifndef FRAME_GROWS_DOWNWARD
++	/* The stack protector keeps the addressing style of a local variable
++	   to be able to change its stack position.  */
++	if (! (flag_propolice_protection
++	       && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
++	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
++#endif
+ 	return gen_binary (PLUS, mode,
+ 			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
+ 				       XEXP (x, 1)),
+@@ -4272,8 +4296,14 @@
+ 	}
+ 
+       /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
+-	 integers.  */
+-      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
++	 integers.
++	 
++	 The stack protector keeps the addressing style of
++	 a local variable.  */
++      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
++	  && (! (flag_propolice_protection
++		 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
++		 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
+ 	return gen_binary (MINUS, mode,
+ 			   gen_binary (MINUS, mode, XEXP (x, 0),
+ 				       XEXP (XEXP (x, 1), 0)),
+diff -Naur gcc-3.4.1.orig/gcc/common.opt gcc-3.4.1.ssp/gcc/common.opt
+--- gcc-3.4.1.orig/gcc/common.opt	2004-02-18 00:09:04.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/common.opt	2004-08-30 00:57:22.000000000 +0000
+@@ -152,6 +152,10 @@
+ Common
+ Warn when a variable is unused
+ 
++Wstack-protector
++Common
++Warn when not issuing stack smashing protection for some reason
++
+ aux-info
+ Common Separate
+ -aux-info <file>	Emit declaration information into <file>
+@@ -738,6 +742,14 @@
+ Common
+ Put zero initialized data in the bss section
+ 
++fstack-protector
++Common
++Enables stack protection
++
++fstack-protector-all
++Common
++Enables stack protection of every function
++
+ g
+ Common JoinedOrMissing
+ Generate debug information in default format
+diff -Naur gcc-3.4.1.orig/gcc/config/arm/arm.md gcc-3.4.1.ssp/gcc/config/arm/arm.md
+--- gcc-3.4.1.orig/gcc/config/arm/arm.md	2004-01-13 13:24:37.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/config/arm/arm.md	2004-08-30 00:57:22.000000000 +0000
+@@ -3840,7 +3840,13 @@
+ 	(match_operand:DI 1 "general_operand" ""))]
+   "TARGET_EITHER"
+   "
+-  if (TARGET_THUMB)
++  if (TARGET_ARM)
++    {
++      /* Everything except mem = const or mem = mem can be done easily */
++      if (GET_CODE (operands[0]) == MEM)
++        operands[1] = force_reg (DImode, operands[1]);
++    }
++  else /* TARGET_THUMB.... */
+     {
+       if (!no_new_pseudos)
+         {
+diff -Naur gcc-3.4.1.orig/gcc/config/t-linux gcc-3.4.1.ssp/gcc/config/t-linux
+--- gcc-3.4.1.orig/gcc/config/t-linux	2003-09-23 18:55:57.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/config/t-linux	2004-08-30 00:57:22.000000000 +0000
+@@ -1,7 +1,7 @@
+ # Compile crtbeginS.o and crtendS.o with pic.
+ CRTSTUFF_T_CFLAGS_S = $(CRTSTUFF_T_CFLAGS) -fPIC
+ # Compile libgcc2.a with pic.
+-TARGET_LIBGCC2_CFLAGS = -fPIC
++TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG
+ 
+ # Override t-slibgcc-elf-ver to export some libgcc symbols with
+ # the symbol versions that glibc used.
+diff -Naur gcc-3.4.1.orig/gcc/cse.c gcc-3.4.1.ssp/gcc/cse.c
+--- gcc-3.4.1.orig/gcc/cse.c	2004-06-21 23:32:58.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/cse.c	2004-08-30 00:57:22.000000000 +0000
+@@ -4207,7 +4207,14 @@
+ 
+ 	      if (new_const == 0)
+ 		break;
+-
++#ifndef FRAME_GROWS_DOWNWARD
++	      if (flag_propolice_protection
++		  && GET_CODE (y) == PLUS
++		  && XEXP (y, 0) == frame_pointer_rtx
++		  && INTVAL (inner_const) > 0
++		  && INTVAL (new_const) <= 0)
++		break;
++#endif
+ 	      /* If we are associating shift operations, don't let this
+ 		 produce a shift of the size of the object or larger.
+ 		 This could occur when we follow a sign-extend by a right
+@@ -4739,6 +4746,14 @@
+       if (SET_DEST (x) == pc_rtx
+ 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
+ 	;
++      /* cut the reg propagation of stack-protected argument.  */
++      else if (SET_VOLATILE_P (x)) {
++	rtx x1 = SET_DEST (x);
++	if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
++	  x1 = SUBREG_REG (x1);
++	if (! REGNO_QTY_VALID_P(REGNO (x1)))
++	  make_new_qty (REGNO (x1), GET_MODE (x1));
++      }
+ 
+       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
+ 	 The hard function value register is used only once, to copy to
+diff -Naur gcc-3.4.1.orig/gcc/explow.c gcc-3.4.1.ssp/gcc/explow.c
+--- gcc-3.4.1.orig/gcc/explow.c	2004-04-02 23:05:26.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/explow.c	2004-08-30 00:57:22.000000000 +0000
+@@ -84,7 +84,8 @@
+   rtx tem;
+   int all_constant = 0;
+ 
+-  if (c == 0)
++  if (c == 0
++      && ! (flag_propolice_protection && x == virtual_stack_vars_rtx))
+     return x;
+ 
+  restart:
+@@ -185,7 +186,8 @@
+       break;
+     }
+ 
+-  if (c != 0)
++  if (c != 0
++      || (flag_propolice_protection && x == virtual_stack_vars_rtx))
+     x = gen_rtx_PLUS (mode, x, GEN_INT (c));
+ 
+   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
+@@ -474,6 +476,26 @@
+       if (memory_address_p (mode, oldx))
+ 	goto win2;
+ 
++      /* The stack protector keeps the addressing style of a local variable.
++	 LEGITIMIZE_ADDRESS changes the addressing to the machine-dependent
++	 style, so the protector split the frame address to a register using
++	 force_reg. */
++      if (flag_propolice_protection)
++	{
++#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS				\
++			&& XEXP (X, 0) == virtual_stack_vars_rtx	\
++			&& GET_CODE (XEXP (X, 1)) == CONST_INT)
++	  rtx y;
++	  if (FRAMEADDR_P (x))
++	    goto win;
++	  for (y = x; y != 0 && GET_CODE (y) == PLUS; y = XEXP (y, 0))
++	    {
++	      if (FRAMEADDR_P (XEXP (y, 0)))
++		XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
++	      if (FRAMEADDR_P (XEXP (y, 1)))
++		XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
++	    }
++	}
+       /* Perform machine-dependent transformations on X
+ 	 in certain cases.  This is not necessary since the code
+ 	 below can handle all possible cases, but machine-dependent
+diff -Naur gcc-3.4.1.orig/gcc/expr.c gcc-3.4.1.ssp/gcc/expr.c
+--- gcc-3.4.1.orig/gcc/expr.c	2004-05-27 19:35:17.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/expr.c	2004-08-30 00:57:22.000000000 +0000
+@@ -48,6 +48,7 @@
+ #include "intl.h"
+ #include "tm_p.h"
+ #include "target.h"
++#include "protector.h"
+ 
+ /* Decide whether a function's arguments should be processed
+    from first to last or from last to first.
+@@ -1060,7 +1061,11 @@
+ 
+    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
+    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
+-   stpcpy.  */
++   stpcpy.
++
++   When the stack protector is used at the reverse move, it starts the move
++   instruction from the address within the region of a variable.
++   So it eliminates the first address decrement instruction.  */
+ 
+ rtx
+ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
+@@ -1123,6 +1128,8 @@
+ 
+       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
+ 	{
++	  if (flag_propolice_protection)
++	    len = len - GET_MODE_SIZE (mode);
+ 	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
+ 	  data.autinc_from = 1;
+ 	  data.explicit_inc_from = -1;
+@@ -1137,6 +1144,8 @@
+ 	data.from_addr = copy_addr_to_reg (from_addr);
+       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
+ 	{
++	  if (flag_propolice_protection)
++	    len = len - GET_MODE_SIZE (mode);
+ 	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
+ 	  data.autinc_to = 1;
+ 	  data.explicit_inc_to = -1;
+@@ -1280,11 +1289,15 @@
+ 	from1 = adjust_address (data->from, mode, data->offset);
+ 
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
+-	emit_insn (gen_add2_insn (data->to_addr,
+-				  GEN_INT (-(HOST_WIDE_INT)size)));
++	/* The stack protector skips the first address decrement instruction
++	   at the reverse move.  */
++	if (!flag_propolice_protection || data->explicit_inc_to < -1)
++	  emit_insn (gen_add2_insn (data->to_addr,
++				    GEN_INT (-(HOST_WIDE_INT)size)));
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
+-	emit_insn (gen_add2_insn (data->from_addr,
+-				  GEN_INT (-(HOST_WIDE_INT)size)));
++	if (!flag_propolice_protection || data->explicit_inc_from < -1)
++	  emit_insn (gen_add2_insn (data->from_addr,
++				    GEN_INT (-(HOST_WIDE_INT)size)));
+ 
+       if (data->to)
+ 	emit_insn ((*genfun) (to1, from1));
+@@ -2475,7 +2488,12 @@
+ 
+       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
+ 	{
+-	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
++	  int len = data->len;
++	  /* The stack protector starts the store instruction from
++	     the address within the region of a variable.  */
++	  if (flag_propolice_protection)
++	    len -= GET_MODE_SIZE (mode);
++	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
+ 	  data->autinc_to = 1;
+ 	  data->explicit_inc_to = -1;
+ 	}
+@@ -2544,8 +2562,11 @@
+ 	to1 = adjust_address (data->to, mode, data->offset);
+ 
+       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
+-	emit_insn (gen_add2_insn (data->to_addr,
+-				  GEN_INT (-(HOST_WIDE_INT) size)));
++	/* The stack protector skips the first address decrement instruction
++	   at the reverse store.  */
++	if (!flag_propolice_protection || data->explicit_inc_to < -1)
++	  emit_insn (gen_add2_insn (data->to_addr,
++				    GEN_INT (-(HOST_WIDE_INT) size)));
+ 
+       cst = (*data->constfun) (data->constfundata, data->offset, mode);
+       emit_insn ((*genfun) (to1, cst));
+@@ -5701,7 +5722,9 @@
+ 	  && GET_CODE (XEXP (value, 0)) == PLUS
+ 	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
+ 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
+-	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
++	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
++	  && (!flag_propolice_protection
++	      || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
+ 	{
+ 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
+ 					  XEXP (XEXP (value, 0), 0), op2,
+diff -Naur gcc-3.4.1.orig/gcc/flags.h gcc-3.4.1.ssp/gcc/flags.h
+--- gcc-3.4.1.orig/gcc/flags.h	2004-02-18 00:09:04.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/flags.h	2004-08-30 00:57:22.000000000 +0000
+@@ -186,6 +186,10 @@
+ 
+ extern bool warn_strict_aliasing;
+ 
++/* Warn when not issuing stack smashing protection for some reason.  */
++
++extern bool warn_stack_protector;
++
+ /* Nonzero if generating code to do profiling.  */
+ 
+ extern int profile_flag;
+@@ -771,4 +775,12 @@
+ #define HONOR_SIGN_DEPENDENT_ROUNDING(MODE) \
+   (MODE_HAS_SIGN_DEPENDENT_ROUNDING (MODE) && flag_rounding_math)
+ 
++/* Nonzero means use propolice as a stack protection method.  */
++
++extern int flag_propolice_protection;
++
++/* Nonzero means use a stack protection method for every function.  */
++
++extern int flag_stack_protection;
++
+ #endif /* ! GCC_FLAGS_H */
+diff -Naur gcc-3.4.1.orig/gcc/function.c gcc-3.4.1.ssp/gcc/function.c
+--- gcc-3.4.1.orig/gcc/function.c	2004-06-22 23:54:02.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/function.c	2004-08-30 00:57:22.000000000 +0000
+@@ -63,6 +63,7 @@
+ #include "integrate.h"
+ #include "langhooks.h"
+ #include "target.h"
++#include "protector.h"
+ 
+ #ifndef TRAMPOLINE_ALIGNMENT
+ #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
+@@ -155,6 +156,10 @@
+ /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
+    in this function.  */
+ static GTY(()) varray_type sibcall_epilogue;
++
++/* Current boundary mark for character arrays.  */
++static int temp_boundary_mark = 0;
++
+ 

+ /* In order to evaluate some expressions, such as function calls returning
+    structures in memory, we need to temporarily allocate stack locations.
+@@ -208,6 +213,8 @@
+   /* The size of the slot, including extra space for alignment.  This
+      info is for combine_temp_slots.  */
+   HOST_WIDE_INT full_size;
++  /* Boundary mark of a character array and the others. This info is for propolice.  */
++  int boundary_mark;
+ };
+ 

+ /* This structure is used to record MEMs or pseudos used to replace VAR, any
+@@ -638,6 +645,7 @@
+    whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
+    if we are to allocate something at an inner level to be treated as
+    a variable in the block (e.g., a SAVE_EXPR).
++   KEEP is 5 if we allocate a place to return structure.
+ 
+    TYPE is the type that will be used for the stack slot.  */
+ 
+@@ -648,6 +656,8 @@
+   unsigned int align;
+   struct temp_slot *p, *best_p = 0;
+   rtx slot;
++  int char_array = (flag_propolice_protection
++		    && keep == 1 && search_string_def (type));
+ 
+   /* If SIZE is -1 it means that somebody tried to allocate a temporary
+      of a variable size.  */
+@@ -673,7 +683,8 @@
+ 	&& ! p->in_use
+ 	&& objects_must_conflict_p (p->type, type)
+ 	&& (best_p == 0 || best_p->size > p->size
+-	    || (best_p->size == p->size && best_p->align > p->align)))
++	    || (best_p->size == p->size && best_p->align > p->align))
++	&& (! char_array || p->boundary_mark != 0))
+       {
+ 	if (p->align == align && p->size == size)
+ 	  {
+@@ -708,6 +719,7 @@
+ 	      p->address = 0;
+ 	      p->rtl_expr = 0;
+ 	      p->type = best_p->type;
++	      p->boundary_mark = best_p->boundary_mark;
+ 	      p->next = temp_slots;
+ 	      temp_slots = p;
+ 
+@@ -768,6 +780,7 @@
+       p->full_size = frame_offset - frame_offset_old;
+ #endif
+       p->address = 0;
++      p->boundary_mark = char_array ? ++temp_boundary_mark : 0;
+       p->next = temp_slots;
+       temp_slots = p;
+     }
+@@ -932,14 +945,16 @@
+ 	    int delete_q = 0;
+ 	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
+ 	      {
+-		if (p->base_offset + p->full_size == q->base_offset)
++		if (p->base_offset + p->full_size == q->base_offset &&
++		    p->boundary_mark == q->boundary_mark)
+ 		  {
+ 		    /* Q comes after P; combine Q into P.  */
+ 		    p->size += q->size;
+ 		    p->full_size += q->full_size;
+ 		    delete_q = 1;
+ 		  }
+-		else if (q->base_offset + q->full_size == p->base_offset)
++		else if (q->base_offset + q->full_size == p->base_offset &&
++			 p->boundary_mark == q->boundary_mark)
+ 		  {
+ 		    /* P comes after Q; combine P into Q.  */
+ 		    q->size += p->size;
+@@ -1449,7 +1464,9 @@
+     }
+ 
+   if (new == 0)
+-    new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
++    new = function ?
++      assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func)
++      :	assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0);
+ 
+   PUT_CODE (reg, MEM);
+   PUT_MODE (reg, decl_mode);
+@@ -3930,7 +3947,8 @@
+ 		 constant with that register.  */
+ 	      temp = gen_reg_rtx (Pmode);
+ 	      XEXP (x, 0) = new;
+-	      if (validate_change (object, &XEXP (x, 1), temp, 0))
++	      if (validate_change (object, &XEXP (x, 1), temp, 0)
++		  && !flag_propolice_protection)
+ 		emit_insn_before (gen_move_insn (temp, new_offset), object);
+ 	      else
+ 		{
+diff -Naur gcc-3.4.1.orig/gcc/gcse.c gcc-3.4.1.ssp/gcc/gcse.c
+--- gcc-3.4.1.orig/gcc/gcse.c	2004-03-25 16:44:42.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/gcse.c	2004-08-30 00:57:22.000000000 +0000
+@@ -4178,7 +4178,7 @@
+       /* Find an assignment that sets reg_used and is available
+ 	 at the start of the block.  */
+       set = find_avail_set (regno, insn);
+-      if (! set)
++      if (! set || SET_VOLATILE_P (set->expr))
+ 	continue;
+ 
+       pat = set->expr;
+diff -Naur gcc-3.4.1.orig/gcc/integrate.c gcc-3.4.1.ssp/gcc/integrate.c
+--- gcc-3.4.1.orig/gcc/integrate.c	2004-01-23 23:36:00.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/integrate.c	2004-08-30 00:57:22.000000000 +0000
+@@ -393,6 +393,10 @@
+   /* These args would always appear unused, if not for this.  */
+   TREE_USED (copy) = 1;
+ 
++  /* The inlined variable is marked as INLINE not to sweep by propolice */
++  if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL)
++    DECL_COPIED (copy) = 1;
++
+   /* Set the context for the new declaration.  */
+   if (!DECL_CONTEXT (decl))
+     /* Globals stay global.  */
+@@ -1970,6 +1974,10 @@
+ 
+ 	      seq = get_insns ();
+ 	      end_sequence ();
++#ifdef ARGS_GROWS_DOWNWARD
++	      if (flag_propolice_protection && GET_CODE (seq) == SET)
++		RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
++#endif
+ 	      emit_insn_after (seq, map->insns_at_start);
+ 	      return temp;
+ 	    }
+diff -Naur gcc-3.4.1.orig/gcc/loop.c gcc-3.4.1.ssp/gcc/loop.c
+--- gcc-3.4.1.orig/gcc/loop.c	2004-06-25 21:47:46.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/loop.c	2004-08-30 00:57:22.000000000 +0000
+@@ -6514,6 +6514,14 @@
+   if (GET_CODE (*mult_val) == USE)
+     *mult_val = XEXP (*mult_val, 0);
+ 
++#ifndef FRAME_GROWS_DOWNWARD
++  if (flag_propolice_protection
++      && GET_CODE (*add_val) == PLUS
++      && (XEXP (*add_val, 0) == frame_pointer_rtx
++	  || XEXP (*add_val, 1) == frame_pointer_rtx))
++    return 0;
++#endif
++
+   if (is_addr)
+     *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost;
+   else
+diff -Naur gcc-3.4.1.orig/gcc/mklibgcc.in gcc-3.4.1.ssp/gcc/mklibgcc.in
+--- gcc-3.4.1.orig/gcc/mklibgcc.in	2003-11-21 04:53:09.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/mklibgcc.in	2004-08-30 00:57:22.000000000 +0000
+@@ -51,7 +51,7 @@
+ 	_trampoline __main _absvsi2 _absvdi2 _addvsi3 _addvdi3
+ 	_subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors
+ 	_ffssi2 _ffsdi2 _clz _clzsi2 _clzdi2 _ctzsi2 _ctzdi2 _popcount_tab
+-	_popcountsi2 _popcountdi2 _paritysi2 _paritydi2'
++	_popcountsi2 _popcountdi2 _paritysi2 _paritydi2 _stack_smash_handler'
+ 
+ # Disable SHLIB_LINK if shared libgcc not enabled.
+ if [ "@enable_shared@" = "no" ]; then
+diff -Naur gcc-3.4.1.orig/gcc/optabs.c gcc-3.4.1.ssp/gcc/optabs.c
+--- gcc-3.4.1.orig/gcc/optabs.c	2004-03-03 00:45:01.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/optabs.c	2004-08-30 00:57:22.000000000 +0000
+@@ -678,6 +678,26 @@
+   if (target)
+     target = protect_from_queue (target, 1);
+ 
++  if (flag_propolice_protection
++      && binoptab->code == PLUS
++      && op0 == virtual_stack_vars_rtx
++      && GET_CODE(op1) == CONST_INT)
++    {
++      int icode = (int) binoptab->handlers[(int) mode].insn_code;
++      if (target)
++	temp = target;
++      else
++	temp = gen_reg_rtx (mode);
++
++      if (! (*insn_data[icode].operand[0].predicate) (temp, mode)
++	  || GET_CODE (temp) != REG)
++	temp = gen_reg_rtx (mode);
++
++      emit_insn (gen_rtx_SET (VOIDmode, temp,
++			      gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
++      return temp;
++    }
++
+   if (flag_force_mem)
+     {
+       /* Load duplicate non-volatile operands once.  */
+diff -Naur gcc-3.4.1.orig/gcc/opts.c gcc-3.4.1.ssp/gcc/opts.c
+--- gcc-3.4.1.orig/gcc/opts.c	2004-02-18 00:09:04.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/opts.c	2004-08-30 00:57:22.000000000 +0000
+@@ -125,6 +125,9 @@
+ bool warn_unused_variable;
+ bool warn_unused_value;
+ 
++/* Warn when not issuing stack smashing protection for some reason */
++bool warn_stack_protector;
++
+ /* Hack for cooperation between set_Wunused and set_Wextra.  */
+ static bool maybe_warn_unused_parameter;
+ 
+@@ -798,6 +801,10 @@
+       warn_unused_variable = value;
+       break;
+ 
++    case OPT_Wstack_protector:
++      warn_stack_protector = value;
++      break;
++
+     case OPT_aux_info:
+     case OPT_aux_info_:
+       aux_info_file_name = arg;
+@@ -1361,6 +1368,14 @@
+       stack_limit_rtx = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (arg));
+       break;
+ 
++    case OPT_fstack_protector:
++      flag_propolice_protection = value;
++      break;
++
++    case OPT_fstack_protector_all:
++      flag_stack_protection = value;
++      break;
++
+     case OPT_fstrength_reduce:
+       flag_strength_reduce = value;
+       break;
+diff -Naur gcc-3.4.1.orig/gcc/protector.c gcc-3.4.1.ssp/gcc/protector.c
+--- gcc-3.4.1.orig/gcc/protector.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/protector.c	2004-03-22 07:34:40.000000000 +0000
+@@ -0,0 +1,2712 @@
++/* RTL buffer overflow protection function for GNU C compiler
++   Copyright (C) 2003 Free Software Foundation, Inc.
++
++This file is part of GCC.
++
++GCC is free software; you can redistribute it and/or modify it under
++the terms of the GNU General Public License as published by the Free
++Software Foundation; either version 2, or (at your option) any later
++version.
++
++GCC is distributed in the hope that it will be useful, but WITHOUT ANY
++WARRANTY; without even the implied warranty of MERCHANTABILITY or
++FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
++for more details.
++
++You should have received a copy of the GNU General Public License
++along with GCC; see the file COPYING.  If not, write to the Free
++Software Foundation, 59 Temple Place - Suite 330, Boston, MA
++02111-1307, USA.  */
++
++/* This file contains several memory arrangement functions to protect
++   the return address and the frame pointer of the stack
++   from a stack-smashing attack. It also
++   provides the function that protects pointer variables.  */
++
++#include "config.h"
++#include "system.h"
++#include "coretypes.h"
++#include "tm.h"
++#include "machmode.h"
++#include "real.h"
++#include "rtl.h"
++#include "tree.h"
++#include "regs.h"
++#include "flags.h"
++#include "insn-config.h"
++#include "insn-flags.h"
++#include "expr.h"
++#include "output.h"
++#include "recog.h"
++#include "hard-reg-set.h"
++#include "except.h"
++#include "function.h"
++#include "toplev.h"
++#include "tm_p.h"
++#include "conditions.h"
++#include "insn-attr.h"
++#include "optabs.h"
++#include "reload.h"
++#include "protector.h"
++
++
++/* Round a value to the lowest integer less than it that is a multiple of
++   the required alignment.  Avoid using division in case the value is
++   negative.  Assume the alignment is a power of two.  */
++#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
++
++/* Similar, but round to the next highest integer that meets the
++   alignment.  */
++#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
++
++
++/* Nonzero if function being compiled can define string buffers that may be
++   damaged by the stack-smash attack.  */
++static int current_function_defines_vulnerable_string;
++static int current_function_defines_short_string;
++static int current_function_has_variable_string;
++static int current_function_defines_vsized_array;
++static int current_function_is_inlinable;
++static int is_array;
++
++static rtx guard_area, _guard;
++static rtx function_first_insn, prologue_insert_point;
++
++/* Offset to end of sweeped area for gathering character arrays.  */
++static HOST_WIDE_INT sweep_frame_offset;
++
++/* Offset to end of allocated area for instantiating pseudo registers.  */
++static HOST_WIDE_INT push_allocated_offset = 0;
++
++/* Offset to end of assigned area for instantiating pseudo registers.  */
++static HOST_WIDE_INT push_frame_offset = 0;
++
++/* Set to 1 after cse_not_expected becomes nonzero. it is used to identify
++   which stage assign_stack_local_for_pseudo_reg is called from.  */
++static int saved_cse_not_expected = 0;
++
++static int search_string_from_argsandvars (int);
++static int search_string_from_local_vars (tree);
++static int search_pointer_def (tree);
++static int search_func_pointer (tree);
++static int check_used_flag (rtx);
++static void reset_used_flags_for_insns (rtx);
++static void reset_used_flags_for_decls (tree);
++static void reset_used_flags_of_plus (rtx);
++static void rtl_prologue (rtx);
++static void rtl_epilogue (rtx);
++static void arrange_var_order (tree);
++static void copy_args_for_protection (void);
++static void sweep_string_variable (rtx, HOST_WIDE_INT);
++static void sweep_string_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
++static void sweep_string_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
++static void sweep_string_use_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
++static void sweep_string_in_operand (rtx, rtx *, HOST_WIDE_INT, HOST_WIDE_INT);
++static void move_arg_location (rtx, rtx, rtx, HOST_WIDE_INT);
++static void change_arg_use_of_insns (rtx, rtx, rtx *, HOST_WIDE_INT);
++static void change_arg_use_in_operand (rtx, rtx, rtx, rtx *, HOST_WIDE_INT);
++static void validate_insns_of_varrefs (rtx);
++static void validate_operand_of_varrefs (rtx, rtx *);
++
++/* Specify which size of buffers should be protected from a stack smashing
++   attack. Because small buffers are not used in situations which may
++   overflow buffer, the default size sets to the size of 64 bit register.  */
++#ifndef SUSPICIOUS_BUF_SIZE
++#define SUSPICIOUS_BUF_SIZE 8
++#endif
++
++#define AUTO_BASEPTR(X) \
++  (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
++#define AUTO_OFFSET(X) \
++  (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
++#undef PARM_PASSED_IN_MEMORY
++#define PARM_PASSED_IN_MEMORY(PARM) \
++ (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
++#define TREE_VISITED(NODE) ((NODE)->common.unused_0)
++
++/* Argument values for calling search_string_from_argsandvars.  */
++#define CALL_FROM_PREPARE_STACK_PROTECTION	0
++#define CALL_FROM_PUSH_FRAME			1
++
++
++/* Prepare several stack protection instruments for the current function
++   if the function has an array as a local variable, which may be vulnerable
++   from a stack smashing attack, and it is not inlinable.
++
++   The overall steps are as follows;
++   (1)search an array,
++   (2)insert guard_area on the stack,
++   (3)duplicate pointer arguments into local variables, and
++   (4)arrange the location of local variables.  */
++void
++prepare_stack_protection (int inlinable)
++{
++  tree blocks = DECL_INITIAL (current_function_decl);
++  current_function_is_inlinable = inlinable && !flag_no_inline;
++  push_frame_offset = push_allocated_offset = 0;
++  saved_cse_not_expected = 0;
++
++  /* Skip the protection if the function has no block
++    or it is an inline function.  */
++  if (current_function_is_inlinable)
++    validate_insns_of_varrefs (get_insns ());
++  if (! blocks || current_function_is_inlinable)
++    return;
++
++  current_function_defines_vulnerable_string
++    = search_string_from_argsandvars (CALL_FROM_PREPARE_STACK_PROTECTION);
++
++  if (current_function_defines_vulnerable_string
++      || flag_stack_protection)
++    {
++      function_first_insn = get_insns ();
++
++      if (current_function_contains_functions)
++	{
++	  if (warn_stack_protector)
++	    warning ("not protecting function: it contains functions");
++	  return;
++	}
++
++      /* Initialize recognition, indicating that volatile is OK.  */
++      init_recog ();
++
++      sweep_frame_offset = 0;
++	
++#ifdef STACK_GROWS_DOWNWARD
++      /* frame_offset: offset to end of allocated area of stack frame.
++	 It is defined in the function.c.  */
++
++      /* the location must be before buffers.  */
++      guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
++      PUT_MODE (guard_area, GUARD_m);
++      MEM_VOLATILE_P (guard_area) = 1;
++
++#ifndef FRAME_GROWS_DOWNWARD
++      sweep_frame_offset = frame_offset;
++#endif
++
++      /* For making room for guard value, scan all insns and fix the offset
++	 address of the variable that is based on frame pointer.
++	 Scan all declarations of variables and fix the offset address
++	 of the variable that is based on the frame pointer.  */
++      sweep_string_variable (guard_area, UNITS_PER_GUARD);
++
++	
++      /* the location of guard area moves to the beginning of stack frame.  */
++      if (AUTO_OFFSET(XEXP (guard_area, 0)))
++	XEXP (XEXP (guard_area, 0), 1)
++	  = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
++
++
++      /* Insert prologue rtl instructions.  */
++      rtl_prologue (function_first_insn);
++
++      if (! current_function_has_variable_string)
++	{
++	  /* Generate argument saving instruction.  */
++	  copy_args_for_protection ();
++
++#ifndef FRAME_GROWS_DOWNWARD
++	  /* If frame grows upward, character arrays for protecting args
++	     may copy to the top of the guard variable.
++	     So sweep the guard variable again.  */
++	  sweep_frame_offset = CEIL_ROUND (frame_offset,
++					   BIGGEST_ALIGNMENT / BITS_PER_UNIT);
++	  sweep_string_variable (guard_area, UNITS_PER_GUARD);
++#endif
++	}
++      /* Variable can't be protected from the overflow of variable length
++	 buffer. But variable reordering is still effective against
++	 the overflow of fixed size character arrays.  */
++      else if (warn_stack_protector)
++	warning ("not protecting variables: it has a variable length buffer");
++#endif
++#ifndef FRAME_GROWS_DOWNWARD
++      if (STARTING_FRAME_OFFSET == 0)
++	{
++	  /* This part may be only for alpha.  */
++	  push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++	  assign_stack_local (BLKmode, push_allocated_offset, -1);
++	  sweep_frame_offset = frame_offset;
++	  sweep_string_variable (const0_rtx, -push_allocated_offset);
++	  sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
++	}
++#endif
++
++      /* Arrange the order of local variables.  */
++      arrange_var_order (blocks);
++
++#ifdef STACK_GROWS_DOWNWARD
++      /* Insert epilogue rtl instructions.  */
++      rtl_epilogue (get_last_insn ());
++#endif
++      init_recog_no_volatile ();
++    }
++  else if (current_function_defines_short_string
++	   && warn_stack_protector)
++    warning ("not protecting function: buffer is less than %d bytes long",
++	     SUSPICIOUS_BUF_SIZE);
++}
++
++/*
++  Search string from arguments and local variables.
++   caller: CALL_FROM_PREPARE_STACK_PROTECTION (0)
++	   CALL_FROM_PUSH_FRAME (1)
++*/
++static int
++search_string_from_argsandvars (int caller)
++{
++  tree blocks, parms;
++  int string_p;
++
++  /* Saves a latest search result as a cached infomation.  */
++  static tree __latest_search_decl = 0;
++  static int  __latest_search_result = FALSE;
++
++  if (__latest_search_decl == current_function_decl)
++    return __latest_search_result;
++  else
++    if (caller == CALL_FROM_PUSH_FRAME)
++      return FALSE;
++
++  __latest_search_decl = current_function_decl;
++  __latest_search_result = TRUE;
++  
++  current_function_defines_short_string = FALSE;
++  current_function_has_variable_string = FALSE;
++  current_function_defines_vsized_array = FALSE;
++
++  /* Search a string variable from local variables.  */
++  blocks = DECL_INITIAL (current_function_decl);
++  string_p = search_string_from_local_vars (blocks);
++
++  if (! current_function_defines_vsized_array && current_function_calls_alloca)
++    {
++      current_function_has_variable_string = TRUE;
++      return TRUE;
++    }
++
++  if (string_p)
++    return TRUE;
++
++#ifdef STACK_GROWS_DOWNWARD
++  /* Search a string variable from arguments.  */
++  parms = DECL_ARGUMENTS (current_function_decl);
++
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms))
++	  {
++	    string_p = search_string_def (TREE_TYPE(parms));
++	    if (string_p)
++	      return TRUE;
++	  }
++      }
++#endif
++
++  __latest_search_result = FALSE;
++  return FALSE;
++}
++
++
++/* Search string from local variables in the specified scope.  */
++static int
++search_string_from_local_vars (tree block)
++{
++  tree types;
++  int found = FALSE;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable.  */
++	  /* name: types.decl.name.identifier.id                     */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
++	      && TREE_CODE (types) == VAR_DECL
++	      && ! DECL_ARTIFICIAL (types)
++	      && DECL_RTL_SET_P (types)
++	      && GET_CODE (DECL_RTL (types)) == MEM
++
++	      && search_string_def (TREE_TYPE (types)))
++	    {
++	      rtx home = DECL_RTL (types);
++
++	      if (GET_CODE (home) == MEM
++		  && (GET_CODE (XEXP (home, 0)) == MEM
++		      || (GET_CODE (XEXP (home, 0)) == REG
++			  && XEXP (home, 0) != virtual_stack_vars_rtx
++			  && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
++			  && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
++#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
++			  && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
++#endif
++			  )))
++		/* If the value is indirect by memory or by a register
++		   that isn't the frame pointer then it means the object is
++		   variable-sized and address through
++		   that register or stack slot.
++		   The protection has no way to hide pointer variables
++		   behind the array, so all we can do is staying
++		   the order of variables and arguments.  */
++		{
++		  current_function_has_variable_string = TRUE;
++		}
++	    
++	      /* Found character array.  */
++	      found = TRUE;
++	    }
++	}
++
++      if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
++	{
++	  found = TRUE;
++	}
++
++      block = BLOCK_CHAIN (block);
++    }
++    
++  return found;
++}
++
++
++/* Search a character array from the specified type tree.  */
++int
++search_string_def (tree type)
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case ARRAY_TYPE:
++      /* Check if the array is a variable-sized array.  */
++      if (TYPE_DOMAIN (type) == 0
++	  || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++	      && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
++	current_function_defines_vsized_array = TRUE;
++
++      /* Check if the array is related to char array.  */
++      if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
++	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
++	  || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
++	{
++	  /* Check if the string is a variable string.  */
++	  if (TYPE_DOMAIN (type) == 0
++	      || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++		  && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
++	    return TRUE;
++
++	  /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE.  */
++	  if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
++	      && (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
++		  >= SUSPICIOUS_BUF_SIZE))
++	    return TRUE;
++
++	  current_function_defines_short_string = TRUE;
++	}
++      
++      /* to protect every functions, sweep any arrays to the frame top.  */
++      is_array = TRUE;
++
++      return search_string_def(TREE_TYPE(type));
++	
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++      /* Check if each field has character arrays.  */
++      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	{
++	  /* Omit here local type decls until we know how to support them. */
++	  if ((TREE_CODE (tem) == TYPE_DECL)
++	      || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
++	    continue;
++
++	  if (search_string_def(TREE_TYPE(tem)))
++	    return TRUE;
++	}
++      break;
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++    case OFFSET_TYPE:
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++/* Examine whether the input contains frame pointer addressing.  */
++int
++contains_fp (rtx op)
++{
++  enum rtx_code code;
++  rtx x;
++  int i, j;
++  const char *fmt;
++
++  x = op;
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case REG:
++    case ADDRESSOF:
++      return FALSE;
++
++    case MEM:
++      /* This case is not generated at the stack protection.
++	 see plus_constant_wide and simplify_plus_minus function.  */
++      if (XEXP (x, 0) == virtual_stack_vars_rtx)
++	abort ();
++      
++    case PLUS:
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
++	return TRUE;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (contains_fp (XEXP (x, i)))
++	  return TRUE;
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	if (contains_fp (XVECEXP (x, i, j)))
++	  return TRUE;
++
++  return FALSE;
++}
++
++
++/* Examine whether the input contains any pointer.  */
++static int
++search_pointer_def (tree type)
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++      /* Check if each field has a pointer.  */
++      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	{
++	  if ((TREE_CODE (tem) == TYPE_DECL)
++	      || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
++	    continue;
++
++	  if (search_pointer_def (TREE_TYPE(tem)))
++	    return TRUE;
++	}
++      break;
++
++    case ARRAY_TYPE:
++      return search_pointer_def (TREE_TYPE(type));
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++    case OFFSET_TYPE:
++      if (TYPE_READONLY (TREE_TYPE (type)))
++	{
++	  /* If this pointer contains function pointer,
++	     it should be protected.  */
++	  return search_func_pointer (TREE_TYPE (type));
++	}
++      return TRUE;
++	
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++/* Examine whether the input contains function pointer.  */
++static int
++search_func_pointer (tree type)
++{
++  tree tem;
++    
++  if (! type)
++    return FALSE;
++
++  switch (TREE_CODE (type))
++    {
++    case UNION_TYPE:
++    case QUAL_UNION_TYPE:
++    case RECORD_TYPE:
++	if (! TREE_VISITED (type))
++	  {
++	    /* Mark the type as having been visited already.  */
++	    TREE_VISITED (type) = 1;
++
++	    /* Check if each field has a function pointer.  */
++	    for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
++	      {
++		if (TREE_CODE (tem) == FIELD_DECL
++		    && search_func_pointer (TREE_TYPE(tem)))
++		  {
++		    TREE_VISITED (type) = 0;
++		    return TRUE;
++		  }
++	      }
++	    
++	    TREE_VISITED (type) = 0;
++	  }
++	break;
++
++    case ARRAY_TYPE:
++      return search_func_pointer (TREE_TYPE(type));
++	
++    case POINTER_TYPE:
++    case REFERENCE_TYPE:
++    case OFFSET_TYPE:
++      if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
++	return TRUE;
++      return search_func_pointer (TREE_TYPE(type));
++	
++    default:
++      break;
++    }
++
++  return FALSE;
++}
++
++
++/* Check whether the specified rtx contains PLUS rtx with used flag.  */
++static int
++check_used_flag (rtx x)
++{
++  register int i, j;
++  register enum rtx_code code;
++  register const char *format_ptr;
++
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case REG:
++    case QUEUED:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++      return FALSE;
++
++    case PLUS:
++      if (x->used)
++	return TRUE;
++
++    default:
++      break;
++    }
++
++  format_ptr = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++)
++    {
++      switch (*format_ptr++)
++	{
++	case 'e':
++	  if (check_used_flag (XEXP (x, i)))
++	    return TRUE;
++	  break;
++
++	case 'E':
++	  for (j = 0; j < XVECLEN (x, i); j++)
++	    if (check_used_flag (XVECEXP (x, i, j)))
++	      return TRUE;
++	  break;
++	}
++    }
++
++  return FALSE;
++}
++
++
++/* Reset used flag of every insns after the spcecified insn.  */
++static void
++reset_used_flags_for_insns (rtx insn)
++{
++  int i, j;
++  enum rtx_code code;
++  const char *format_ptr;
++
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	code = GET_CODE (insn);
++	insn->used = 0;
++	format_ptr = GET_RTX_FORMAT (code);
++
++	for (i = 0; i < GET_RTX_LENGTH (code); i++)
++	  {
++	    switch (*format_ptr++)
++	      {
++	      case 'e':
++		reset_used_flags_of_plus (XEXP (insn, i));
++		break;
++			
++	      case 'E':
++		for (j = 0; j < XVECLEN (insn, i); j++)
++		  reset_used_flags_of_plus (XVECEXP (insn, i, j));
++		break;
++	      }
++	  }
++      }
++}
++
++
++/* Reset used flag of every variables in the specified block.  */
++static void
++reset_used_flags_for_decls (tree block)
++{
++  tree types;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      types = BLOCK_VARS(block);
++	
++      for (types= BLOCK_VARS(block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable and
++	     also skip an global variable.  */
++	  if (! DECL_EXTERNAL (types))
++	    {
++	      if (! DECL_RTL_SET_P (types))
++		continue;
++	      home = DECL_RTL (types);
++
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == PLUS
++		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++		{
++		  XEXP (home, 0)->used = 0;
++		}
++	    }
++	}
++
++      reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++/* Reset the used flag of every PLUS rtx derived from the specified rtx.  */
++static void
++reset_used_flags_of_plus (rtx x)
++{
++  int i, j;
++  enum rtx_code code;
++  const char *format_ptr;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++      /* These types may be freely shared so we needn't do any resetting
++	 for them.  */
++    case REG:
++    case QUEUED:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++      return;
++
++    case INSN:
++    case JUMP_INSN:
++    case CALL_INSN:
++    case NOTE:
++    case LABEL_REF:
++    case BARRIER:
++      /* The chain of insns is not being copied.  */
++      return;
++      
++    case PLUS:
++      x->used = 0;
++      break;
++
++    case CALL_PLACEHOLDER:
++      reset_used_flags_for_insns (XEXP (x, 0));
++      reset_used_flags_for_insns (XEXP (x, 1));
++      reset_used_flags_for_insns (XEXP (x, 2));
++      break;
++
++    default:
++      break;
++    }
++
++  format_ptr = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++)
++    {
++      switch (*format_ptr++)
++	{
++	case 'e':
++	  reset_used_flags_of_plus (XEXP (x, i));
++	  break;
++
++	case 'E':
++	  for (j = 0; j < XVECLEN (x, i); j++)
++	    reset_used_flags_of_plus (XVECEXP (x, i, j));
++	  break;
++	}
++    }
++}
++
++
++/* Generate the prologue insns of the protector into the specified insn.  */
++static void
++rtl_prologue (rtx insn)
++{
++#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
++#undef HAS_INIT_SECTION
++#define HAS_INIT_SECTION
++#endif
++
++  rtx _val;
++
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == NOTE
++	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
++      break;
++  
++#if !defined (HAS_INIT_SECTION)
++  /* If this function is `main', skip a call to `__main'
++     to run guard instruments after global initializers, etc.  */
++  if (DECL_NAME (current_function_decl)
++      && MAIN_NAME_P (DECL_NAME (current_function_decl))
++      && DECL_CONTEXT (current_function_decl) == NULL_TREE)
++    {
++      rtx fbinsn = insn;
++      for (; insn; insn = NEXT_INSN (insn))
++	if (GET_CODE (insn) == NOTE
++	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
++	  break;
++      if (insn == 0)
++	insn = fbinsn;
++    }
++#endif
++
++  /* Mark the next insn of FUNCTION_BEG insn.  */
++  prologue_insert_point = NEXT_INSN (insn);
++		
++  start_sequence ();
++
++  _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
++  emit_move_insn ( guard_area, _guard);
++
++  _val = get_insns ();
++  end_sequence ();
++
++  emit_insn_before (_val, prologue_insert_point);
++}
++
++
++/* Generate the epilogue insns of the protector into the specified insn.  */
++static void
++rtl_epilogue (rtx insn)
++{
++  rtx if_false_label;
++  rtx _val;
++  rtx funcname;
++  tree funcstr;
++  int  flag_have_return = FALSE;
++		
++  start_sequence ();
++
++#ifdef HAVE_return
++  if (HAVE_return)
++    {
++      rtx insn;
++      return_label = gen_label_rtx ();
++      
++      for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
++	if (GET_CODE (insn) == JUMP_INSN
++	    && GET_CODE (PATTERN (insn)) == RETURN
++	    && GET_MODE (PATTERN (insn)) == VOIDmode)
++	  {
++	    rtx pat = gen_rtx_SET (VOIDmode,
++				   pc_rtx,
++				   gen_rtx_LABEL_REF (VOIDmode,
++						      return_label));
++	    PATTERN (insn) = pat;
++	    flag_have_return = TRUE;
++	  }
++
++
++      emit_label (return_label);
++    }
++#endif
++
++  /*                                          if (guard_area != _guard) */
++  compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
++
++  if_false_label = gen_label_rtx ();		/* { */
++  emit_jump_insn ( gen_beq(if_false_label));
++
++  /* generate string for the current function name */
++  funcstr = build_string (strlen(current_function_name ())+1,
++			  current_function_name ());
++  TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
++  funcname = output_constant_def (funcstr, 1);
++
++  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__stack_smash_handler"),
++		     0, VOIDmode, 2,
++                     XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
++
++  /* generate RTL to return from the current function */
++		
++  emit_barrier ();				/* } */
++  emit_label (if_false_label);
++
++  /* generate RTL to return from the current function */
++  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
++    use_return_register ();
++
++#ifdef HAVE_return
++  if (HAVE_return && flag_have_return)
++    {
++      emit_jump_insn (gen_return ());
++      emit_barrier ();
++    }
++#endif
++  
++  _val = get_insns ();
++  end_sequence ();
++
++  emit_insn_after (_val, insn);
++}
++
++
++/* For every variable which type is character array, moves its location
++   in the stack frame to the sweep_frame_offset position.  */
++static void
++arrange_var_order (tree block)
++{
++  tree types;
++  HOST_WIDE_INT offset;
++    
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      /* arrange the location of character arrays in depth first.  */
++      arrange_var_order (BLOCK_SUBBLOCKS (block));
++      
++      for (types = BLOCK_VARS (block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable.  */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
++	      && TREE_CODE (types) == VAR_DECL
++	      && ! DECL_ARTIFICIAL (types)
++	      /* && ! DECL_COPIED (types): gcc3.4 can sweep inlined string.  */
++	      && DECL_RTL_SET_P (types)
++	      && GET_CODE (DECL_RTL (types)) == MEM
++	      && GET_MODE (DECL_RTL (types)) == BLKmode
++
++	      && (is_array=0,
++		  search_string_def (TREE_TYPE (types))
++		  || (! current_function_defines_vulnerable_string && is_array)))
++	    {
++	      rtx home = DECL_RTL (types);
++
++	      if (!(GET_CODE (home) == MEM
++		    && (GET_CODE (XEXP (home, 0)) == MEM
++			|| (GET_CODE (XEXP (home, 0)) == REG
++			    && XEXP (home, 0) != virtual_stack_vars_rtx
++			    && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
++			    && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
++#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
++			    && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
++#endif
++			    ))))
++		{
++		  /* Found a string variable.  */
++		  HOST_WIDE_INT var_size =
++		    ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
++		     / BITS_PER_UNIT);
++
++		  /* Confirmed it is BLKmode.  */
++		  int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++		  var_size = CEIL_ROUND (var_size, alignment);
++
++		  /* Skip the variable if it is top of the region
++		     specified by sweep_frame_offset.  */
++		  offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
++		  if (offset == sweep_frame_offset - var_size)
++		    sweep_frame_offset -= var_size;
++		      
++		  else if (offset < sweep_frame_offset - var_size)
++		    sweep_string_variable (DECL_RTL (types), var_size);
++		}
++	    }
++	}
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++/* To protect every pointer argument and move character arrays in the argument,
++   Copy those variables to the top of the stack frame and move the location of
++   character arrays to the position of sweep_frame_offset.  */
++static void
++copy_args_for_protection (void)
++{
++  tree parms = DECL_ARGUMENTS (current_function_decl);
++  rtx temp_rtx;
++
++  parms = DECL_ARGUMENTS (current_function_decl);
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    int string_p;
++	    rtx seq;
++
++	    string_p = search_string_def (TREE_TYPE(parms));
++
++	    /* Check if it is a candidate to move.  */
++	    if (string_p || search_pointer_def (TREE_TYPE (parms)))
++	      {
++		int arg_size
++		  = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
++		     / BITS_PER_UNIT);
++		tree passed_type = DECL_ARG_TYPE (parms);
++		tree nominal_type = TREE_TYPE (parms);
++		
++		start_sequence ();
++
++		if (GET_CODE (DECL_RTL (parms)) == REG)
++		  {
++		    rtx safe = 0;
++		    
++		    change_arg_use_of_insns (prologue_insert_point,
++					     DECL_RTL (parms), &safe, 0);
++		    if (safe)
++		      {
++			/* Generate codes for copying the content.  */
++			rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
++		    
++			/* Avoid register elimination in gcse.c.  */
++			PATTERN (movinsn)->volatil = 1;
++			
++			/* Save debugger info.  */
++			SET_DECL_RTL (parms, safe);
++		      }
++		  }
++		else if (GET_CODE (DECL_RTL (parms)) == MEM
++			 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
++		  {
++		    rtx movinsn;
++		    rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
++
++		    /* Generate codes for copying the content.  */
++		    movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
++		    /* Avoid register elimination in gcse.c.  */
++		    PATTERN (movinsn)->volatil = 1;
++
++		    /* Change the addressof information to the newly
++		       allocated pseudo register.  */
++		    emit_move_insn (DECL_RTL (parms), safe);
++
++		    /* Save debugger info.  */
++		    SET_DECL_RTL (parms, safe);
++		  }
++			
++		/* See if the frontend wants to pass this by invisible
++		   reference.  */
++		else if (passed_type != nominal_type
++			 && POINTER_TYPE_P (passed_type)
++			 && TREE_TYPE (passed_type) == nominal_type)
++		  {
++		    rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
++
++		    change_arg_use_of_insns (prologue_insert_point,
++					     orig, &safe, 0);
++		    if (safe)
++		      {
++			/* Generate codes for copying the content.  */
++			rtx movinsn = emit_move_insn (safe, orig);
++		    
++			/* Avoid register elimination in gcse.c  */
++			PATTERN (movinsn)->volatil = 1;
++			
++			/* Save debugger info.  */
++			SET_DECL_RTL (parms, safe);
++		      }
++		  }
++
++		else
++		  {
++		    /* Declare temporary local variable for parms.  */
++		    temp_rtx
++		      = assign_stack_local (DECL_MODE (parms), arg_size,
++					    DECL_MODE (parms) == BLKmode ?
++					    -1 : 0);
++		    
++		    MEM_IN_STRUCT_P (temp_rtx)
++		      = AGGREGATE_TYPE_P (TREE_TYPE (parms));
++		    set_mem_alias_set (temp_rtx, get_alias_set (parms));
++
++		    /* Generate codes for copying the content.  */
++		    store_expr (parms, temp_rtx, 0);
++
++		    /* Change the reference for each instructions.  */
++		    move_arg_location (prologue_insert_point, DECL_RTL (parms),
++				       temp_rtx, arg_size);
++
++		    /* Change the location of parms variable.  */
++		    SET_DECL_RTL (parms, temp_rtx);
++		  }
++
++		seq = get_insns ();
++		end_sequence ();
++		emit_insn_before (seq, prologue_insert_point);
++
++#ifdef FRAME_GROWS_DOWNWARD
++		/* Process the string argument.  */
++		if (string_p && DECL_MODE (parms) == BLKmode)
++		  {
++		    int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
++		    arg_size = CEIL_ROUND (arg_size, alignment);
++			
++		    /* Change the reference for each instructions.  */
++		    sweep_string_variable (DECL_RTL (parms), arg_size);
++		  }
++#endif
++	      }
++	  }
++      }
++}
++
++
++/* Sweep a string variable to the position of sweep_frame_offset in the 
++   stack frame, that is a last position of string variables.  */
++static void
++sweep_string_variable (rtx sweep_var, HOST_WIDE_INT var_size)
++{
++  HOST_WIDE_INT sweep_offset;
++
++  switch (GET_CODE (sweep_var))
++    {
++    case MEM:
++      if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
++	  && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
++	return;
++      sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
++      break;
++    case CONST_INT:
++      sweep_offset = INTVAL (sweep_var);
++      break;
++    default:
++      abort ();
++    }
++
++  /* Scan all declarations of variables and fix the offset address of
++     the variable based on the frame pointer.  */
++  sweep_string_in_decls (DECL_INITIAL (current_function_decl),
++			 sweep_offset, var_size);
++
++  /* Scan all argument variable and fix the offset address based on
++     the frame pointer.  */
++  sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
++			sweep_offset, var_size);
++
++  /* For making room for sweep variable, scan all insns and
++     fix the offset address of the variable that is based on frame pointer.  */
++  sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
++
++
++  /* Clear all the USED bits in operands of all insns and declarations of
++     local variables.  */
++  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
++  reset_used_flags_for_insns (function_first_insn);
++
++  sweep_frame_offset -= var_size;
++}
++
++
++
++/* Move an argument to the local variable addressed by frame_offset.  */
++static void
++move_arg_location (rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)
++{
++  /* For making room for sweep variable, scan all insns and
++     fix the offset address of the variable that is based on frame pointer.  */
++  change_arg_use_of_insns (insn, orig, &new, var_size);
++
++
++  /* Clear all the USED bits in operands of all insns and declarations
++     of local variables.  */
++  reset_used_flags_for_insns (insn);
++}
++
++
++/* Sweep character arrays declared as local variable.  */
++static void
++sweep_string_in_decls (tree block, HOST_WIDE_INT sweep_offset,
++		       HOST_WIDE_INT sweep_size)
++{
++  tree types;
++  HOST_WIDE_INT offset;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable and
++	     also skip an global variable.  */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
++	    
++	    if (! DECL_RTL_SET_P (types))
++	      continue;
++
++	    home = DECL_RTL (types);
++
++	    /* Process for static local variable.  */
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
++	      continue;
++
++	    if (GET_CODE (home) == MEM
++		&& XEXP (home, 0) == virtual_stack_vars_rtx)
++	      {
++		offset = 0;
++		
++		/* the operand related to the sweep variable.  */
++		if (sweep_offset <= offset
++		    && offset < sweep_offset + sweep_size)
++		  {
++		    offset = sweep_frame_offset - sweep_size - sweep_offset;
++
++		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
++						    offset);
++		    XEXP (home, 0)->used = 1;
++		  }
++		else if (sweep_offset <= offset
++			 && offset < sweep_frame_offset)
++		  {
++		    /* the rest of variables under sweep_frame_offset,
++		       shift the location.  */
++		    XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
++						    -sweep_size);
++		    XEXP (home, 0)->used = 1;
++		  }
++	      }
++		
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == MEM)
++	      {
++		/* Process for dynamically allocated array.  */
++		home = XEXP (home, 0);
++	      }
++		
++	    if (GET_CODE (home) == MEM
++		&& GET_CODE (XEXP (home, 0)) == PLUS
++		&& XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
++		&& GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++	      {
++		if (! XEXP (home, 0)->used)
++		  {
++		    offset = AUTO_OFFSET(XEXP (home, 0));
++
++		    /* the operand related to the sweep variable.  */
++		    if (sweep_offset <= offset
++			&& offset < sweep_offset + sweep_size)
++		      {
++
++			offset
++			  += sweep_frame_offset - sweep_size - sweep_offset;
++			XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								      offset);
++
++			/* mark */
++			XEXP (home, 0)->used = 1;
++		      }
++		    else if (sweep_offset <= offset
++			     && offset < sweep_frame_offset)
++		      {
++			/* the rest of variables under sweep_frame_offset,
++			   so shift the location.  */
++
++			XEXP (XEXP (home, 0), 1)
++			  = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++
++			/* mark */
++			XEXP (home, 0)->used = 1;
++		      }
++		  }
++	      }
++	  }
++	}
++
++      sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
++			     sweep_offset, sweep_size);
++
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++/* Sweep character arrays declared as argument.  */
++static void
++sweep_string_in_args (tree parms, HOST_WIDE_INT sweep_offset,
++		      HOST_WIDE_INT sweep_size)
++{
++  rtx home;
++  HOST_WIDE_INT offset;
++    
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
++	  {
++	    home = DECL_INCOMING_RTL (parms);
++
++	    if (XEXP (home, 0)->used)
++	      continue;
++
++	    offset = AUTO_OFFSET(XEXP (home, 0));
++
++	    /* the operand related to the sweep variable.  */
++	    if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
++	      {
++		if (sweep_offset <= offset
++		    && offset < sweep_offset + sweep_size)
++		  {
++		    offset += sweep_frame_offset - sweep_size - sweep_offset;
++		    XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								  offset);
++
++		    /* mark */
++		    XEXP (home, 0)->used = 1;
++		  }
++		else if (sweep_offset <= offset
++			 && offset < sweep_frame_offset)
++		  {
++		    /* the rest of variables under sweep_frame_offset,
++		       shift the location.  */
++		    XEXP (XEXP (home, 0), 1)
++		      = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++
++		    /* mark */
++		    XEXP (home, 0)->used = 1;
++		  }
++	      }
++	  }
++      }
++}
++
++
++/* Set to 1 when the instruction contains virtual registers.  */
++static int has_virtual_reg;
++
++/* Sweep the specified character array for every insns. The array starts from
++   the sweep_offset and its size is sweep_size.  */
++static void
++sweep_string_use_of_insns (rtx insn, HOST_WIDE_INT sweep_offset,
++			   HOST_WIDE_INT sweep_size)
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	has_virtual_reg = FALSE;
++	sweep_string_in_operand (insn, &PATTERN (insn),
++				 sweep_offset, sweep_size);
++	sweep_string_in_operand (insn, &REG_NOTES (insn),
++				 sweep_offset, sweep_size);
++      }
++}
++
++
++/* Sweep the specified character array, which starts from the sweep_offset and
++   its size is sweep_size.
++
++   When a pointer is given,
++   if it points to an address higher than the array, it stays.
++   if it points to an address inside the array, it changes to point inside
++   the swept array.
++   if it points to an address lower than the array, it shifts to a higher
++   address by the sweep_size.  */
++static void
++sweep_string_in_operand (rtx insn, rtx *loc,
++			 HOST_WIDE_INT sweep_offset, HOST_WIDE_INT sweep_size)
++{
++  rtx x = *loc;
++  enum rtx_code code;
++  int i, j, k = 0;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case ADDRESSOF:
++      return;
++	    
++    case REG:
++      if (x == virtual_incoming_args_rtx
++	  || x == virtual_stack_vars_rtx
++	  || x == virtual_stack_dynamic_rtx
++	  || x == virtual_outgoing_args_rtx
++	  || x == virtual_cfa_rtx)
++	has_virtual_reg = TRUE;
++      return;
++      
++    case SET:
++      /*
++	skip setjmp setup insn and setjmp restore insn
++	Example:
++	(set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
++	(set (virtual_stack_vars_rtx) (REG))
++      */
++      if (GET_CODE (XEXP (x, 0)) == MEM
++	  && XEXP (x, 1) == virtual_stack_vars_rtx)
++	return;
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == REG)
++	return;
++      break;
++	    
++    case PLUS:
++      /* Handle typical case of frame register plus constant.  */
++      if (XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
++	{
++	  if (x->used)
++	    goto single_use_of_virtual_reg;
++	  
++	  offset = AUTO_OFFSET(x);
++
++	  /* When arguments grow downward, the virtual incoming
++	     args pointer points to the top of the argument block,
++	     so block is identified by the pointer - 1.
++	     The flag is set at the copy_rtx_and_substitute in integrate.c  */
++	  if (RTX_INTEGRATED_P (x))
++	    k = -1;
++
++	  /* the operand related to the sweep variable.  */
++	  if (sweep_offset <= offset + k
++	      && offset + k < sweep_offset + sweep_size)
++	    {
++	      offset += sweep_frame_offset - sweep_size - sweep_offset;
++
++	      XEXP (x, 0) = virtual_stack_vars_rtx;
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	      x->used = 1;
++	    }
++	  else if (sweep_offset <= offset + k
++		   && offset + k < sweep_frame_offset)
++	    {
++	      /* the rest of variables under sweep_frame_offset,
++		 shift the location.  */
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
++	      x->used = 1;
++	    }
++	  
++	single_use_of_virtual_reg:
++	  if (has_virtual_reg) {
++	    /* excerpt from insn_invalid_p in recog.c  */
++	    int icode = recog_memoized (insn);
++
++	    if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
++	      {
++		rtx temp, seq;
++		
++		start_sequence ();
++		temp = force_operand (x, NULL_RTX);
++		seq = get_insns ();
++		end_sequence ();
++		
++		emit_insn_before (seq, insn);
++		if (! validate_change (insn, loc, temp, 0)
++		    && !validate_replace_rtx (x, temp, insn))
++		  fatal_insn ("sweep_string_in_operand", insn);
++	      }
++	  }
++
++	  has_virtual_reg = TRUE;
++	  return;
++	}
++
++#ifdef FRAME_GROWS_DOWNWARD
++      /* Alert the case of frame register plus constant given by reg.  */
++      else if (XEXP (x, 0) == virtual_stack_vars_rtx
++	       && GET_CODE (XEXP (x, 1)) == REG)
++	fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
++#endif
++
++      /*
++	process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      sweep_string_use_of_insns (XEXP (x, i),
++					 sweep_offset, sweep_size);
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	/*
++	  virtual_stack_vars_rtx without offset
++	  Example:
++	    (set (reg:SI xx) (reg:SI 78))
++	    (set (reg:SI xx) (MEM (reg:SI 78)))
++	*/
++	if (XEXP (x, i) == virtual_stack_vars_rtx)
++	  fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
++	sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
++}   
++
++
++/* Change the use of an argument to the use of the duplicated variable for
++   every insns, The variable is addressed by new rtx.  */
++static void
++change_arg_use_of_insns (rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	rtx seq;
++	
++	start_sequence ();
++	change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
++
++	seq = get_insns ();
++	end_sequence ();
++	emit_insn_before (seq, insn);
++
++	/* load_multiple insn from virtual_incoming_args_rtx have several
++	   load insns. If every insn change the load address of arg
++	   to frame region, those insns are moved before the PARALLEL insn
++	   and remove the PARALLEL insn.  */
++	if (GET_CODE (PATTERN (insn)) == PARALLEL
++	    && XVECLEN (PATTERN (insn), 0) == 0)
++	  delete_insn (insn);
++      }
++}
++
++
++/* Change the use of an argument to the use of the duplicated variable for
++   every rtx derived from the x.  */
++static void
++change_arg_use_in_operand (rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)
++{
++  enum rtx_code code;
++  int i, j;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return;
++
++    case MEM:
++      /* Handle special case of MEM (incoming_args).  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 0) == virtual_incoming_args_rtx)
++	{
++	  offset = 0;
++
++	  /* the operand related to the sweep variable.  */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
++
++	    offset = AUTO_OFFSET(XEXP (*new, 0))
++	      + (offset - AUTO_OFFSET(XEXP (orig, 0)));
++
++	    XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
++	    XEXP (x, 0)->used = 1;
++
++	    return;
++	  }
++	}
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 0) == virtual_incoming_args_rtx
++	  && GET_CODE (XEXP (x, 1)) == CONST_INT
++	  && ! x->used)
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  /* the operand related to the sweep variable.  */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
++	    {
++
++	      offset = (AUTO_OFFSET(XEXP (*new, 0))
++			+ (offset - AUTO_OFFSET(XEXP (orig, 0))));
++
++	      XEXP (x, 0) = virtual_stack_vars_rtx;
++	      XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	      x->used = 1;
++
++	      return;
++	    }
++
++	  /*
++	    process further subtree:
++	    Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	    (const_int 5))
++	  */
++	}
++      break;
++
++    case SET:
++      /* Handle special case of "set (REG or MEM) (incoming_args)".
++	 It means that the address of the 1st argument is stored.  */
++      if (GET_CODE (orig) == MEM
++	  && XEXP (x, 1) == virtual_incoming_args_rtx)
++	{
++	  offset = 0;
++
++	  /* the operand related to the sweep variable.  */
++	  if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
++	      offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
++	    {
++	      offset = (AUTO_OFFSET(XEXP (*new, 0))
++			+ (offset - AUTO_OFFSET(XEXP (orig, 0))));
++
++	      XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
++							  offset), NULL_RTX);
++	      XEXP (x, 1)->used = 1;
++
++	      return;
++	    }
++	}
++      break;
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      change_arg_use_of_insns (XEXP (x, i), orig, new, size);
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    case PARALLEL:
++      for (j = 0; j < XVECLEN (x, 0); j++)
++	{
++	  change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
++	}
++      if (recog_memoized (insn) < 0)
++	{
++	  for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
++	    {
++	      /* if parallel insn has a insn used virtual_incoming_args_rtx,
++		 the insn is removed from this PARALLEL insn.  */
++	      if (check_used_flag (XVECEXP (x, 0, j)))
++		{
++		  emit_insn (XVECEXP (x, 0, j));
++		  XVECEXP (x, 0, j) = NULL;
++		}
++	      else
++		XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
++	    }
++	  PUT_NUM_ELEM (XVEC (x, 0), i);
++	}
++      return;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (XEXP (x, i) == orig)
++	  {
++	    if (*new == 0)
++	      *new = gen_reg_rtx (GET_MODE (orig));
++	    XEXP (x, i) = *new;
++	    continue;
++	  }
++	change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	{
++	  if (XVECEXP (x, i, j) == orig)
++	    {
++	      if (*new == 0)
++		*new = gen_reg_rtx (GET_MODE (orig));
++	      XVECEXP (x, i, j) = *new;
++	      continue;
++	    }
++	  change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
++	}
++}   
++
++
++/* Validate every instruction from the specified instruction.
++   
++   The stack protector prohibits generating machine specific frame addressing
++   for the first rtl generation. The prepare_stack_protection must convert
++   machine independent frame addressing to machine specific frame addressing,
++   so instructions for inline functions, which skip the conversion of
++   the stack protection, validate every instruction.  */
++static void
++validate_insns_of_varrefs (rtx insn)
++{
++  rtx next;
++
++  /* Initialize recognition, indicating that volatile is OK.  */
++  init_recog ();
++
++  for (; insn; insn = next)
++    {
++      next = NEXT_INSN (insn);
++      if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	  || GET_CODE (insn) == CALL_INSN)
++	{
++	  /* excerpt from insn_invalid_p in recog.c  */
++	  int icode = recog_memoized (insn);
++
++	  if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
++	    validate_operand_of_varrefs (insn, &PATTERN (insn));
++	}
++    }
++
++  init_recog_no_volatile ();
++}
++
++
++/* Validate frame addressing of the rtx and convert it to a machine specific one.  */
++static void
++validate_operand_of_varrefs (rtx insn, rtx *loc)
++{
++  enum rtx_code code;
++  rtx x, temp, seq;
++  int i, j;
++  const char *fmt;
++
++  x = *loc;
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case USE:
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return;
++
++    case PLUS:
++      /* validate insn of frame register plus constant.  */
++      if (GET_CODE (x) == PLUS
++	  && XEXP (x, 0) == virtual_stack_vars_rtx
++	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
++	{
++	  start_sequence ();
++
++	  { /* excerpt from expand_binop in optabs.c  */
++	    optab binoptab = add_optab;
++	    enum machine_mode mode = GET_MODE (x);
++	    int icode = (int) binoptab->handlers[(int) mode].insn_code;
++	    enum machine_mode mode1 = insn_data[icode].operand[2].mode;
++	    rtx pat;
++	    rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
++	    temp = gen_reg_rtx (mode);
++
++	    /* Now, if insn's predicates don't allow offset operands,
++	       put them into pseudo regs.  */
++
++	    if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
++		&& mode1 != VOIDmode)
++	      xop1 = copy_to_mode_reg (mode1, xop1);
++
++	    pat = GEN_FCN (icode) (temp, xop0, xop1);
++	    if (pat)
++	      emit_insn (pat);
++	    else
++	      abort (); /* there must be add_optab handler.  */
++	  }	      
++	  seq = get_insns ();
++	  end_sequence ();
++	  
++	  emit_insn_before (seq, insn);
++	  if (! validate_change (insn, loc, temp, 0))
++	    abort ();
++	  return;
++	}
++	break;
++      
++
++    case CALL_PLACEHOLDER:
++      for (i = 0; i < 3; i++)
++	{
++	  rtx seq = XEXP (x, i);
++	  if (seq)
++	    {
++	      push_to_sequence (seq);
++	      validate_insns_of_varrefs (XEXP (x, i));
++	      XEXP (x, i) = get_insns ();
++	      end_sequence ();
++	    }
++	}
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      validate_operand_of_varrefs (insn, &XEXP (x, i));
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
++}
++
++
++
++/* Return size that is not allocated for stack frame. It will be allocated
++   to modify the home of pseudo registers called from global_alloc.  */
++HOST_WIDE_INT
++get_frame_free_size (void)
++{
++  if (! flag_propolice_protection)
++    return 0;
++
++  return push_allocated_offset - push_frame_offset;
++}
++
++
++/* The following codes are invoked after the instantiation of pseudo registers.
++
++   Reorder local variables to place a pseudo register after buffers to avoid
++   the corruption of local variables that could be used to further corrupt
++   arbitrary memory locations.  */
++#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
++static void push_frame (HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_in_operand (rtx, rtx, HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT, HOST_WIDE_INT);
++static void push_frame_of_reg_equiv_constant (HOST_WIDE_INT, HOST_WIDE_INT);
++static void reset_used_flags_for_push_frame (void);
++static int check_out_of_frame_access (rtx, HOST_WIDE_INT);
++static int check_out_of_frame_access_in_operand (rtx, HOST_WIDE_INT);
++#endif
++
++
++/* Assign stack local at the stage of register allocation. If a pseudo reg is
++   spilled out from such an allocation, it is allocated on the stack.
++   The protector keeps the location in a lower stack region than the location
++   of swept arrays.  */
++rtx
++assign_stack_local_for_pseudo_reg (enum machine_mode mode,
++				   HOST_WIDE_INT size, int align)
++{
++#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
++  return assign_stack_local (mode, size, align);
++#else
++  tree blocks = DECL_INITIAL (current_function_decl);
++  rtx new;
++  HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
++  int first_call_from_purge_addressof, first_call_from_global_alloc;
++
++  if (! flag_propolice_protection
++      || size == 0
++      || ! blocks
++      || current_function_is_inlinable
++      || ! search_string_from_argsandvars (CALL_FROM_PUSH_FRAME)
++      || current_function_contains_functions)
++    return assign_stack_local (mode, size, align);
++
++  first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
++  first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
++  saved_cse_not_expected = cse_not_expected;
++
++  starting_frame = ((STARTING_FRAME_OFFSET)
++		    ? STARTING_FRAME_OFFSET : BIGGEST_ALIGNMENT / BITS_PER_UNIT);
++  units_per_push = MAX (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
++			GET_MODE_SIZE (mode));
++    
++  if (first_call_from_purge_addressof)
++    {
++      push_frame_offset = push_allocated_offset;
++      if (check_out_of_frame_access (get_insns (), starting_frame))
++	{
++	  /* After the purge_addressof stage, there may be an instruction which
++	     has a pointer less than the starting_frame.
++	     If there is an access below the frame, push a dummy region to separate
++	     the address of instantiated variables.  */
++	  push_frame (GET_MODE_SIZE (DImode), 0);
++	  assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
++	}
++    }
++
++  if (first_call_from_global_alloc)
++    {
++      push_frame_offset = push_allocated_offset = 0;
++      if (check_out_of_frame_access (get_insns (), starting_frame))
++	{
++	  if (STARTING_FRAME_OFFSET)
++	    {
++	      /* if there is an access below frame, push dummy region 
++		 to separate the address of instantiated variables.  */
++	      push_frame (GET_MODE_SIZE (DImode), 0);
++	      assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
++	    }
++	  else
++	    push_allocated_offset = starting_frame;
++	}
++    }
++
++  saved_frame_offset = frame_offset;
++  frame_offset = push_frame_offset;
++
++  new = assign_stack_local (mode, size, align);
++
++  push_frame_offset = frame_offset;
++  frame_offset = saved_frame_offset;
++  
++  if (push_frame_offset > push_allocated_offset)
++    {
++      push_frame (units_per_push,
++		  push_allocated_offset + STARTING_FRAME_OFFSET);
++
++      assign_stack_local (BLKmode, units_per_push, -1);
++      push_allocated_offset += units_per_push;
++    }
++
++  /* At the second call from global alloc, alpha push frame and assign
++     a local variable to the top of the stack.  */
++  if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
++    push_frame_offset = push_allocated_offset = 0;
++
++  return new;
++#endif
++}
++
++
++#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
++
++/* push frame information for instantiating pseudo register at the top of stack.
++   This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is 
++   not defined.
++
++   It is called by purge_addressof function and global_alloc (or reload)
++   function.  */
++static void
++push_frame (HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)
++{
++  reset_used_flags_for_push_frame();
++
++  /* Scan all declarations of variables and fix the offset address of
++     the variable based on the frame pointer.  */
++  push_frame_in_decls (DECL_INITIAL (current_function_decl),
++		       var_size, boundary);
++
++  /* Scan all argument variable and fix the offset address based on
++     the frame pointer.  */
++  push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
++		      var_size, boundary);
++
++  /* Scan all operands of all insns and fix the offset address
++     based on the frame pointer.  */
++  push_frame_of_insns (get_insns (), var_size, boundary);
++
++  /* Scan all reg_equiv_memory_loc and reg_equiv_constant.  */
++  push_frame_of_reg_equiv_memory_loc (var_size, boundary);
++  push_frame_of_reg_equiv_constant (var_size, boundary);
++
++  reset_used_flags_for_push_frame();
++}
++
++
++/* Reset used flag of every insns, reg_equiv_memory_loc,
++   and reg_equiv_constant.  */
++static void
++reset_used_flags_for_push_frame(void)
++{
++  int i;
++  extern rtx *reg_equiv_memory_loc;
++  extern rtx *reg_equiv_constant;
++
++  /* Clear all the USED bits in operands of all insns and declarations of
++     local vars.  */
++  reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
++  reset_used_flags_for_insns (get_insns ());
++
++
++  /* The following codes are processed if the push_frame is called from 
++     global_alloc (or reload) function.  */
++  if (reg_equiv_memory_loc == 0)
++    return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_memory_loc[i])
++      {
++	rtx x = reg_equiv_memory_loc[i];
++
++	if (GET_CODE (x) == MEM
++	    && GET_CODE (XEXP (x, 0)) == PLUS
++	    && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
++	  {
++	    /* reset */
++	    XEXP (x, 0)->used = 0;
++	  }
++      }
++
++  
++  if (reg_equiv_constant == 0)
++    return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_constant[i])
++      {
++	rtx x = reg_equiv_constant[i];
++
++	if (GET_CODE (x) == PLUS
++	    && AUTO_BASEPTR (x) == frame_pointer_rtx)
++	  {
++	    /* reset */
++	    x->used = 0;
++	  }
++      }
++}
++
++
++/* Push every variable declared as a local variable and make room for
++   instantiated register.  */
++static void
++push_frame_in_decls (tree block, HOST_WIDE_INT push_size,
++		     HOST_WIDE_INT boundary)
++{
++  tree types;
++  HOST_WIDE_INT offset;
++  rtx home;
++
++  while (block && TREE_CODE(block)==BLOCK)
++    {
++      for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
++	{
++	  /* Skip the declaration that refers an external variable and
++	     also skip an global variable.  */
++	  if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
++	    {
++	      if (! DECL_RTL_SET_P (types))
++		continue;
++
++	      home = DECL_RTL (types);
++
++	      /* Process for static local variable.  */
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
++		continue;
++
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == REG)
++		{
++		  if (XEXP (home, 0) != frame_pointer_rtx
++		      || boundary != 0)
++		    continue;
++
++		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
++						  push_size);
++
++		  /* mark */
++		  XEXP (home, 0)->used = 1;
++		}
++		
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == MEM)
++		{
++		  /* Process for dynamically allocated array.  */
++		  home = XEXP (home, 0);
++		}
++		
++	      if (GET_CODE (home) == MEM
++		  && GET_CODE (XEXP (home, 0)) == PLUS
++		  && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
++		{
++		  offset = AUTO_OFFSET(XEXP (home, 0));
++
++		  if (! XEXP (home, 0)->used
++		      && offset >= boundary)
++		    {
++		      offset += push_size;
++		      XEXP (XEXP (home, 0), 1)
++			= gen_rtx_CONST_INT (VOIDmode, offset);
++		      
++		      /* mark */
++		      XEXP (home, 0)->used = 1;
++		    }
++		}
++	    }
++	}
++
++      push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
++      block = BLOCK_CHAIN (block);
++    }
++}
++
++
++/* Push every variable declared as an argument and make room for
++   instantiated register.  */
++static void
++push_frame_in_args (tree parms, HOST_WIDE_INT push_size,
++		    HOST_WIDE_INT boundary)
++{
++  rtx home;
++  HOST_WIDE_INT offset;
++    
++  for (; parms; parms = TREE_CHAIN (parms))
++    if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
++      {
++	if (PARM_PASSED_IN_MEMORY (parms))
++	  {
++	    home = DECL_INCOMING_RTL (parms);
++	    offset = AUTO_OFFSET(XEXP (home, 0));
++
++	    if (XEXP (home, 0)->used || offset < boundary)
++	      continue;
++
++	    /* the operand related to the sweep variable.  */
++	    if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
++	      {
++		if (XEXP (home, 0) == frame_pointer_rtx)
++		  XEXP (home, 0) = plus_constant (frame_pointer_rtx,
++						  push_size);
++		else {
++		  offset += push_size;
++		  XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
++								offset);
++		}
++
++		/* mark */
++		XEXP (home, 0)->used = 1;
++	      }
++	  }
++      }
++}
++
++
++/* Set to 1 when the instruction has the reference to be pushed.  */
++static int insn_pushed;
++
++/* Tables of equivalent registers with frame pointer.  */
++static int *fp_equiv = 0;
++
++
++/* Push the frame region to make a room for allocated local variable.  */
++static void
++push_frame_of_insns (rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
++{
++  /* init fp_equiv */
++  fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
++		
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	rtx last;
++	
++	insn_pushed = FALSE;
++
++	/* Push frame in INSN operation.  */
++	push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
++
++	/* Push frame in NOTE.  */
++	push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
++
++	/* Push frame in CALL EXPR_LIST.  */
++	if (GET_CODE (insn) == CALL_INSN)
++	  push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
++				 push_size, boundary);
++
++	/* Pushed frame addressing style may not be machine specific one.
++	   so the instruction should be converted to use the machine specific
++	   frame addressing.  */
++	if (insn_pushed
++	    && (last = try_split (PATTERN (insn), insn, 1)) != insn)
++	  {
++	    rtx first = NEXT_INSN (insn);
++	    rtx trial = NEXT_INSN (first);
++	    rtx pattern = PATTERN (trial);
++	    rtx set;
++
++	    /* Update REG_EQUIV info to the first splitted insn.  */
++	    if ((set = single_set (insn))
++		&& find_reg_note (insn, REG_EQUIV, SET_SRC (set))
++		&& GET_CODE (PATTERN (first)) == SET)
++	      {
++		REG_NOTES (first)
++		  = gen_rtx_EXPR_LIST (REG_EQUIV,
++				       SET_SRC (PATTERN (first)),
++				       REG_NOTES (first));
++	      }
++
++	    /* copy the first insn of splitted insns to the original insn and
++	       delete the first insn,
++	       because the original insn is pointed from records:
++	       insn_chain, reg_equiv_init, used for global_alloc.  */
++	    if (cse_not_expected)
++	      {
++		add_insn_before (insn, first);
++		
++		/* Copy the various flags, and other information.  */
++		memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
++		PATTERN (insn) = PATTERN (first);
++		REG_NOTES (insn) = REG_NOTES (first);
++
++		/* then remove the first insn of splitted insns.  */
++		remove_insn (first);
++		INSN_DELETED_P (first) = 1;
++	      }
++
++	    if (GET_CODE (pattern) == SET
++		&& GET_CODE (XEXP (pattern, 0)) == REG
++		&& GET_CODE (XEXP (pattern, 1)) == PLUS
++		&& XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
++		&& GET_CODE (XEXP (XEXP (pattern, 1), 1)) == CONST_INT)
++	      {
++		rtx offset = XEXP (XEXP (pattern, 1), 1);
++		fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
++
++		delete_insn (trial);
++	      }
++
++	    insn = last;
++	  }
++      }
++
++  /* Clean up.  */
++  free (fp_equiv);
++}
++
++
++/* Push the frame region by changing the operand that points the frame.  */
++static void
++push_frame_in_operand (rtx insn, rtx orig,
++		       HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
++{
++  rtx x = orig;
++  enum rtx_code code;
++  int i, j;
++  HOST_WIDE_INT offset;
++  const char *fmt;
++
++  if (x == 0)
++    return;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++    case USE:
++      return;
++	    
++    case SET:
++      /*
++	Skip setjmp setup insn and setjmp restore insn
++	alpha case:
++	(set (MEM (reg:SI xx)) (frame_pointer_rtx)))
++	(set (frame_pointer_rtx) (REG))
++      */
++      if (GET_CODE (XEXP (x, 0)) == MEM
++	  && XEXP (x, 1) == frame_pointer_rtx)
++	return;
++      if (XEXP (x, 0) == frame_pointer_rtx
++	  && GET_CODE (XEXP (x, 1)) == REG)
++	return;
++
++      /*
++	powerpc case: restores setjmp address
++	(set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
++	or
++	(set (reg) (plus frame_pointer_rtx const_int -n))
++	(set (frame_pointer_rtx) (reg))
++      */
++      if (GET_CODE (XEXP (x, 0)) == REG
++	  && GET_CODE (XEXP (x, 1)) == PLUS
++	  && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
++	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
++	  && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
++	{
++	  x = XEXP (x, 1);
++	  offset = AUTO_OFFSET(x);
++	  if (x->used || -offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
++	  x->used = 1; insn_pushed = TRUE;
++	  return;
++	}
++
++      /* Reset fp_equiv register.  */
++      else if (GET_CODE (XEXP (x, 0)) == REG
++	  && fp_equiv[REGNO (XEXP (x, 0))])
++	fp_equiv[REGNO (XEXP (x, 0))] = 0;
++
++      /* Propagate fp_equiv register.  */
++      else if (GET_CODE (XEXP (x, 0)) == REG
++	       && GET_CODE (XEXP (x, 1)) == REG
++	       && fp_equiv[REGNO (XEXP (x, 1))])
++	if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
++	    || reg_renumber[REGNO (XEXP (x, 0))] > 0)
++	  fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
++      break;
++
++    case MEM:
++      if (XEXP (x, 0) == frame_pointer_rtx
++	  && boundary == 0)
++	{
++	  XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
++	  XEXP (x, 0)->used = 1; insn_pushed = TRUE;
++	  return;
++	}
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (GET_CODE (XEXP (x, 1)) == CONST_INT
++	  && XEXP (x, 0) == frame_pointer_rtx)
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  if (x->used || offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle alpha case:
++	 (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
++      */
++      if (GET_CODE (XEXP (x, 1)) == CONST_INT
++	  && GET_CODE (XEXP (x, 0)) == SUBREG
++	  && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  if (x->used || offset < boundary)
++	    return;
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle powerpc case:
++	 (set (reg x) (plus fp const))
++	 (set (.....) (... (plus (reg x) (const B))))
++      */
++      else if (GET_CODE (XEXP (x, 1)) == CONST_INT
++	       && GET_CODE (XEXP (x, 0)) == REG
++	       && fp_equiv[REGNO (XEXP (x, 0))])
++	{
++	  offset = AUTO_OFFSET(x);
++
++	  if (x->used)
++	    return;
++
++	  offset += fp_equiv[REGNO (XEXP (x, 0))];
++
++	  XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++	  x->used = 1; insn_pushed = TRUE;
++
++	  return;
++	}
++      /*
++	Handle special case of frame register plus reg (constant).
++	 (set (reg x) (const B))
++	 (set (....) (...(plus fp (reg x))))
++      */
++      else if (XEXP (x, 0) == frame_pointer_rtx
++	       && GET_CODE (XEXP (x, 1)) == REG
++	       && PREV_INSN (insn)
++	       && PATTERN (PREV_INSN (insn))
++	       && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
++	       && GET_CODE (SET_SRC (PATTERN (PREV_INSN (insn)))) == CONST_INT)
++	{
++	  offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
++
++	  if (x->used || offset < boundary)
++	    return;
++	  
++	  SET_SRC (PATTERN (PREV_INSN (insn)))
++	    = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
++	  x->used = 1;
++	  XEXP (x, 1)->used = 1;
++
++	  return;
++	}
++      /*
++	Handle special case of frame register plus reg (used).
++	The register already have a pushed offset, just mark this frame
++	addressing.
++      */
++      else if (XEXP (x, 0) == frame_pointer_rtx
++	       && XEXP (x, 1)->used)
++	{
++	  x->used = 1;
++	  return;
++	}
++      /*
++	Process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (FP) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      push_frame_of_insns (XEXP (x, 0), push_size, boundary);
++      push_frame_of_insns (XEXP (x, 1), push_size, boundary);
++      push_frame_of_insns (XEXP (x, 2), push_size, boundary);
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
++	  fatal_insn ("push_frame_in_operand", insn);
++	push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
++}   
++
++
++/* Change the location pointed in reg_equiv_memory_loc.  */
++static void
++push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT push_size,
++				    HOST_WIDE_INT boundary)
++{
++  int i;
++  extern rtx *reg_equiv_memory_loc;
++
++  /* This function is processed if the push_frame is called from 
++     global_alloc (or reload) function.  */
++  if (reg_equiv_memory_loc == 0)
++    return;
++
++  for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
++    if (reg_equiv_memory_loc[i])
++      {
++	rtx x = reg_equiv_memory_loc[i];
++	int offset;
++
++	if (GET_CODE (x) == MEM
++	    && GET_CODE (XEXP (x, 0)) == PLUS
++	    && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
++	  {
++	    offset = AUTO_OFFSET(XEXP (x, 0));
++	    
++	    if (! XEXP (x, 0)->used
++		&& offset >= boundary)
++	      {
++		offset += push_size;
++		XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		/* mark */
++		XEXP (x, 0)->used = 1;
++	      }
++	  }
++	else if (GET_CODE (x) == MEM
++		 && XEXP (x, 0) == frame_pointer_rtx
++		 && boundary == 0)
++	  {
++	    XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
++	    XEXP (x, 0)->used = 1; insn_pushed = TRUE;
++	  }
++      }
++}
++
++
++/* Change the location pointed in reg_equiv_constant.  */
++static void
++push_frame_of_reg_equiv_constant (HOST_WIDE_INT push_size,
++				  HOST_WIDE_INT boundary)
++{
++  int i;
++  extern rtx *reg_equiv_constant;
++
++  /* This function is processed if the push_frame is called from 
++     global_alloc (or reload) function.  */
++  if (reg_equiv_constant == 0)
++    return;
++
++  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
++    if (reg_equiv_constant[i])
++      {
++	rtx x = reg_equiv_constant[i];
++	int offset;
++
++	if (GET_CODE (x) == PLUS
++	    && XEXP (x, 0) == frame_pointer_rtx)
++	  {
++	    offset = AUTO_OFFSET(x);
++	    
++	    if (! x->used
++		&& offset >= boundary)
++	      {
++		offset += push_size;
++		XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
++
++		/* mark */
++		x->used = 1;
++	      }
++	  }
++	else if (x == frame_pointer_rtx
++		 && boundary == 0)
++	  {
++	    reg_equiv_constant[i]
++	      = plus_constant (frame_pointer_rtx, push_size);
++	    reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
++	  }
++      }
++}
++
++
++/* Check every instructions if insn's memory reference is out of frame.  */
++static int
++check_out_of_frame_access (rtx insn, HOST_WIDE_INT boundary)
++{
++  for (; insn; insn = NEXT_INSN (insn))
++    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
++	|| GET_CODE (insn) == CALL_INSN)
++      {
++	if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
++	  return TRUE;
++      }
++  return FALSE;
++}
++
++
++/* Check every operands if the reference is out of frame.  */
++static int
++check_out_of_frame_access_in_operand (rtx orig, HOST_WIDE_INT boundary)
++{
++  rtx x = orig;
++  enum rtx_code code;
++  int i, j;
++  const char *fmt;
++
++  if (x == 0)
++    return FALSE;
++
++  code = GET_CODE (x);
++
++  switch (code)
++    {
++    case CONST_INT:
++    case CONST_DOUBLE:
++    case CONST:
++    case SYMBOL_REF:
++    case CODE_LABEL:
++    case PC:
++    case CC0:
++    case ASM_INPUT:
++    case ADDR_VEC:
++    case ADDR_DIFF_VEC:
++    case RETURN:
++    case REG:
++    case ADDRESSOF:
++      return FALSE;
++	    
++    case MEM:
++      if (XEXP (x, 0) == frame_pointer_rtx)
++	if (0 < boundary)
++	  return TRUE;
++      break;
++      
++    case PLUS:
++      /* Handle special case of frame register plus constant.  */
++      if (GET_CODE (XEXP (x, 1)) == CONST_INT
++	  && XEXP (x, 0) == frame_pointer_rtx)
++	{
++	  if (0 <= AUTO_OFFSET(x)
++	      && AUTO_OFFSET(x) < boundary)
++	    return TRUE;
++	  return FALSE;
++	}
++      /*
++	Process further subtree:
++	Example:  (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
++	(const_int 5))
++      */
++      break;
++
++    case CALL_PLACEHOLDER:
++      if (check_out_of_frame_access (XEXP (x, 0), boundary))
++	return TRUE;
++      if (check_out_of_frame_access (XEXP (x, 1), boundary))
++	return TRUE;
++      if (check_out_of_frame_access (XEXP (x, 2), boundary))
++	return TRUE;
++      break;
++
++    default:
++      break;
++    }
++
++  /* Scan all subexpressions.  */
++  fmt = GET_RTX_FORMAT (code);
++  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
++    if (*fmt == 'e')
++      {
++	if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
++	  return TRUE;
++      }
++    else if (*fmt == 'E')
++      for (j = 0; j < XVECLEN (x, i); j++)
++	if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
++	  return TRUE;
++
++  return FALSE;
++}
++#endif
+diff -Naur gcc-3.4.1.orig/gcc/protector.h gcc-3.4.1.ssp/gcc/protector.h
+--- gcc-3.4.1.orig/gcc/protector.h	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/protector.h	2004-01-20 02:01:39.000000000 +0000
+@@ -0,0 +1,55 @@
++/* RTL buffer overflow protection function for GNU C compiler
++   Copyright (C) 2003 Free Software Foundation, Inc.
++
++This file is part of GCC.
++
++GCC is free software; you can redistribute it and/or modify it under
++the terms of the GNU General Public License as published by the Free
++Software Foundation; either version 2, or (at your option) any later
++version.
++
++GCC is distributed in the hope that it will be useful, but WITHOUT ANY
++WARRANTY; without even the implied warranty of MERCHANTABILITY or
++FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
++for more details.
++
++You should have received a copy of the GNU General Public License
++along with GCC; see the file COPYING.  If not, write to the Free
++Software Foundation, 59 Temple Place - Suite 330, Boston, MA
++02111-1307, USA.  */
++
++
++/* Declare GUARD variable.  */
++#define GUARD_m		Pmode
++#define UNITS_PER_GUARD						\
++  MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
++
++#ifndef L_stack_smash_handler
++
++/* Insert a guard variable before a character buffer and change the order
++ of pointer variables, character buffers and pointer arguments.  */
++
++extern void prepare_stack_protection  (int);
++
++#ifdef TREE_CODE
++/* Search a character array from the specified type tree.  */
++
++extern int search_string_def (tree);
++#endif
++
++/* Examine whether the input contains frame pointer addressing.  */
++
++extern int contains_fp (rtx);
++
++/* Return size that is not allocated for stack frame. It will be allocated
++   to modify the home of pseudo registers called from global_alloc.  */
++
++extern HOST_WIDE_INT get_frame_free_size (void);
++
++/* Allocate a local variable in the stack area before character buffers
++   to avoid the corruption of it.  */
++
++extern rtx assign_stack_local_for_pseudo_reg (enum machine_mode,
++					      HOST_WIDE_INT, int);
++
++#endif
+diff -Naur gcc-3.4.1.orig/gcc/reload1.c gcc-3.4.1.ssp/gcc/reload1.c
+--- gcc-3.4.1.orig/gcc/reload1.c	2004-05-02 12:37:17.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/reload1.c	2004-08-30 00:57:22.000000000 +0000
+@@ -43,6 +43,7 @@
+ #include "toplev.h"
+ #include "except.h"
+ #include "tree.h"
++#include "protector.h"
+ 
+ /* This file contains the reload pass of the compiler, which is
+    run after register allocation has been done.  It checks that
+@@ -891,7 +892,7 @@
+       if (cfun->stack_alignment_needed)
+         assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
+ 
+-      starting_frame_size = get_frame_size ();
++      starting_frame_size = get_frame_size () - get_frame_free_size ();
+ 
+       set_initial_elim_offsets ();
+       set_initial_label_offsets ();
+@@ -955,7 +956,7 @@
+ 	setup_save_areas ();
+ 
+       /* If we allocated another stack slot, redo elimination bookkeeping.  */
+-      if (starting_frame_size != get_frame_size ())
++      if (starting_frame_size != get_frame_size () - get_frame_free_size ())
+ 	continue;
+ 
+       if (caller_save_needed)
+@@ -974,7 +975,7 @@
+ 
+       /* If we allocated any new memory locations, make another pass
+ 	 since it might have changed elimination offsets.  */
+-      if (starting_frame_size != get_frame_size ())
++      if (starting_frame_size != get_frame_size () - get_frame_free_size ())
+ 	something_changed = 1;
+ 
+       {
+@@ -1066,11 +1067,11 @@
+   if (insns_need_reload != 0 || something_needs_elimination
+       || something_needs_operands_changed)
+     {
+-      HOST_WIDE_INT old_frame_size = get_frame_size ();
++      HOST_WIDE_INT old_frame_size = get_frame_size () - get_frame_free_size ();
+ 
+       reload_as_needed (global);
+ 
+-      if (old_frame_size != get_frame_size ())
++      if (old_frame_size != get_frame_size () - get_frame_free_size ())
+ 	abort ();
+ 
+       if (num_eliminable)
+@@ -1958,7 +1959,7 @@
+       if (from_reg == -1)
+ 	{
+ 	  /* No known place to spill from => no slot to reuse.  */
+-	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
++	  x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
+ 				  inherent_size == total_size ? 0 : -1);
+ 	  if (BYTES_BIG_ENDIAN)
+ 	    /* Cancel the  big-endian correction done in assign_stack_local.
+diff -Naur gcc-3.4.1.orig/gcc/rtl.h gcc-3.4.1.ssp/gcc/rtl.h
+--- gcc-3.4.1.orig/gcc/rtl.h	2004-04-21 19:52:37.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/rtl.h	2004-08-30 00:57:22.000000000 +0000
+@@ -473,6 +473,18 @@
+ 			     __FUNCTION__);				\
+    _rtx; })
+ 
++#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9)	\
++  __extension__								\
++({ rtx const _rtx = (RTX);						\
++   if (GET_CODE(_rtx) != C1 && GET_CODE(_rtx) != C2			\
++       && GET_CODE(_rtx) != C3 && GET_CODE(_rtx) != C4			\
++       && GET_CODE(_rtx) != C5 && GET_CODE(_rtx) != C6			\
++       && GET_CODE(_rtx) != C7 && GET_CODE(_rtx) != C8			\
++       && GET_CODE(_rtx) != C9)						\
++     rtl_check_failed_flag  (NAME, _rtx, __FILE__, __LINE__,		\
++			     __FUNCTION__);				\
++   _rtx; })
++
+ extern void rtl_check_failed_flag (const char *, rtx, const char *,
+ 				   int, const char *)
+     ATTRIBUTE_NORETURN
+@@ -488,6 +500,7 @@
+ #define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)		(RTX)
+ #define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)		(RTX)
+ #define RTL_FLAG_CHECK8(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8)	(RTX)
++#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9)	(RTX)
+ #endif
+ 
+ #define CLEAR_RTX_FLAGS(RTX)	\
+@@ -583,9 +596,9 @@
+ #define LOG_LINKS(INSN)	XEXP(INSN, 7)
+ 
+ #define RTX_INTEGRATED_P(RTX)						\
+-  (RTL_FLAG_CHECK8("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN,		\
++  (RTL_FLAG_CHECK9("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN,		\
+ 		   JUMP_INSN, INSN_LIST, BARRIER, CODE_LABEL, CONST,	\
+-		   NOTE)->integrated)
++		   PLUS, NOTE)->integrated)
+ #define RTX_UNCHANGING_P(RTX)						\
+   (RTL_FLAG_CHECK3("RTX_UNCHANGING_P", (RTX), REG, MEM, CONCAT)->unchanging)
+ #define RTX_FRAME_RELATED_P(RTX)					\
+@@ -1125,6 +1138,10 @@
+   (RTL_FLAG_CHECK3("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS,		\
+ 		   ASM_INPUT)->volatil)
+ 
++/* 1 if RTX is an SET rtx that is not eliminated for the stack protection.  */
++#define SET_VOLATILE_P(RTX)					\
++  (RTL_FLAG_CHECK1("SET_VOLATILE_P", (RTX), SET)->volatil)
++
+ /* 1 if RTX is a mem that refers to an aggregate, either to the
+    aggregate itself of to a field of the aggregate.  If zero, RTX may
+    or may not be such a reference.  */
+diff -Naur gcc-3.4.1.orig/gcc/simplify-rtx.c gcc-3.4.1.ssp/gcc/simplify-rtx.c
+--- gcc-3.4.1.orig/gcc/simplify-rtx.c	2004-06-14 17:48:02.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/simplify-rtx.c	2004-08-30 00:57:22.000000000 +0000
+@@ -2286,6 +2286,7 @@
+   int n_ops = 2, input_ops = 2, input_consts = 0, n_consts;
+   int first, changed;
+   int i, j;
++  HOST_WIDE_INT fp_offset = 0;
+ 
+   memset (ops, 0, sizeof ops);
+ 
+@@ -2311,6 +2312,10 @@
+ 	  switch (this_code)
+ 	    {
+ 	    case PLUS:
++	    if (flag_propolice_protection
++		&& XEXP (this_op, 0) == virtual_stack_vars_rtx
++		&& GET_CODE (XEXP (this_op, 1)) == CONST_INT)
++	      fp_offset = INTVAL (XEXP (this_op, 1));
+ 	    case MINUS:
+ 	      if (n_ops == 7)
+ 		return NULL_RTX;
+@@ -2472,11 +2477,24 @@
+       && GET_CODE (ops[n_ops - 1].op) == CONST_INT
+       && CONSTANT_P (ops[n_ops - 2].op))
+     {
+-      rtx value = ops[n_ops - 1].op;
+-      if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
+-	value = neg_const_int (mode, value);
+-      ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
+-      n_ops--;
++      if (!flag_propolice_protection)
++	{
++	  rtx value = ops[n_ops - 1].op;
++	  if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
++	    value = neg_const_int (mode, value);
++	  ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
++	  n_ops--;
++	}
++      /* The stack protector keeps the addressing style of a local variable,
++	 so it doesn't use neg_const_int function not to change
++	 the offset value.  */
++      else {
++	HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
++	if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
++	  value = -value;
++	ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value);
++	n_ops--;
++      }
+     }
+ 
+   /* Count the number of CONSTs that we generated.  */
+@@ -2494,6 +2512,59 @@
+ 	  || (n_ops + n_consts == input_ops && n_consts <= input_consts)))
+     return NULL_RTX;
+ 
++  if (flag_propolice_protection)
++    {
++      /* keep the addressing style of local variables
++	 as (plus (virtual_stack_vars_rtx) (CONST_int x)).
++	 For the case array[r-1],
++	 converts from (+ (+VFP c1) (+r -1)) to (SET R (+VFP c1)) (+ R (+r -1)).
++
++	 This loop finds ops[i] which is the register for the frame
++	 addressing, Then, makes the frame addressing using the register and
++	 the constant of ops[n_ops - 1].  */
++      for (i = 0; i < n_ops; i++)
++#ifdef FRAME_GROWS_DOWNWARD
++	if (ops[i].op == virtual_stack_vars_rtx)
++#else
++	if (ops[i].op == virtual_stack_vars_rtx
++	    || ops[i].op == frame_pointer_rtx)
++#endif
++	  {
++	    if (GET_CODE (ops[n_ops - 1].op) == CONST_INT)
++	      {
++		HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
++		if (value >= fp_offset)
++		  {
++		    ops[i].op = plus_constant (ops[i].op, value);
++		    n_ops--;
++		  }
++		else
++		  {
++		    if (!force
++			&& (n_ops + 1 + n_consts > input_ops
++			    || (n_ops + 1 + n_consts == input_ops
++				&& n_consts <= input_consts)))
++		      return NULL_RTX;
++		    ops[n_ops - 1].op = GEN_INT (value-fp_offset);
++		    ops[i].op = plus_constant (ops[i].op, fp_offset);
++		  }
++	      }
++	    /* keep the following address pattern;
++	       (1) buf[BUFSIZE] is the first assigned variable.
++	       (+ (+ fp -BUFSIZE) BUFSIZE)
++	       (2) ((+ (+ fp 1) r) -1).  */
++	    else if (fp_offset != 0)
++	      return NULL_RTX;
++	    /* keep the (+ fp 0) pattern for the following case;
++	       (1) buf[i]: i: REG, buf: (+ fp 0) in !FRAME_GROWS_DOWNWARD
++	       (2) argument: the address is (+ fp 0).  */
++	    else if (fp_offset == 0)
++	      return NULL_RTX;
++
++	    break;
++	  }
++    }
++
+   /* Put a non-negated operand first, if possible.  */
+ 
+   for (i = 0; i < n_ops && ops[i].neg; i++)
+diff -Naur gcc-3.4.1.orig/gcc/testsuite/gcc.dg/ssp-warn.c gcc-3.4.1.ssp/gcc/testsuite/gcc.dg/ssp-warn.c
+--- gcc-3.4.1.orig/gcc/testsuite/gcc.dg/ssp-warn.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/testsuite/gcc.dg/ssp-warn.c	2003-11-21 08:41:19.000000000 +0000
+@@ -0,0 +1,32 @@
++/* { dg-do compile } */
++/* { dg-options "-fstack-protector" } */
++void
++test1()
++{
++  void intest1(int *a)
++    {
++      *a ++;
++    }
++  
++  char buf[80];
++
++  buf[0] = 0;
++} /* { dg-bogus "not protecting function: it contains functions" } */
++
++void
++test2(int n)
++{
++  char buf[80];
++  char vbuf[n];
++
++  buf[0] = 0;
++  vbuf[0] = 0;
++} /* { dg-bogus "not protecting variables: it has a variable length buffer" } */
++
++void
++test3()
++{
++  char buf[5];
++
++  buf[0] = 0;
++} /* { dg-bogus "not protecting function: buffer is less than 8 bytes long" } */
+diff -Naur gcc-3.4.1.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp gcc-3.4.1.ssp/gcc/testsuite/gcc.misc-tests/ssp-execute.exp
+--- gcc-3.4.1.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/testsuite/gcc.misc-tests/ssp-execute.exp	2003-11-22 00:28:12.000000000 +0000
+@@ -0,0 +1,48 @@
++#   Copyright (C) 2003 Free Software Foundation, Inc.
++
++# This program is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 2 of the License, or
++# (at your option) any later version.
++# 
++# This program is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
++# GNU General Public License for more details.
++# 
++# You should have received a copy of the GNU General Public License
++# along with this program; if not, write to the Free Software
++# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.  
++
++# Test the functionality of programs compiled with -fstack-protector.
++# ordering like c-torture options.
++set TORTURE_OPTIONS [list \
++    { -fstack-protector -O0 } \
++    { -fstack-protector -O1 } \
++    { -fstack-protector -O2 } \
++    { -fstack-protector -O2 -fomit-frame-pointer } \
++    { -fstack-protector -O3 -fomit-frame-pointer } \
++    { -fstack-protector -O3 -fomit-frame-pointer -funroll-loops } \
++    { -fstack-protector -O3 -fomit-frame-pointer -funroll-all-loops -finline-functions } \
++    { -fstack-protector -O3 -g } \
++    { -fstack-protector -Os } ]
++
++if $tracelevel then {
++    strace $tracelevel
++}
++
++# Load support procs.
++load_lib c-torture.exp
++
++#
++# main test loop
++#
++
++foreach src [lsort [glob -nocomplain $srcdir/$subdir/ssp-execute*.c]] {
++    # If we're only testing specific files and this isn't one of them, skip it.
++    if ![runtest_file_p $runtests $src] then {
++	continue
++    }
++
++    c-torture-execute $src
++}
+diff -Naur gcc-3.4.1.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c gcc-3.4.1.ssp/gcc/testsuite/gcc.misc-tests/ssp-execute1.c
+--- gcc-3.4.1.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/testsuite/gcc.misc-tests/ssp-execute1.c	2004-02-16 05:15:39.000000000 +0000
+@@ -0,0 +1,54 @@
++/* Test location changes of character array.  */
++
++void
++test(int i)
++{
++  int  ibuf1[10];
++  char buf[50];
++  int  ibuf2[10];
++  char buf2[50000];
++  int  ibuf3[10];
++  char *p;
++
++  /* c1: the frame offset of buf[0]
++     c2: the frame offset of buf2[0]
++  */
++  p= &buf[0]; *p=1;		/* expected rtl: (+ fp -c1) */
++  if (*p != buf[0])
++    abort();
++  p= &buf[5]; *p=2;		/* expected rtl: (+ fp -c1+5) */
++  if (*p != buf[5])
++    abort();
++  p= &buf[-1]; *p=3;		/* expected rtl: (+ (+ fp -c1) -1) */
++  if (*p != buf[-1])
++    abort();
++  p= &buf[49]; *p=4;		/* expected rtl: (+ fp -c1+49) */
++  if (*p != buf[49])
++    abort();
++  p = &buf[i+5]; *p=5;		/* expected rtl: (+ (+ fp -c1) (+ i 5)) */
++  if (*p != buf[i+5])
++    abort ();
++  p = buf - 1; *p=6;		/* expected rtl: (+ (+ fp -c1) -1) */
++  if (*p != buf[-1])
++    abort ();
++  p = 1 + buf; *p=7;		/* expected rtl: (+ (+ fp -c1) 1) */
++  if (*p != buf[1])
++    abort ();
++  p = &buf[1] - 1; *p=8;	/* expected rtl: (+ (+ fp -c1+1) -1) */
++  if (*p != buf[0])
++    abort ();
++
++  /* test big offset which is greater than the max value of signed 16 bit integer.  */
++  p = &buf2[45555]; *p=9;	/* expected rtl: (+ fp -c2+45555) */
++  if (*p != buf2[45555])
++    abort ();
++}
++
++int main()
++{
++  test(10);
++  exit(0);
++}
++
++
++  
+diff -Naur gcc-3.4.1.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c gcc-3.4.1.ssp/gcc/testsuite/gcc.misc-tests/ssp-execute2.c
+--- gcc-3.4.1.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/testsuite/gcc.misc-tests/ssp-execute2.c	2003-11-22 08:44:33.000000000 +0000
+@@ -0,0 +1,49 @@
++void
++test(int i, char *j, int k)
++{
++  int  a[10];
++  char b;
++  int  c;
++  long *d;
++  char buf[50];
++  long e[10];
++  int  n;
++
++  a[0] = 4;
++  b = 5;
++  c = 6;
++  d = (long*)7;
++  e[0] = 8;
++
++  /* overflow buffer */
++  for (n = 0; n < 120; n++)
++    buf[n] = 0;
++  
++  if (j == 0 || *j != 2)
++    abort ();
++  if (a[0] == 0)
++    abort ();
++  if (b == 0)
++    abort ();
++  if (c == 0)
++    abort ();
++  if (d == 0)
++    abort ();
++  if (e[0] == 0)
++    abort ();
++
++  exit (0);
++}
++
++int main()
++{
++  int i, k;
++  int j[40];
++  i = 1;
++  j[39] = 2;
++  k = 3;
++  test(i, &j[39], k);
++}
++
++
++  
+diff -Naur gcc-3.4.1.orig/gcc/toplev.c gcc-3.4.1.ssp/gcc/toplev.c
+--- gcc-3.4.1.orig/gcc/toplev.c	2004-02-20 08:40:49.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/toplev.c	2004-08-30 00:57:22.000000000 +0000
+@@ -97,6 +97,10 @@
+ 				   declarations for e.g. AIX 4.x.  */
+ #endif
+ 
++#ifdef STACK_PROTECTOR
++#include "protector.h"
++#endif
++
+ #ifndef HAVE_conditional_execution
+ #define HAVE_conditional_execution 0
+ #endif
+@@ -979,6 +983,15 @@
+    minimum function alignment.  Zero means no alignment is forced.  */
+ int force_align_functions_log;
+ 
++#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
++/* Nonzero means use propolice as a stack protection method */
++int flag_propolice_protection = 1;
++int flag_stack_protection = 0;
++#else
++int flag_propolice_protection = 0;
++int flag_stack_protection = 0;
++#endif
++
+ typedef struct
+ {
+   const char *const string;
+@@ -1154,7 +1167,9 @@
+   {"mem-report", &mem_report, 1 },
+   { "trapv", &flag_trapv, 1 },
+   { "wrapv", &flag_wrapv, 1 },
+-  { "new-ra", &flag_new_regalloc, 1 }
++  { "new-ra", &flag_new_regalloc, 1 },
++  {"stack-protector", &flag_propolice_protection, 1 },
++  {"stack-protector-all", &flag_stack_protection, 1 }
+ };
+ 
+ /* Here is a table, controlled by the tm.h file, listing each -m switch
+@@ -2687,6 +2702,9 @@
+ 
+   insns = get_insns ();
+ 
++  if (flag_propolice_protection)
++    prepare_stack_protection (inlinable);
++
+   /* Dump the rtl code if we are dumping rtl.  */
+ 
+   if (open_dump_file (DFI_rtl, decl))
+@@ -4482,6 +4500,12 @@
+     /* The presence of IEEE signaling NaNs, implies all math can trap.  */
+     if (flag_signaling_nans)
+       flag_trapping_math = 1;
++
++  /* This combination makes optimized frame addressings and causes
++    an internal compilation error at prepare_stack_protection,
++    so don't allow it.  */
++  if (flag_stack_protection && !flag_propolice_protection)
++    flag_propolice_protection = TRUE;
+ }
+ 
+ /* Initialize the compiler back end.  */
+diff -Naur gcc-3.4.1.orig/gcc/tree.h gcc-3.4.1.ssp/gcc/tree.h
+--- gcc-3.4.1.orig/gcc/tree.h	2004-02-08 01:52:43.000000000 +0000
++++ gcc-3.4.1.ssp/gcc/tree.h	2004-08-30 00:57:22.000000000 +0000
+@@ -1489,6 +1489,10 @@
+    where it is called.  */
+ #define DECL_INLINE(NODE) (FUNCTION_DECL_CHECK (NODE)->decl.inline_flag)
+ 
++/* In a VAR_DECL, nonzero if the declaration is copied for inlining.
++   The stack protector should keep its location in the stack.  */
++#define DECL_COPIED(NODE) (VAR_DECL_CHECK (NODE)->decl.inline_flag)
++
+ /* Nonzero in a FUNCTION_DECL means that this function was declared inline,
+    such as via the `inline' keyword in C/C++.  This flag controls the linkage
+    semantics of 'inline'; whether or not the function is inlined is

Added: trunk/gcc/gcc-3.4-sspspecs-1.patch
===================================================================
--- trunk/gcc/gcc-3.4-sspspecs-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/gcc/gcc-3.4-sspspecs-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,114 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2004-08-29
+Initial Package Version: 3.4
+Upstream Status: Rejected Upstream
+Origin: Based on gcc-3.3.2-pie-ssp.patch by Alexander Gabert
+Description: See description in the sspspecs.h hunk below the copyright.
+This patch will also work on gcc-2.95.3 with fuzz
+
+diff -Naur gcc-3.4.1.orig/gcc/gcc.c gcc-3.4.1.sspspecs/gcc/gcc.c
+--- gcc-3.4.1.orig/gcc/gcc.c	2004-04-18 23:45:11.000000000 +0000
++++ gcc-3.4.1.sspspecs/gcc/gcc.c	2004-08-30 01:57:15.000000000 +0000
+@@ -574,6 +574,7 @@
+ 
+ /* config.h can define CC1_SPEC to provide extra args to cc1 and cc1plus
+    or extra switch-translations.  */
++#include "sspspecs.h"
+ #ifndef CC1_SPEC
+ #define CC1_SPEC ""
+ #endif
+diff -Naur gcc-3.4.1.orig/gcc/sspspecs.h gcc-3.4.1.sspspecs/gcc/sspspecs.h
+--- gcc-3.4.1.orig/gcc/sspspecs.h	1970-01-01 00:00:00.000000000 +0000
++++ gcc-3.4.1.sspspecs/gcc/sspspecs.h	2004-08-30 01:57:15.000000000 +0000
+@@ -0,0 +1,91 @@
++/* Adds spec for RTL buffer overflow protection function
++Copyright (C) 1987, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
++1999, 2000, 2001, 2002 Free Software Foundation, Inc.
++
++This file is part of GCC.
++
++GCC is free software; you can redistribute it and/or modify it under
++the terms of the GNU General Public License as published by the Free
++Software Foundation; either version 2, or (at your option) any later
++version.
++
++GCC is distributed in the hope that it will be useful, but WITHOUT ANY
++WARRANTY; without even the implied warranty of MERCHANTABILITY or
++FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
++for more details.
++
++You should have received a copy of the GNU General Public License
++along with GCC; see the file COPYING.  If not, write to the Free
++Software Foundation, 59 Temple Place - Suite 330, Boston, MA
++02111-1307, USA. */
++
++/* Based on http://dev.gentoo.org/~pappy/gentoo-projects/\
++hardened-gcc/gentoo/distrib/3.3.2.2/noarch/gcc-3.3.2-pie-ssp.patch
++thanks to Alexander Gabert and Hardened Gentoo. Modified for just
++SSP, and for additional compatibility with NetBSD. This patch adds
++default specs for -fstack-protector-all with filters so libs should
++build normally. This patch does not have SSP functions, get those from
++http://www.research.ibm.com/trl/projects/security/ssp/
++Workgroup at hlfs-dev at linuxfromscratch.org
++Website at http://www.linuxfromscratch.org/hlfs/ */
++/* ashes */
++
++/* Hopefully this define will prevent this header from being looped.
++	It gets endif'd at the bottom */
++#ifndef SSP_SPEC_H
++#define SSP_SPEC_H
++
++/* FreeBSD and OpenBSD targets are untested but should work.
++	Please report error or success to the workgroup mailing list. */
++#ifdef __FreeBSD__
++#define SSP_KERNEL_EXCLUDE "D_KERNEL"
++#endif
++#ifdef __NetBSD__
++#define SSP_KERNEL_EXCLUDE "D_KERNEL"
++#endif
++#ifdef __OpenBSD__
++#define SSP_KERNEL_EXCLUDE "D_KERNEL"
++#endif
++#ifdef __linux__
++#define SSP_KERNEL_EXCLUDE "D__KERNEL__"
++#endif
++
++/* Fail if none of the above match. */
++#ifndef SSP_KERNEL_EXCLUDE
++ #error "FAILED in sspspecs.h: Unknown target system - Hackme"
++#endif
++
++/* Setup macro. */
++#ifndef NSPEC
++#define NSPEC(a,b) "%{!"a": "b"} "
++#endif
++
++/* These are the exclusion flags wrapped in the above macro. */
++#ifndef STD_SSP_EXCLUDE
++#define STD_SSP_EXCLUDE(flag)			\
++	NSPEC(SSP_KERNEL_EXCLUDE,				\
++	NSPEC("DIN_GCC",					\
++	NSPEC("DIN_LIBGCC",					\
++	NSPEC("DIN_LIBGCC2",				\
++	NSPEC("nostartfiles",				\
++	NSPEC("nostdinc",				\
++	NSPEC("nostdlib",					\
++	NSPEC("nodefaultlibs",				\
++	NSPEC("fno-stack-protector",		\
++	NSPEC("fno-stack-protector-all",	\
++	flag								\
++	))))))))))
++#endif
++
++/* -fforce-addr has been heavily tested by Gentoo and should be safe. */
++#ifndef CC1_SSP
++#define CC1_SSP	"-fstack-protector-all -fforce-addr"
++#endif
++
++/* Setup our new spec string with exclusion flags, and prepend the original spec. */
++static char cc1_spec_string[] = CC1_SPEC STD_SSP_EXCLUDE(CC1_SSP);
++/* Redefine cc1 specs. */
++#undef CC1_SPEC
++#define CC1_SPEC (cc1_spec_string)
++
++#endif /* End of SSP_SPEC_H */

Added: trunk/glibc/glibc-2.3.3-gcc34_allow-1.patch
===================================================================
--- trunk/glibc/glibc-2.3.3-gcc34_allow-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/glibc/glibc-2.3.3-gcc34_allow-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,117 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch.org> (ashes)
+Date: 2004-08-29
+Initial Package Version: 2.3.3
+Upstream Status: From Upstream
+Origin: http://kegel.com/crosstool/
+	glibc-linuxthreads-2.3.2-allow-3.4.patch
+	glibc-2.3.2-allow-gcc-3.4-nounit.patch
+	fixup.patch (for glibc-2.3.2)
+Description: These are upstream fixes to allow glibc-2.3.3 to build with
+gcc-3.4.x. This patch comes from crosstools which got it from glibc-2.3.4.
+See also:
+http://www.linuxfromscratch.org/hlfs/
+
+diff -Naur glibc-2.3.3-lfs-5.1.orig/configure glibc-2.3.3-lfs-5.1.gcc34/configure
+--- glibc-2.3.3-lfs-5.1.orig/configure	2004-02-09 20:58:21.000000000 +0000
++++ glibc-2.3.3-lfs-5.1.gcc34/configure	2004-08-29 21:18:56.000000000 +0000
+@@ -5558,6 +5558,35 @@
+ fi
+ 
+ 
++echo "$as_me:$LINENO: checking for -fno-unit-at-a-time" >&5
++echo $ECHO_N "checking for -fno-unit-at-a-time... $ECHO_C" >&6
++if test "${libc_cv_fno_unit_at_a_time+set}" = set; then
++  echo $ECHO_N "(cached) $ECHO_C" >&6
++else
++  cat > conftest.c <<EOF
++int foo;
++EOF
++if { ac_try='${CC-cc} $CFLAGS $CPPFLAGS -S -fno-unit-at-a-time
++			    conftest.c 1>&5'
++  { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
++  (eval $ac_try) 2>&5
++  ac_status=$?
++  echo "$as_me:$LINENO: \$? = $ac_status" >&5
++  (exit $ac_status); }; }
++then
++  libc_cv_fno_unit_at_a_time=yes
++else
++  libc_cv_fno_unit_at_a_time=no
++fi
++rm -f conftest*
++fi
++echo "$as_me:$LINENO: result: $libc_cv_fno_unit_at_a_time" >&5
++echo "${ECHO_T}$libc_cv_fno_unit_at_a_time" >&6
++if test $libc_cv_fno_unit_at_a_time = yes; then
++  fno_unit_at_a_time=-fno-unit-at-a-time
++fi
++
++
+ if test $elf != yes; then
+   echo "$as_me:$LINENO: checking for .init and .fini sections" >&5
+ echo $ECHO_N "checking for .init and .fini sections... $ECHO_C" >&6
+diff -Naur glibc-2.3.3-lfs-5.1.orig/elf/dl-runtime.c glibc-2.3.3-lfs-5.1.gcc34/elf/dl-runtime.c
+--- glibc-2.3.3-lfs-5.1.orig/elf/dl-runtime.c	2004-02-09 07:08:09.000000000 +0000
++++ glibc-2.3.3-lfs-5.1.gcc34/elf/dl-runtime.c	2004-08-29 21:18:20.000000000 +0000
+@@ -36,6 +36,12 @@
+ # define VERSYMIDX(sym)	(DT_NUM + DT_THISPROCNUM + DT_VERSIONTAGIDX (sym))
+ #endif
+ 
++/* The fixup functions might have need special attributes.  If none
++   are provided define the macro as empty.  */
++#ifndef ARCH_FIXUP_ATTRIBUTE
++# define ARCH_FIXUP_ATTRIBUTE
++#endif
++
+ 
+ /* This function is called through a special trampoline from the PLT the
+    first time each PLT entry is called.  We must perform the relocation
+@@ -46,7 +52,7 @@
+ 
+ #ifndef ELF_MACHINE_NO_PLT
+ static ElfW(Addr)
+-__attribute ((used, noinline))
++__attribute ((used, noinline)) ARCH_FIXUP_ATTRIBUTE
+ fixup (
+ # ifdef ELF_MACHINE_RUNTIME_FIXUP_ARGS
+         ELF_MACHINE_RUNTIME_FIXUP_ARGS,
+@@ -128,7 +134,7 @@
+ #if !defined PROF && !defined ELF_MACHINE_NO_PLT && !__BOUNDED_POINTERS__
+ 
+ static ElfW(Addr)
+-__attribute ((used, noinline))
++__attribute ((used, noinline)) ARCH_FIXUP_ATTRIBUTE
+ profile_fixup (
+ #ifdef ELF_MACHINE_RUNTIME_FIXUP_ARGS
+        ELF_MACHINE_RUNTIME_FIXUP_ARGS,
+diff -Naur glibc-2.3.3-lfs-5.1.orig/linuxthreads/sysdeps/unix/sysv/linux/mips/Makefile glibc-2.3.3-lfs-5.1.gcc34/linuxthreads/sysdeps/unix/sysv/linux/mips/Makefile
+--- glibc-2.3.3-lfs-5.1.orig/linuxthreads/sysdeps/unix/sysv/linux/mips/Makefile	2003-01-27 18:57:22.000000000 +0000
++++ glibc-2.3.3-lfs-5.1.gcc34/linuxthreads/sysdeps/unix/sysv/linux/mips/Makefile	2004-08-29 21:21:45.000000000 +0000
+@@ -1,2 +1,6 @@
+ # pull in __syscall_error routine
+ libpthread-routines += sysdep
++
++ifeq ($(subdir),linuxthreads)
++CFLAGS-pt-initfini.s = $(fno-unit-at-a-time)
++endif
+diff -Naur glibc-2.3.3-lfs-5.1.orig/sysdeps/i386/dl-machine.h glibc-2.3.3-lfs-5.1.gcc34/sysdeps/i386/dl-machine.h
+--- glibc-2.3.3-lfs-5.1.orig/sysdeps/i386/dl-machine.h	2003-09-24 21:08:31.000000000 +0000
++++ glibc-2.3.3-lfs-5.1.gcc34/sysdeps/i386/dl-machine.h	2004-08-29 21:17:37.000000000 +0000
+@@ -154,11 +154,14 @@
+    destroys the passed register information.  */
+ /* GKM FIXME: Fix trampoline to pass bounds so we can do
+    without the `__unbounded' qualifier.  */
+-static ElfW(Addr) fixup (struct link_map *__unbounded l, ElfW(Word) reloc_offset)
+-     __attribute__ ((regparm (2), unused));
++#define ARCH_FIXUP_ATTRIBUTE __attribute__ ((regparm (3), unused))
++
++static ElfW(Addr) fixup (struct link_map *__unbounded l,
++			 ElfW(Word) reloc_offset)
++     ARCH_FIXUP_ATTRIBUTE;
+ static ElfW(Addr) profile_fixup (struct link_map *l, ElfW(Word) reloc_offset,
+ 				 ElfW(Addr) retaddr)
+-     __attribute__ ((regparm (3), unused));
++     ARCH_FIXUP_ATTRIBUTE;
+ # endif
+ 
+ /* This code is used in dl-runtime.c to call the `fixup' function

Added: trunk/glibc/glibc-2.3.3-pax-1.patch
===================================================================
--- trunk/glibc/glibc-2.3.3-pax-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/glibc/glibc-2.3.3-pax-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,191 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2004-08-30
+Initial Package Version: 2.3.3
+Upstream Status: Rejected Upstream
+Origin: This is the combination of the three following patches:
+	glibc-2.3.3_pre20040117-pt_pax.diff
+	glibc-2.3.3-dl_execstack-PaX-support.patch
+	glibc-2.3.2-iconvconfig-name_insert.patch
+Description: This is needed for Pax. http://pax.grsecurity.net/
+
+Also see:
+http://www.linuxfromscratch.org/hlfs/
+
+diff -Naur glibc-2.3.3.orig/elf/elf.h glibc-2.3.3.pax/elf/elf.h
+--- glibc-2.3.3.orig/elf/elf.h	2003-06-26 19:54:29.000000000 +0000
++++ glibc-2.3.3.pax/elf/elf.h	2004-08-30 15:01:32.000000000 +0000
+@@ -566,12 +566,25 @@
+ #define PT_HIOS		0x6fffffff	/* End of OS-specific */
+ #define PT_LOPROC	0x70000000	/* Start of processor-specific */
+ #define PT_HIPROC	0x7fffffff	/* End of processor-specific */
++#define PT_PAX_FLAGS	0x65041580	/* Indicates PaX flag markings */
+ 
+ /* Legal values for p_flags (segment flags).  */
+ 
+ #define PF_X		(1 << 0)	/* Segment is executable */
+ #define PF_W		(1 << 1)	/* Segment is writable */
+ #define PF_R		(1 << 2)	/* Segment is readable */
++#define PF_PAGEEXEC	(1 << 4)	/* Enable  PAGEEXEC */
++#define PF_NOPAGEEXEC	(1 << 5)	/* Disable PAGEEXEC */
++#define PF_SEGMEXEC	(1 << 6)	/* Enable  SEGMEXEC */
++#define PF_NOSEGMEXEC	(1 << 7)	/* Disable SEGMEXEC */
++#define PF_MPROTECT	(1 << 8)	/* Enable  MPROTECT */
++#define PF_NOMPROTECT	(1 << 9)	/* Disable MPROTECT */
++#define PF_RANDEXEC	(1 << 10)	/* Enable  RANDEXEC */
++#define PF_NORANDEXEC	(1 << 11)	/* Disable RANDEXEC */
++#define PF_EMUTRAMP	(1 << 12)	/* Enable  EMUTRAMP */
++#define PF_NOEMUTRAMP	(1 << 13)	/* Disable EMUTRAMP */
++#define PF_RANDMMAP	(1 << 14)	/* Enable  RANDMMAP */
++#define PF_NORANDMMAP	(1 << 15)	/* Disable RANDMMAP */
+ #define PF_MASKOS	0x0ff00000	/* OS-specific */
+ #define PF_MASKPROC	0xf0000000	/* Processor-specific */
+ 
+diff -Naur glibc-2.3.3.orig/iconv/iconvconfig.c glibc-2.3.3.pax/iconv/iconvconfig.c
+--- glibc-2.3.3.orig/iconv/iconvconfig.c	2003-06-11 21:38:47.000000000 +0000
++++ glibc-2.3.3.pax/iconv/iconvconfig.c	2004-08-30 15:01:44.000000000 +0000
+@@ -988,6 +988,34 @@
+                               module name offset
+                          (following last entry with step count 0)
+ */
++
++static struct hash_entry *hash_table;
++static size_t hash_size;
++
++/* Function to insert the names.  */
++static void name_insert (const void *nodep, VISIT value, int level)
++{
++  struct name *name;
++  unsigned int idx;
++  unsigned int hval2;
++
++  if (value != leaf && value != postorder)
++    return;
++
++  name = *(struct name **) nodep;
++  idx = name->hashval % hash_size;
++  hval2 = 1 + name->hashval % (hash_size - 2);
++
++  while (hash_table[idx].string_offset != 0)
++    if ((idx += hval2) >= hash_size)
++      idx -= hash_size;
++
++  hash_table[idx].string_offset = strtaboffset (name->strent);
++
++  assert (name->module_idx != -1);
++  hash_table[idx].module_idx = name->module_idx;
++}
++
+ static int
+ write_output (void)
+ {
+@@ -995,8 +1023,6 @@
+   char *string_table;
+   size_t string_table_size;
+   struct gconvcache_header header;
+-  struct hash_entry *hash_table;
+-  size_t hash_size;
+   struct module_entry *module_table;
+   char *extra_table;
+   char *cur_extra_table;
+@@ -1009,30 +1035,6 @@
+ 		+ strlen (".XXXXXX")];
+   char finalname[prefix_len + sizeof (GCONV_MODULES_CACHE)];
+ 
+-  /* Function to insert the names.  */
+-  static void name_insert (const void *nodep, VISIT value, int level)
+-    {
+-      struct name *name;
+-      unsigned int idx;
+-      unsigned int hval2;
+-
+-      if (value != leaf && value != postorder)
+-	return;
+-
+-      name = *(struct name **) nodep;
+-      idx = name->hashval % hash_size;
+-      hval2 = 1 + name->hashval % (hash_size - 2);
+-
+-      while (hash_table[idx].string_offset != 0)
+-	if ((idx += hval2) >= hash_size)
+-	  idx -= hash_size;
+-
+-      hash_table[idx].string_offset = strtaboffset (name->strent);
+-
+-      assert (name->module_idx != -1);
+-      hash_table[idx].module_idx = name->module_idx;
+-    }
+-
+   /* Open the output file.  */
+   assert (GCONV_MODULES_CACHE[0] == '/');
+   strcpy (stpcpy (mempcpy (tmpfname, prefix, prefix_len), GCONV_MODULES_CACHE),
+diff -Naur glibc-2.3.3.orig/sysdeps/unix/sysv/linux/dl-execstack.c glibc-2.3.3.pax/sysdeps/unix/sysv/linux/dl-execstack.c
+--- glibc-2.3.3.orig/sysdeps/unix/sysv/linux/dl-execstack.c	2003-11-27 05:22:03.000000000 +0000
++++ glibc-2.3.3.pax/sysdeps/unix/sysv/linux/dl-execstack.c	2004-08-30 15:01:49.000000000 +0000
+@@ -47,11 +47,17 @@
+ 		      PROT_READ|PROT_WRITE|PROT_EXEC|PROT_GROWSDOWN) == 0)
+ 	goto return_success;
+ #  if __ASSUME_PROT_GROWSUPDOWN == 0
+-      if (errno == EINVAL)
++      if (errno == EINVAL) {
+ 	no_growsdown = true;
+-      else
++      } else {
++#  endif
++	if (errno == EACCES)		/* PAX is enabled */
++	  return 0;
++	else
++	  return errno;
++#  if __ASSUME_PROT_GROWSUPDOWN == 0
++      }
+ #  endif
+-	return errno;
+     }
+ # endif
+ 
+@@ -73,8 +79,11 @@
+ 	page -= size;
+       else
+ 	{
+-	  if (errno != ENOMEM)	/* Unexpected failure mode.  */
++	  if (errno == EACCES) {	/* PAX is enabled */
++	    return 0;
++	  } else if (errno != ENOMEM) {	/* Unexpected failure mode.  */
+ 	    return errno;
++	  }
+ 
+ 	  if (size == GL(dl_pagesize))
+ 	    /* We just tried to mprotect the top hole page and failed.
+@@ -105,11 +114,17 @@
+ 		      PROT_READ|PROT_WRITE|PROT_EXEC|PROT_GROWSUP) == 0)
+ 	goto return_success;
+ #  if __ASSUME_PROT_GROWSUPDOWN == 0
+-      if (errno == EINVAL)
++      if (errno == EINVAL) {
+ 	no_growsup = true;
+-      else
++      } else {
++#  endif
++	if (errno == EACCES)		/* PAX is enabled */
++	  return 0;
++	else
++	  return errno;
++#  if __ASSUME_PROT_GROWSUPDOWN == 0
++      }
+ #  endif
+-	return errno;
+     }
+ # endif
+ 
+@@ -130,8 +145,11 @@
+ 	page += size;
+       else
+ 	{
+-	  if (errno != ENOMEM)	/* Unexpected failure mode.  */
++	  if (errno == EACCES) {	/* PAX is enabled */
++	    return 0;
++	  } else if (errno != ENOMEM) {	/* Unexpected failure mode.  */
+ 	    return errno;
++	  }
+ 
+ 	  if (size == GL(dl_pagesize))
+ 	    /* We just tried to mprotect the lowest hole page and failed.

Added: trunk/glibc/glibc-2.3.3-security_fixes-1.patch
===================================================================
--- trunk/glibc/glibc-2.3.3-security_fixes-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/glibc/glibc-2.3.3-security_fixes-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,65 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2004-02-26
+Initial Package Version: 2.3.3
+Upstream Status: Rejected Upstream
+Origin: This is the combination of the following patches:
+	glibc-2.3.3-owl-malloc-unlink-sanity-check.diff
+	glibc-2.3.3-got-fix.diff
+Description: Optional security fixes.
+
+Also see:
+http://www.linuxfromscratch.org/hlfs/
+
+diff -Naur glibc-2.3.3.orig/malloc/malloc.c glibc-2.3.3.security_fixes/malloc/malloc.c
+--- glibc-2.3.3.orig/malloc/malloc.c	2003-09-30 02:06:48.000000000 +0000
++++ glibc-2.3.3.security_fixes/malloc/malloc.c	2004-08-30 15:22:02.000000000 +0000
+@@ -315,6 +315,11 @@
+ #define assert(x) ((void)0)
+ #endif
+ 
++/* needed for owl-malloc-unlink-sanity-check */
++#include <abort-instr.h>
++#ifndef ABORT_INSTRUCTION
++#define ABORT_INSTRUCTION
++#endif
+ 
+ /*
+   INTERNAL_SIZE_T is the word-size used for internal bookkeeping
+@@ -1954,6 +1959,14 @@
+ #define unlink(P, BK, FD) {                                            \
+   FD = P->fd;                                                          \
+   BK = P->bk;                                                          \
++  /* owl-malloc-unlink-sanity-check */                                 \
++  if (FD->bk != P || BK->fd != P)                                      \
++  {                                                                    \
++    ABORT_INSTRUCTION;                                                 \
++    _exit(127);                                                        \
++    while (1)                                                          \
++      ABORT_INSTRUCTION;                                               \
++  }                                                                    \
+   FD->bk = BK;                                                         \
+   BK->fd = FD;                                                         \
+ }
+diff -Naur glibc-2.3.3.orig/sysdeps/i386/elf/start.S glibc-2.3.3.security_fixes/sysdeps/i386/elf/start.S
+--- glibc-2.3.3.orig/sysdeps/i386/elf/start.S	2003-05-28 19:37:45.000000000 +0000
++++ glibc-2.3.3.security_fixes/sysdeps/i386/elf/start.S	2004-08-30 15:21:57.000000000 +0000
+@@ -73,16 +73,13 @@
+ 	addl $_GLOBAL_OFFSET_TABLE_, %ebx
+ 
+ 	/* Push address of our own entry points to .fini and .init.  */
+-	leal __libc_csu_fini at GOTOFF(%ebx), %eax
+-	pushl %eax
+-	leal __libc_csu_init at GOTOFF(%ebx), %eax
+-	pushl %eax
++	pushl __libc_csu_fini at GOT(%ebx)
++	pushl __libc_csu_init at GOT(%ebx)
+ 
+ 	pushl %ecx		/* Push second argument: argv.  */
+ 	pushl %esi		/* Push first argument: argc.  */
+ 
+-	leal BP_SYM (main)@GOTOFF(%ebx), %eax
+-	pushl %eax
++	pushl BP_SYM (main)@GOT(%ebx)
+ 
+ 	/* Call the user's main function, and exit with its value.
+ 	   But let the libc call main.    */

Added: trunk/linux/linux-2.4.27-frandom-1.patch
===================================================================
--- trunk/linux/linux-2.4.27-frandom-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/linux/linux-2.4.27-frandom-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,539 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2004-08-30
+Initial Package Version: 2.4.27
+Upstream Status: Rejected Upstream
+Origin: http://frandom.sourceforge.net/ - frandom-0.8
+Description: This is useful for smashing stack protector, wiping
+discs, mktemp, and more.
+This source was modified for 2.4.26, and is enabled by default in
+arch/i386/defconfig
+
+Thanks to Eli Billauer
+http://www.billauer.co.il/
+http://frandom.sourceforge.net/
+
+Also see:
+http://www.linuxfromscratch.org/hlfs/
+http://www.linuxfromscratch.org/hints/downloads/files/entropy.txt
+http://www.linuxfromscratch.org/~robert/hlfs/hints/attachments/entropy/\
+	frandom-0.8.tar.gz
+
+diff -Naur linux-2.4.26.orig/Documentation/Configure.help linux-2.4.26.frandom/Documentation/Configure.help
+--- linux-2.4.26.orig/Documentation/Configure.help	2004-04-14 13:05:24.000000000 +0000
++++ linux-2.4.26.frandom/Documentation/Configure.help	2004-04-18 21:56:30.000000000 +0000
+@@ -18294,6 +18294,17 @@
+   input/output character sets. Say Y here for the UTF-8 encoding of
+   the Unicode/ISO9646 universal character set.
+ 
++Fast random data generator suite (/dev/frandom and /dev/erandom)
++CONFIG_FRANDOM
++  Fast random data/number generator support in kernel. This random
++  generator is 10-50 times faster than /dev/urandom, and saves kernel
++  entropy.
++
++  If unsure, say Y unless you're tight on kernel size. This module is
++  small and harmless otherwise.
++
++  If you choose M, the sysctl interface will be disabled.
++
+ Virtual terminal
+ CONFIG_VT
+   If you say Y here, you will get support for terminal devices with
+diff -Naur linux-2.4.26.orig/arch/i386/defconfig linux-2.4.26.frandom/arch/i386/defconfig
+--- linux-2.4.26.orig/arch/i386/defconfig	2004-04-14 13:05:25.000000000 +0000
++++ linux-2.4.26.frandom/arch/i386/defconfig	2004-04-18 21:57:52.000000000 +0000
+@@ -562,6 +562,7 @@
+ #
+ CONFIG_VT=y
+ CONFIG_VT_CONSOLE=y
++CONFIG_FRANDOM=y
+ CONFIG_SERIAL=y
+ # CONFIG_SERIAL_CONSOLE is not set
+ # CONFIG_SERIAL_EXTENDED is not set
+diff -Naur linux-2.4.26.orig/drivers/char/Config.in linux-2.4.26.frandom/drivers/char/Config.in
+--- linux-2.4.26.orig/drivers/char/Config.in	2004-02-18 13:36:31.000000000 +0000
++++ linux-2.4.26.frandom/drivers/char/Config.in	2004-04-18 21:56:30.000000000 +0000
+@@ -25,6 +25,7 @@
+       tristate '   Dual serial port support' CONFIG_DUALSP_SERIAL
+    fi
+ fi
++tristate 'Fast random data generator suite (/dev/frandom and /dev/erandom)' CONFIG_FRANDOM
+ dep_mbool 'Extended dumb serial driver options' CONFIG_SERIAL_EXTENDED $CONFIG_SERIAL
+ if [ "$CONFIG_SERIAL_EXTENDED" = "y" ]; then
+    bool '  Support more than 4 serial ports' CONFIG_SERIAL_MANY_PORTS
+diff -Naur linux-2.4.26.orig/drivers/char/Makefile linux-2.4.26.frandom/drivers/char/Makefile
+--- linux-2.4.26.orig/drivers/char/Makefile	2004-02-18 13:36:31.000000000 +0000
++++ linux-2.4.26.frandom/drivers/char/Makefile	2004-04-18 21:56:30.000000000 +0000
+@@ -25,7 +25,7 @@
+ 			misc.o pty.o random.o selection.o serial.o \
+ 			sonypi.o tty_io.o tty_ioctl.o generic_serial.o \
+ 			au1000_gpio.o vac-serial.o hp_psaux.o nvram.o \
+-			scx200.o fetchop.o
++			scx200.o fetchop.o frandom.o
+ 
+ mod-subdirs	:=	joystick ftape drm drm-4.0 pcmcia
+ 
+@@ -334,6 +334,8 @@
+   obj-y += ipmi/ipmi.o
+ endif
+ 
++obj-$(CONFIG_FRANDOM) += frandom.o
++
+ include $(TOPDIR)/Rules.make
+ 
+ fastdep:
+diff -Naur linux-2.4.26.orig/drivers/char/frandom.c linux-2.4.26.frandom/drivers/char/frandom.c
+--- linux-2.4.26.orig/drivers/char/frandom.c	1970-01-01 00:00:00.000000000 +0000
++++ linux-2.4.26.frandom/drivers/char/frandom.c	2004-04-18 21:56:30.000000000 +0000
+@@ -0,0 +1,362 @@
++/*
++** frandom.c
++**      Fast pseudo-random generator 
++**
++**      (c) Copyright 2003 Eli Billauer
++**      http://www.billauer.co.il
++**
++** This program is free software; you can redistribute it and/or modify
++** it under the terms of the GNU General Public License as published by
++** the Free Software Foundation; either version 2 of the License, or
++** (at your option) any later version.
++**
++** Usage: mknod /dev/frandom c 235 11
++**        mknod /dev/erandom c 235 12
++**        insmod frandom
++**
++** This code is highly based upon the examples given in the book "Linux
++** Device Drivers" by Alessandro Rubini and Jonathan Corbet, published
++** by O'Reilly & Associates.
++** O'Reilly's release of this book on the web for free is highly
++** appreciated.
++**
++*/
++
++#include <linux/version.h>
++#include <linux/config.h>
++#include <linux/module.h>
++
++#include <linux/kernel.h>
++#include <linux/init.h>
++#include <linux/slab.h> 
++#include <linux/fs.h> 
++#include <linux/errno.h>
++#include <linux/types.h> 
++#include <linux/random.h>
++
++#include <asm/uaccess.h>
++
++#if (LINUX_VERSION_CODE>=KERNEL_VERSION(2,6,0))
++#include <linux/moduleparam.h>
++#endif
++
++#define INTERNAL_SEED 0
++#define EXTERNAL_SEED 1
++
++#define FRANDOM_MAJOR 235
++#define FRANDOM_MINOR 11 
++#define ERANDOM_MINOR 12 
++
++static struct file_operations frandom_fops; /* Values assigned below */
++
++static int erandom_seeded = 0; /* Internal flag */
++
++static int frandom_major = FRANDOM_MAJOR;
++static int frandom_minor = FRANDOM_MINOR;
++static int erandom_minor = ERANDOM_MINOR;
++static int frandom_bufsize = 256;
++static int frandom_chunklimit = 0; /* =0 means unlimited */
++
++MODULE_DESCRIPTION("Fast pseudo-random number generator");
++MODULE_LICENSE("GPL");
++MODULE_AUTHOR("Eli Billauer");
++MODULE_PARM(frandom_major,"i");
++MODULE_PARM_DESC(frandom_major,"Major number of /dev/frandom and /dev/erandom");
++MODULE_PARM(frandom_minor,"i");
++MODULE_PARM_DESC(frandom_minor,"Minor number of /dev/frandom");
++MODULE_PARM(erandom_minor,"i");
++MODULE_PARM_DESC(erandom_minor,"Minor number of /dev/erandom");
++MODULE_PARM(frandom_bufsize,"i");
++MODULE_PARM_DESC(frandom_bufsize,"Internal buffer size in bytes. Default is 256. Must be >= 256");
++MODULE_PARM(frandom_chunklimit,"i");
++MODULE_PARM_DESC(frandom_chunklimit,"Limit for read() blocks size. 0 (default) is unlimited, otherwise must be >= 256");
++
++struct frandom_state
++{
++	struct semaphore sem; /* Semaphore on the state structure */
++
++	u8 S[256]; /* The state array */
++	u8 i;        
++	u8 j;
++
++	char *buf;
++};
++
++static struct frandom_state *erandom_state;
++
++static inline void swap_byte(u8 *a, u8 *b)
++{
++	u8 swapByte; 
++  
++	swapByte = *a; 
++	*a = *b;      
++	*b = swapByte;
++}
++
++static void init_rand_state(struct frandom_state *state, int seedflag);
++
++void erandom_get_random_bytes(char *buf, size_t count)
++{
++	struct frandom_state *state = erandom_state;
++	int k;
++
++	unsigned int i;
++	unsigned int j;
++	u8 *S;
++  
++	/* If we fail to get the semaphore, we revert to external random data.
++	   Since semaphore blocking is expected to be very rare, and interrupts
++	   during these rare and very short periods of time even less frequent,
++	   we take the better-safe-than-sorry approach, and fill the buffer
++	   some expensive random data, in case the caller wasn't aware of this
++	   possibility, and expects random data anyhow.
++	*/
++
++	if (down_interruptible(&state->sem)) {
++		get_random_bytes(buf, count);
++		return;
++	}
++
++	/* We seed erandom as late as possible, hoping that the kernel's main
++	   RNG is already restored in the boot sequence (not critical, but
++	   better.
++	*/
++	
++	if (!erandom_seeded) {
++		erandom_seeded = 1;
++		init_rand_state(state, EXTERNAL_SEED);
++		printk(KERN_INFO "frandom: Seeded global generator now (used by erandom)\n");
++	}
++
++	i = state->i;     
++	j = state->j;
++	S = state->S;  
++
++	for (k=0; k<count; k++) {
++		i = (i + 1) & 0xff;
++		j = (j + S[i]) & 0xff;
++		swap_byte(&S[i], &S[j]);
++		*buf++ = S[(S[i] + S[j]) & 0xff];
++	}
++ 
++	state->i = i;     
++	state->j = j;
++
++	up(&state->sem);
++}
++
++static void init_rand_state(struct frandom_state *state, int seedflag)
++{
++	unsigned int i, j, k;
++	u8 *S;
++	u8 *seed = state->buf;
++
++	if (seedflag == INTERNAL_SEED)
++		erandom_get_random_bytes(seed, 256);
++	else
++		get_random_bytes(seed, 256);
++
++	S = state->S;
++	for (i=0; i<256; i++)
++		*S++=i;
++
++	j=0;
++	S = state->S;
++
++	for (i=0; i<256; i++) {
++		j = (j + S[i] + *seed++) & 0xff;
++		swap_byte(&S[i], &S[j]);
++	}
++
++	/* It's considered good practice to discard the first 256 bytes
++	   generated. So we do it:
++	*/
++
++	i=0; j=0;
++	for (k=0; k<256; k++) {
++		i = (i + 1) & 0xff;
++		j = (j + S[i]) & 0xff;
++		swap_byte(&S[i], &S[j]);
++	}
++
++	state->i = i; /* Save state */
++	state->j = j;
++}
++
++static int frandom_open(struct inode *inode, struct file *filp)
++{
++  
++	struct frandom_state *state;
++
++	int num =MINOR(inode->i_rdev);
++	if ((num != frandom_minor) && (num != erandom_minor)) return -ENODEV;
++  
++	state = kmalloc(sizeof(struct frandom_state), GFP_KERNEL);
++	if (!state)
++		return -ENOMEM;
++
++	state->buf = kmalloc(frandom_bufsize, GFP_KERNEL);
++	if (!state->buf) {
++		kfree(state);
++		return -ENOMEM;
++	}
++
++	sema_init(&state->sem, 1); /* Init semaphore as a mutex */
++
++	if (num == frandom_minor)
++		init_rand_state(state, EXTERNAL_SEED);
++	else
++		init_rand_state(state, INTERNAL_SEED);
++
++	filp->private_data = state;
++
++#if (LINUX_VERSION_CODE<KERNEL_VERSION(2,4,0))
++	MOD_INC_USE_COUNT; 
++#endif
++  
++	return 0; /* Success */
++}
++
++static int frandom_release(struct inode *inode, struct file *filp)
++{
++
++	struct frandom_state *state = filp->private_data;
++
++	kfree(state->buf);
++	kfree(state);
++  
++#if (LINUX_VERSION_CODE<KERNEL_VERSION(2,4,0))
++	MOD_DEC_USE_COUNT;
++#endif
++
++	return 0;
++}
++
++static ssize_t frandom_read(struct file *filp, char *buf, size_t count,
++			    loff_t *f_pos)
++{
++	struct frandom_state *state = filp->private_data;
++	ssize_t ret;
++	int dobytes, k;
++	char *localbuf;
++
++	unsigned int i;
++	unsigned int j;
++	u8 *S;
++  
++	if (down_interruptible(&state->sem))
++		return -ERESTARTSYS;
++  
++	if ((frandom_chunklimit > 0) && (count > frandom_chunklimit))
++		count = frandom_chunklimit;
++
++	ret = count; /* It's either everything or an error... */
++  
++	i = state->i;     
++	j = state->j;
++	S = state->S;  
++
++	while (count) {
++		if (count > frandom_bufsize)
++			dobytes = frandom_bufsize;
++		else
++			dobytes = count;
++
++		localbuf = state->buf;
++
++		for (k=0; k<dobytes; k++) {
++			i = (i + 1) & 0xff;
++			j = (j + S[i]) & 0xff;
++			swap_byte(&S[i], &S[j]);
++			*localbuf++ = S[(S[i] + S[j]) & 0xff];
++		}
++ 
++		if (copy_to_user(buf, state->buf, dobytes)) {
++			ret = -EFAULT;
++			goto out;
++		}
++
++		buf += dobytes;
++		count -= dobytes;
++	}
++
++ out:
++	state->i = i;     
++	state->j = j;
++
++	up(&state->sem);
++	return ret;
++}
++
++static struct file_operations frandom_fops = {
++	read:       frandom_read,
++	open:       frandom_open,
++	release:    frandom_release,
++};
++
++static void frandom_cleanup_module(void) {
++	kfree(erandom_state->buf);
++	kfree(erandom_state);
++ 
++	unregister_chrdev(frandom_major, "frandom");
++}
++
++
++static int frandom_init_module(void)
++{
++	int result;
++
++	/* The buffer size MUST be at least 256 bytes, because we assume that
++	   minimal length in init_rand_state().
++	*/       
++	if (frandom_bufsize < 256) {
++		printk(KERN_ERR "frandom: Refused to load because frandom_bufsize=%d < 256\n",frandom_bufsize);
++		return -EINVAL;
++	}
++	if ((frandom_chunklimit != 0) && (frandom_chunklimit < 256)) {
++		printk(KERN_ERR "frandom: Refused to load because frandom_chunklimit=%d < 256 and != 0\n",frandom_chunklimit);
++		return -EINVAL;
++	}
++
++	erandom_state = kmalloc(sizeof(struct frandom_state), GFP_KERNEL);
++	if (!erandom_state)
++		return -ENOMEM;
++
++	/* This specific buffer is only used for seeding, so we need
++	   256 bytes exactly */
++	erandom_state->buf = kmalloc(256, GFP_KERNEL);
++	if (!erandom_state->buf) {
++		kfree(erandom_state);
++		return -ENOMEM;
++	}
++
++	sema_init(&erandom_state->sem, 1); /* Init semaphore as a mutex */
++
++	erandom_seeded = 0;
++
++#ifdef SET_MODULE_OWNER
++	SET_MODULE_OWNER(&frandom_fops);
++#endif
++	/*
++	 * Register your major, and accept a dynamic number. This is the
++	 * first thing to do, in order to avoid releasing other module's
++	 * fops in frandom_cleanup_module()
++	 */
++	result = register_chrdev(frandom_major, "frandom", &frandom_fops);
++	if (result < 0) {
++		printk(KERN_WARNING "frandom: can't get major %d\n",frandom_major);
++
++		kfree(erandom_state->buf);
++		kfree(erandom_state);
++	
++		return result;
++	}
++	if (frandom_major == 0) frandom_major = result; /* dynamic */
++    
++	return 0; /* succeed */
++}
++
++module_init(frandom_init_module);
++module_exit(frandom_cleanup_module);
++
++EXPORT_SYMBOL(erandom_get_random_bytes);
+diff -Naur linux-2.4.26.orig/drivers/char/random.c linux-2.4.26.frandom/drivers/char/random.c
+--- linux-2.4.26.orig/drivers/char/random.c	2004-02-18 13:36:31.000000000 +0000
++++ linux-2.4.26.frandom/drivers/char/random.c	2004-04-18 21:56:30.000000000 +0000
+@@ -1870,6 +1870,60 @@
+ 	return 1;
+ }
+ 
++#ifdef CONFIG_FRANDOM
++/* We don't really want to create a header file for frandom
++   at this stage, so here's the prototype: */
++
++void erandom_get_random_bytes(char *buf, size_t count);
++
++static int proc_do_erandom(ctl_table *table, int write, struct file *filp,
++			void *buffer, size_t *lenp)
++{
++	ctl_table	fake_table;
++	unsigned char	buf[64], random[16], *p;
++	int i;
++	
++	erandom_get_random_bytes(random, 16);
++	
++	p=buf;
++	
++	for (i=0; i<16; i++) {
++     		sprintf(p, "%02x", random[i]);
++		p+=2;
++	}
++
++	fake_table.data = buf;
++	fake_table.maxlen = sizeof(buf);
++
++	return proc_dostring(&fake_table, write, filp, buffer, lenp);
++}
++
++
++static int erandom_strategy(ctl_table *table, int *name, int nlen,
++			 void *oldval, size_t *oldlenp,
++			 void *newval, size_t newlen, void **context)
++{
++	unsigned char	random[16];
++	unsigned int	len;
++
++	if (!oldval || !oldlenp)
++		return 1;
++
++	erandom_get_random_bytes(random, 16);
++
++	if (get_user(len, oldlenp))
++		return -EFAULT;
++	if (len) {
++		if (len > 16)
++			len = 16;
++		if (copy_to_user(oldval, random, len) ||
++		    put_user(len, oldlenp))
++			return -EFAULT;
++	}
++	return 1;
++}
++#endif
++
+ ctl_table random_table[] = {
+ 	{RANDOM_POOLSIZE, "poolsize",
+ 	 &sysctl_poolsize, sizeof(int), 0644, NULL,
+@@ -1888,6 +1942,11 @@
+ 	{RANDOM_BOOT_ID, "boot_id",
+ 	 &sysctl_bootid, 16, 0444, NULL,
+ 	 &proc_do_uuid, &uuid_strategy},
++#ifdef CONFIG_FRANDOM
++	{RANDOM_ERANDOM, "erandom",
++	 NULL, 16, 0444, NULL,
++	 &proc_do_erandom, &erandom_strategy},
++#endif
+ 	{RANDOM_UUID, "uuid",
+ 	 NULL, 16, 0444, NULL,
+ 	 &proc_do_uuid, &uuid_strategy},
+diff -Naur linux-2.4.26.orig/include/linux/sysctl.h linux-2.4.26.frandom/include/linux/sysctl.h
+--- linux-2.4.26.orig/include/linux/sysctl.h	2004-04-14 13:05:40.000000000 +0000
++++ linux-2.4.26.frandom/include/linux/sysctl.h	2004-04-18 21:56:30.000000000 +0000
+@@ -190,7 +190,8 @@
+ 	RANDOM_READ_THRESH=3,
+ 	RANDOM_WRITE_THRESH=4,
+ 	RANDOM_BOOT_ID=5,
+-	RANDOM_UUID=6
++	RANDOM_UUID=6,
++	RANDOM_ERANDOM=7
+ };
+ 
+ /* /proc/sys/bus/isa */

Added: trunk/linux/linux-2.4.27-ssp-1.patch
===================================================================
--- trunk/linux/linux-2.4.27-ssp-1.patch	2004-08-31 18:42:28 UTC (rev 620)
+++ trunk/linux/linux-2.4.27-ssp-1.patch	2004-08-31 19:56:54 UTC (rev 621)
@@ -0,0 +1,64 @@
+Submitted By: Robert Connolly <robert at linuxfromscratch dot org> (ashes)
+Date: 2004-08-30
+Initial Package Version: 2.4.27
+Upstream Status: Rejected Upstream
+Origin: http://www.northernsecurity.net/adamantix/diffs/2.4.20-propolice.patch
+Description: Linux kernel patch for Smashing Stack Protector. This patch allows
+the use of -fstack-protector during the kernel build.
+http://www.linuxfromscratch.org/hints/downloads/files/ssp.txt
+http://www.linuxfromscratch.org/hlfs/
+
+diff -Naur linux-2.4.26.orig/include/linux/kernel.h linux-2.4.26.ssp/include/linux/kernel.h
+--- linux-2.4.26.orig/include/linux/kernel.h	2002-11-28 23:53:15.000000000 +0000
++++ linux-2.4.26.ssp/include/linux/kernel.h	2004-04-18 19:54:08.000000000 +0000
+@@ -86,6 +86,9 @@
+ extern void dev_probe_lock(void);
+ extern void dev_probe_unlock(void);
+ 
++extern int __guard;
++extern void __stack_smash_handler(int, char []);
++
+ extern int session_of_pgrp(int pgrp);
+ 
+ asmlinkage int printk(const char * fmt, ...)
+diff -Naur linux-2.4.26.orig/kernel/ksyms.c linux-2.4.26.ssp/kernel/ksyms.c
+--- linux-2.4.26.orig/kernel/ksyms.c	2004-02-18 13:36:32.000000000 +0000
++++ linux-2.4.26.ssp/kernel/ksyms.c	2004-04-18 19:54:08.000000000 +0000
+@@ -528,6 +528,8 @@
+ EXPORT_SYMBOL(single_open);
+ EXPORT_SYMBOL(single_release);
+ EXPORT_SYMBOL(seq_release_private);
++EXPORT_SYMBOL_NOVERS(__guard);
++EXPORT_SYMBOL_NOVERS(__stack_smash_handler);
+ 
+ /* Program loader interfaces */
+ EXPORT_SYMBOL(setup_arg_pages);
+diff -Naur linux-2.4.26.orig/lib/Makefile linux-2.4.26.ssp/lib/Makefile
+--- linux-2.4.26.orig/lib/Makefile	2004-04-14 13:05:40.000000000 +0000
++++ linux-2.4.26.ssp/lib/Makefile	2004-04-18 19:54:08.000000000 +0000
+@@ -12,7 +12,7 @@
+ 	       rbtree.o crc32.o firmware_class.o
+ 
+ obj-y := errno.o ctype.o string.o vsprintf.o brlock.o cmdline.o \
+-	 bust_spinlocks.o rbtree.o dump_stack.o
++	 bust_spinlocks.o rbtree.o dump_stack.o propolice.o
+ 
+ obj-$(CONFIG_FW_LOADER) += firmware_class.o
+ obj-$(CONFIG_RWSEM_GENERIC_SPINLOCK) += rwsem-spinlock.o
+diff -Naur linux-2.4.26.orig/lib/propolice.c linux-2.4.26.ssp/lib/propolice.c
+--- linux-2.4.26.orig/lib/propolice.c	1970-01-01 00:00:00.000000000 +0000
++++ linux-2.4.26.ssp/lib/propolice.c	2004-04-18 19:54:08.000000000 +0000
+@@ -0,0 +1,13 @@
++ /*
++  *  linux/lib/errno.c
++  *
++  *
++  */
++ 
++ int __guard = '\0\0\n\777';
++ 
++ void __stack_smash_handler (int damaged, char func[])
++ 	 {
++	   static char *message = "propolice detects %x at function %s.\n" ;
++	   panic (message, damaged, func);
++}




More information about the patches mailing list