Ilya Enkovich
2014-10-08 18:57:08 UTC
Hi,
This patch introduces helper functions used by instrumentation. These helpers access local data structures and generate commonly used expressions, variables and statements. I also included the local data initializer in this patch.
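To give an idea of what the emitted checks amount to, below is a minimal, self-contained C sketch (not part of the patch). bounds_t, make_bounds, check_lower and check_upper are hypothetical source-level stand-ins for the bndmk/bndcl/bndcu builtins that chkp_make_bounds, chkp_check_lower and chkp_check_upper generate at the GIMPLE level.

/* Hypothetical source-level analogy of the generated checks;
   the real pass emits builtin calls, not these functions.  */

#include <stdio.h>
#include <stdlib.h>

typedef struct
{
  const char *lb;   /* Lowest valid address.  */
  const char *ub;   /* Highest valid address.  */
} bounds_t;

/* Analogue of chkp_make_bounds: bounds cover [LB, LB + SIZE - 1].  */
static bounds_t
make_bounds (const void *lb, size_t size)
{
  bounds_t b = { (const char *) lb, (const char *) lb + size - 1 };
  return b;
}

/* Analogue of chkp_check_lower: fail if ADDR is below the lower bound.  */
static void
check_lower (const void *addr, bounds_t b)
{
  if ((const char *) addr < b.lb)
    abort ();
}

/* Analogue of chkp_check_upper: fail if ADDR is above the upper bound.  */
static void
check_upper (const void *addr, bounds_t b)
{
  if ((const char *) addr > b.ub)
    abort ();
}

int
main (void)
{
  int buf[4];
  bounds_t b = make_bounds (buf, sizeof (buf));
  int *p = &buf[3];

  /* chkp_check_mem_access (first, last, ...) emits a lower check for
     the first accessed byte and an upper check for the last one.  */
  check_lower (p, b);
  check_upper ((const char *) p + sizeof (*p) - 1, b);
  *p = 42;   /* In-bounds store, both checks pass.  */

  printf ("ok: %d\n", buf[3]);
  return 0;
}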
Thanks,
Ilya
--
2014-10-08 Ilya Enkovich <***@intel.com>
* tree-chkp.c (assign_handler): New.
(chkp_get_zero_bounds): New.
(chkp_uintptr_type): New.
(chkp_none_bounds_var): New.
(entry_block): New.
(zero_bounds): New.
(none_bounds): New.
(incomplete_bounds): New.
(tmp_var): New.
(size_tmp_var): New.
(chkp_abnormal_copies): New.
(chkp_invalid_bounds): New.
(chkp_completed_bounds_set): New.
(chkp_reg_bounds): New.
(chkp_bound_vars): New.
(chkp_reg_addr_bounds): New.
(chkp_incomplete_bounds_map): New.
(chkp_static_var_bounds): New.
(in_chkp_pass): New.
(CHKP_BOUND_TMP_NAME): New.
(CHKP_SIZE_TMP_NAME): New.
(CHKP_BOUNDS_OF_SYMBOL_PREFIX): New.
(CHKP_STRING_BOUNDS_PREFIX): New.
(CHKP_VAR_BOUNDS_PREFIX): New.
(CHKP_NONE_BOUNDS_VAR_NAME): New.
(chkp_get_tmp_var): New.
(chkp_get_tmp_reg): New.
(chkp_get_size_tmp_var): New.
(chkp_register_addr_bounds): New.
(chkp_get_registered_addr_bounds): New.
(chkp_mark_completed_bounds): New.
(chkp_completed_bounds): New.
(chkp_erase_completed_bounds): New.
(chkp_register_incomplete_bounds): New.
(chkp_incomplete_bounds): New.
(chkp_erase_incomplete_bounds): New.
(chkp_mark_invalid_bounds): New.
(chkp_valid_bounds): New.
(chkp_mark_invalid_bounds_walker): New.
(chkp_build_addr_expr): New.
(chkp_get_entry_block): New.
(chkp_get_bounds_var): New.
(chkp_get_registered_bounds): New.
(chkp_check_lower): New.
(chkp_check_upper): New.
(chkp_check_mem_access): New.
(chkp_build_component_ref): New.
(chkp_build_array_ref): New.
(chkp_can_be_shared): New.
(chkp_make_bounds): New.
(chkp_get_none_bounds_var): New.
(chkp_get_zero_bounds): New.
(chkp_get_none_bounds): New.
(chkp_get_invalid_op_bounds): New.
(chkp_get_nonpointer_load_bounds): New.
(chkp_get_next_bounds_parm): New.
(chkp_build_bndldx): New.
(chkp_make_static_bounds): New.
(chkp_generate_extern_var_bounds): New.
(chkp_intersect_bounds): New.
(chkp_may_narrow_to_field): New.
(chkp_narrow_bounds_for_field): New.
(chkp_narrow_bounds_to_field): New.
(chkp_walk_pointer_assignments): New.
(chkp_init): New.
diff --git a/gcc/tree-chkp.c b/gcc/tree-chkp.c
index 4ab8de6..c65334c 100644
--- a/gcc/tree-chkp.c
+++ b/gcc/tree-chkp.c
@@ -65,6 +65,10 @@ along with GCC; see the file COPYING3. If not see
#include "rtl.h" /* For MEM_P, assign_temp. */
#include "tree-dfa.h"
+typedef void (*assign_handler)(tree, tree, void *);
+
+static tree chkp_get_zero_bounds ();
+
#define chkp_bndldx_fndecl \
(targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
#define chkp_bndstx_fndecl \
@@ -88,11 +92,37 @@ along with GCC; see the file COPYING3. If not see
#define chkp_extract_upper_fndecl \
(targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
-static GTY (()) tree chkp_zero_bounds_var;
+static GTY (()) tree chkp_uintptr_type;
+static GTY (()) tree chkp_zero_bounds_var;
+static GTY (()) tree chkp_none_bounds_var;
+
+static GTY (()) basic_block entry_block;
+static GTY (()) tree zero_bounds;
+static GTY (()) tree none_bounds;
+static GTY (()) tree incomplete_bounds;
+static GTY (()) tree tmp_var;
+static GTY (()) tree size_tmp_var;
+static GTY (()) bitmap chkp_abnormal_copies;
+
+struct hash_set<tree> *chkp_invalid_bounds;
+struct hash_set<tree> *chkp_completed_bounds_set;
+struct hash_map<tree, tree> *chkp_reg_bounds;
+struct hash_map<tree, tree> *chkp_bound_vars;
+struct hash_map<tree, tree> *chkp_reg_addr_bounds;
+struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
struct hash_map<tree, tree> *chkp_bounds_map;
+struct hash_map<tree, tree> *chkp_static_var_bounds;
+
+static bool in_chkp_pass;
+#define CHKP_BOUND_TMP_NAME "__bound_tmp"
+#define CHKP_SIZE_TMP_NAME "__size_tmp"
+#define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
+#define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
+#define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
#define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
+#define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
/* Return 1 if function FNDECL is instrumented by Pointer
Bounds Checker. */
@@ -172,6 +202,130 @@ chkp_marked_stmt_p (gimple s)
return gimple_plf (s, GF_PLF_1);
}
+/* Get var to be used for bound temps. */
+static tree
+chkp_get_tmp_var (void)
+{
+ if (!tmp_var)
+ tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
+
+ return tmp_var;
+}
+
+/* Get SSA_NAME to be used as temp. */
+static tree
+chkp_get_tmp_reg (gimple stmt)
+{
+ if (in_chkp_pass)
+ return make_ssa_name (chkp_get_tmp_var (), stmt);
+
+ return make_temp_ssa_name (pointer_bounds_type_node, stmt,
+ CHKP_BOUND_TMP_NAME);
+}
+
+/* Get var to be used for size temps. */
+static tree
+chkp_get_size_tmp_var (void)
+{
+ if (!size_tmp_var)
+ size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
+
+ return size_tmp_var;
+}
+
+/* Register bounds BND for address of OBJ. */
+static void
+chkp_register_addr_bounds (tree obj, tree bnd)
+{
+ if (bnd == incomplete_bounds)
+ return;
+
+ chkp_reg_addr_bounds->put (obj, bnd);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Regsitered bound ");
+ print_generic_expr (dump_file, bnd, 0);
+ fprintf (dump_file, " for address of ");
+ print_generic_expr (dump_file, obj, 0);
+ fprintf (dump_file, "\n");
+ }
+}
+
+/* Return bounds registered for address of OBJ. */
+static tree
+chkp_get_registered_addr_bounds (tree obj)
+{
+ tree *slot = chkp_reg_addr_bounds->get (obj);
+ return slot ? *slot : NULL_TREE;
+}
+
+/* Mark BOUNDS as completed. */
+static void
+chkp_mark_completed_bounds (tree bounds)
+{
+ chkp_completed_bounds_set->add (bounds);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Marked bounds ");
+ print_generic_expr (dump_file, bounds, 0);
+ fprintf (dump_file, " as completed\n");
+ }
+}
+
+/* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
+static bool
+chkp_completed_bounds (tree bounds)
+{
+ return chkp_completed_bounds_set->contains (bounds);
+}
+
+/* Clear completed bound marks. */
+static void
+chkp_erase_completed_bounds (void)
+{
+ delete chkp_completed_bounds_set;
+ chkp_completed_bounds_set = new hash_set<tree>;
+}
+
+/* Mark BOUNDS associated with PTR as incomplete. */
+static void
+chkp_register_incomplete_bounds (tree bounds, tree ptr)
+{
+ chkp_incomplete_bounds_map->put (bounds, ptr);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Regsitered incomplete bounds ");
+ print_generic_expr (dump_file, bounds, 0);
+ fprintf (dump_file, " for ");
+ print_generic_expr (dump_file, ptr, 0);
+ fprintf (dump_file, "\n");
+ }
+}
+
+/* Return 1 if BOUNDS are incomplete and 0 otherwise. */
+static bool
+chkp_incomplete_bounds (tree bounds)
+{
+ if (bounds == incomplete_bounds)
+ return true;
+
+ if (chkp_completed_bounds (bounds))
+ return false;
+
+ return chkp_incomplete_bounds_map->get (bounds) != NULL;
+}
+
+/* Clear incomplete bound marks. */
+static void
+chkp_erase_incomplete_bounds (void)
+{
+ delete chkp_incomplete_bounds_map;
+ chkp_incomplete_bounds_map = new hash_map<tree, tree>;
+}
+
/* Build and return bndmk call which creates bounds for structure
pointed by PTR. Structure should have complete type. */
tree
@@ -191,6 +345,45 @@ chkp_make_bounds_for_struct_addr (tree ptr)
2, ptr, size);
}
+/* Mark BOUNDS as invalid. */
+static void
+chkp_mark_invalid_bounds (tree bounds)
+{
+ chkp_invalid_bounds->add (bounds);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Marked bounds ");
+ print_generic_expr (dump_file, bounds, 0);
+ fprintf (dump_file, " as invalid\n");
+ }
+}
+
+/* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
+static bool
+chkp_valid_bounds (tree bounds)
+{
+ if (bounds == zero_bounds || bounds == none_bounds)
+ return false;
+
+ return !chkp_invalid_bounds->contains (bounds);
+}
+
+/* Helper function for chkp_finish_incomplete_bounds.
+ Marks all incomplete bounds as invalid. */
+bool
+chkp_mark_invalid_bounds_walker (tree const &bounds,
+ tree *slot ATTRIBUTE_UNUSED,
+ void *res ATTRIBUTE_UNUSED)
+{
+ if (!chkp_completed_bounds (bounds))
+ {
+ chkp_mark_invalid_bounds (bounds);
+ chkp_mark_completed_bounds (bounds);
+ }
+ return true;
+}
+
/* Return 1 if type TYPE is a pointer type or a
structure having a pointer type as one of its fields.
Otherwise return 0. */
@@ -258,6 +451,59 @@ chkp_set_bounds (tree node, tree val)
chkp_bounds_map->put (node, val);
}
+/* Build and return ADDR_EXPR for specified object OBJ. */
+static tree
+chkp_build_addr_expr (tree obj)
+{
+ return TREE_CODE (obj) == TARGET_MEM_REF
+ ? tree_mem_ref_addr (ptr_type_node, obj)
+ : build_fold_addr_expr (obj);
+}
+
+/* Return entry block to be used for checker initialization code.
+ Create new block if required. */
+static basic_block
+chkp_get_entry_block (void)
+{
+ if (!entry_block)
+ entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
+
+ return entry_block;
+}
+
+/* Return a bounds var to be used for pointer var PTR_VAR. */
+static tree
+chkp_get_bounds_var (tree ptr_var)
+{
+ tree bnd_var;
+ tree *slot;
+
+ slot = chkp_bound_vars->get (ptr_var);
+ if (slot)
+ bnd_var = *slot;
+ else
+ {
+ bnd_var = create_tmp_reg (pointer_bounds_type_node,
+ CHKP_BOUND_TMP_NAME);
+ chkp_bound_vars->put (ptr_var, bnd_var);
+ }
+
+ return bnd_var;
+}
+
+/* Get bounds registered for object PTR in global bounds table. */
+static tree
+chkp_get_registered_bounds (tree ptr)
+{
+ tree *slot;
+
+ if (!chkp_reg_bounds)
+ return NULL_TREE;
+
+ slot = chkp_reg_bounds->get (ptr);
+ return slot ? *slot : NULL_TREE;
+}
+
/* Force OP to be suitable for using as an argument for call.
New statements (if any) go to SEQ. */
static tree
@@ -276,6 +522,188 @@ chkp_force_gimple_call_op (tree op, gimple_seq *seq)
return op;
}
+/* Generate lower bound check for memory access by ADDR.
+ Check is inserted before the position pointed by ITER.
+ DIRFLAG indicates whether memory access is load or store. */
+static void
+chkp_check_lower (tree addr, tree bounds,
+ gimple_stmt_iterator iter,
+ location_t location ATTRIBUTE_UNUSED,
+ tree dirflag)
+{
+ gimple_seq seq;
+ gimple check;
+ tree node;
+
+ if (bounds == chkp_get_zero_bounds ())
+ return;
+
+ if (dirflag == integer_zero_node
+ && !flag_chkp_check_read)
+ return;
+
+ if (dirflag == integer_one_node
+ && !flag_chkp_check_write)
+ return;
+
+ seq = NULL;
+
+ node = chkp_force_gimple_call_op (addr, &seq);
+
+ check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
+ chkp_mark_stmt (check);
+ gimple_call_set_with_bounds (check, true);
+ gimple_seq_add_stmt (&seq, check);
+
+ gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ gimple before = gsi_stmt (iter);
+ fprintf (dump_file, "Generated lower bound check for statement ");
+ print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
+ fprintf (dump_file, " ");
+ print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
+ }
+}
+
+/* Generate upper bound check for memory access by ADDR.
+ Check is inserted before the position pointed by ITER.
+ DIRFLAG indicates whether memory access is load or store. */
+static void
+chkp_check_upper (tree addr, tree bounds,
+ gimple_stmt_iterator iter,
+ location_t location ATTRIBUTE_UNUSED,
+ tree dirflag)
+{
+ gimple_seq seq;
+ gimple check;
+ tree node;
+
+ if (bounds == chkp_get_zero_bounds ())
+ return;
+
+ if (dirflag == integer_zero_node
+ && !flag_chkp_check_read)
+ return;
+
+ if (dirflag == integer_one_node
+ && !flag_chkp_check_write)
+ return;
+
+ seq = NULL;
+
+ node = chkp_force_gimple_call_op (addr, &seq);
+
+ check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
+ chkp_mark_stmt (check);
+ gimple_call_set_with_bounds (check, true);
+ gimple_seq_add_stmt (&seq, check);
+
+ gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ gimple before = gsi_stmt (iter);
+ fprintf (dump_file, "Generated upper bound check for statement ");
+ print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
+ fprintf (dump_file, " ");
+ print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
+ }
+}
+
+/* Generate lower and upper bound checks for memory access
+ to memory slot [FIRST, LAST] against BOUNDS. Checks
+ are inserted before the position pointed by ITER.
+ DIRFLAG indicates whether memory access is load or store. */
+static void
+chkp_check_mem_access (tree first, tree last, tree bounds,
+ gimple_stmt_iterator iter,
+ location_t location,
+ tree dirflag)
+{
+ chkp_check_lower (first, bounds, iter, location, dirflag);
+ chkp_check_upper (last, bounds, iter, location, dirflag);
+}
+
+/* Return COMPONENT_REF accessing FIELD in OBJ. */
+static tree
+chkp_build_component_ref (tree obj, tree field)
+{
+ tree res;
+
+ /* If object is TMR then we do not use component_ref but
+ add offset instead. We need it to be able to get addr
+ of the result later. */
+ if (TREE_CODE (obj) == TARGET_MEM_REF)
+ {
+ tree offs = TMR_OFFSET (obj);
+ offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
+ offs, DECL_FIELD_OFFSET (field));
+
+ gcc_assert (offs);
+
+ res = copy_node (obj);
+ TREE_TYPE (res) = TREE_TYPE (field);
+ TMR_OFFSET (res) = offs;
+ }
+ else
+ res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
+
+ return res;
+}
+
+/* Return ARRAY_REF for array ARR and index IDX with
+ specified element type ETYPE and element size ESIZE. */
+static tree
+chkp_build_array_ref (tree arr, tree etype, tree esize,
+ unsigned HOST_WIDE_INT idx)
+{
+ tree index = build_int_cst (size_type_node, idx);
+ tree res;
+
+ /* If object is TMR then we do not use array_ref but
+ add offset instead. We need it to be able to get addr
+ of the result later. */
+ if (TREE_CODE (arr) == TARGET_MEM_REF)
+ {
+ tree offs = TMR_OFFSET (arr);
+
+ esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
+ esize, index);
+ gcc_assert (esize);
+
+ offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
+ offs, esize);
+ gcc_assert (offs);
+
+ res = copy_node (arr);
+ TREE_TYPE (res) = etype;
+ TMR_OFFSET (res) = offs;
+ }
+ else
+ res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
+
+ return res;
+}
+
+/* Return true when T can be shared. */
+
+static bool
+chkp_can_be_shared (tree t)
+{
+ if (IS_TYPE_OR_DECL_P (t)
+ || is_gimple_min_invariant (t)
+ || TREE_CODE (t) == SSA_NAME
+ || t == error_mark_node
+ || TREE_CODE (t) == IDENTIFIER_NODE
+ || TREE_CODE (t) == CASE_LABEL_EXPR
+ || DECL_P (t))
+ return true;
+
+ return false;
+}
+
/* Fill HAVE_BOUND output bitmap with information about
bounds required for object of type TYPE.
@@ -400,6 +828,59 @@ chkp_make_static_const_bounds (HOST_WIDE_INT lb,
return var;
}
+/* Generate code to make bounds with specified lower bound LB and SIZE.
+ If AFTER is 1 then code is inserted after the position pointed by ITER,
+ otherwise code is inserted before the position pointed by ITER.
+ If ITER is NULL then code is added to entry block. */
+static tree
+chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
+{
+ gimple_seq seq;
+ gimple_stmt_iterator gsi;
+ gimple stmt;
+ tree bounds;
+
+ if (iter)
+ gsi = *iter;
+ else
+ gsi = gsi_start_bb (chkp_get_entry_block ());
+
+ seq = NULL;
+
+ lb = chkp_force_gimple_call_op (lb, &seq);
+ size = chkp_force_gimple_call_op (size, &seq);
+
+ stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
+ chkp_mark_stmt (stmt);
+
+ bounds = chkp_get_tmp_reg (stmt);
+ gimple_call_set_lhs (stmt, bounds);
+
+ gimple_seq_add_stmt (&seq, stmt);
+
+ if (iter && after)
+ gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
+ else
+ gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Made bounds: ");
+ print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+ if (iter)
+ {
+ fprintf (dump_file, " inserted before statement: ");
+ print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
+ }
+ else
+ fprintf (dump_file, " at function entry\n");
+ }
+
+ /* update_stmt (stmt); */
+
+ return bounds;
+}
+
/* Return var holding zero bounds. */
tree
chkp_get_zero_bounds_var (void)
@@ -411,6 +892,91 @@ chkp_get_zero_bounds_var (void)
return chkp_zero_bounds_var;
}
+/* Return var holding none bounds. */
+static tree
+chkp_get_none_bounds_var (void)
+{
+ if (!chkp_none_bounds_var)
+ chkp_none_bounds_var
+ = chkp_make_static_const_bounds (-1, 0,
+ CHKP_NONE_BOUNDS_VAR_NAME);
+ return chkp_none_bounds_var;
+}
+
+/* Return SSA_NAME used to represent zero bounds. */
+static tree
+chkp_get_zero_bounds (void)
+{
+ if (zero_bounds)
+ return zero_bounds;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Creating zero bounds...");
+
+ if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
+ || flag_chkp_use_static_const_bounds > 0)
+ {
+ gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
+ gimple stmt;
+
+ zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
+ stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
+ gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ }
+ else
+ zero_bounds = chkp_make_bounds (integer_zero_node,
+ integer_zero_node,
+ NULL,
+ false);
+
+ return zero_bounds;
+}
+
+/* Return SSA_NAME used to represent none bounds. */
+static tree
+chkp_get_none_bounds (void)
+{
+ if (none_bounds)
+ return none_bounds;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Creating none bounds...");
+
+
+ if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
+ || flag_chkp_use_static_const_bounds > 0)
+ {
+ gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
+ gimple stmt;
+
+ none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
+ stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
+ gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ }
+ else
+ none_bounds = chkp_make_bounds (integer_minus_one_node,
+ build_int_cst (size_type_node, 2),
+ NULL,
+ false);
+
+ return none_bounds;
+}
+
+/* Return bounds to be used as a result of an operation which
+ should not create a pointer (e.g. MULT_EXPR). */
+static tree
+chkp_get_invalid_op_bounds (void)
+{
+ return chkp_get_zero_bounds ();
+}
+
+/* Return bounds to be used for loads of non-pointer values. */
+static tree
+chkp_get_nonpointer_load_bounds (void)
+{
+ return chkp_get_zero_bounds ();
+}
+
/* Return bounds used as returned by call
which produced SSA name VAL. */
gimple
@@ -431,6 +997,22 @@ chkp_retbnd_call_by_val (tree val)
return NULL;
}
+/* Check that the parameter next to the given PARM is bounds
+ and return its default SSA_NAME (create it if required). */
+static tree
+chkp_get_next_bounds_parm (tree parm)
+{
+ tree bounds = TREE_CHAIN (parm);
+ gcc_assert (POINTER_BOUNDS_P (bounds));
+ bounds = ssa_default_def (cfun, bounds);
+ if (!bounds)
+ {
+ bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
+ set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
+ }
+ return bounds;
+}
+
/* Build and return CALL_EXPR for bndldx builtin with specified
arguments. */
tree
@@ -445,6 +1027,41 @@ chkp_build_bndldx_call (tree addr, tree ptr)
return call;
}
+/* Insert code to load bounds for PTR located at ADDR.
+ Code is inserted after position pointed by GSI.
+ Loaded bounds are returned. */
+static tree
+chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
+{
+ gimple_seq seq;
+ gimple stmt;
+ tree bounds;
+
+ seq = NULL;
+
+ addr = chkp_force_gimple_call_op (addr, &seq);
+ ptr = chkp_force_gimple_call_op (ptr, &seq);
+
+ stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
+ chkp_mark_stmt (stmt);
+ bounds = chkp_get_tmp_reg (stmt);
+ gimple_call_set_lhs (stmt, bounds);
+
+ gimple_seq_add_stmt (&seq, stmt);
+
+ gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Generated bndldx for pointer ");
+ print_generic_expr (dump_file, ptr, 0);
+ fprintf (dump_file, ": ");
+ print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+ }
+
+ return bounds;
+}
+
/* Build and return CALL_EXPR for bndstx builtin with specified
arguments. */
tree
@@ -501,6 +1118,153 @@ chkp_build_make_bounds_call (tree lower_bound, tree size)
call, 2, lower_bound, size);
}
+/* Create a static bounds var for the specified OBJ, which
+ is either a VAR_DECL or a string constant. */
+static tree
+chkp_make_static_bounds (tree obj)
+{
+ static int string_id = 1;
+ static int var_id = 1;
+ tree *slot;
+ const char *var_name;
+ char *bnd_var_name;
+ tree bnd_var;
+
+ /* First check if we already have required var. */
+ if (chkp_static_var_bounds)
+ {
+ slot = chkp_static_var_bounds->get (obj);
+ if (slot)
+ return *slot;
+ }
+
+ /* Build decl for bounds var. */
+ if (TREE_CODE (obj) == VAR_DECL)
+ {
+ if (DECL_IGNORED_P (obj))
+ {
+ bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
+ sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
+ }
+ else
+ {
+ var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
+
+ /* For hidden symbols we want to skip first '*' char. */
+ if (*var_name == '*')
+ var_name++;
+
+ bnd_var_name = (char *) xmalloc (strlen (var_name)
+ + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
+ strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
+ strcat (bnd_var_name, var_name);
+ }
+
+ bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier (bnd_var_name),
+ pointer_bounds_type_node);
+
+ /* Address of the obj will be used as lower bound. */
+ TREE_ADDRESSABLE (obj) = 1;
+ }
+ else
+ {
+ bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
+ sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
+
+ bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier (bnd_var_name),
+ pointer_bounds_type_node);
+ }
+
+ TREE_PUBLIC (bnd_var) = 0;
+ TREE_USED (bnd_var) = 1;
+ TREE_READONLY (bnd_var) = 0;
+ TREE_STATIC (bnd_var) = 1;
+ TREE_ADDRESSABLE (bnd_var) = 0;
+ DECL_ARTIFICIAL (bnd_var) = 1;
+ DECL_COMMON (bnd_var) = 1;
+ DECL_COMDAT (bnd_var) = 1;
+ DECL_READ_P (bnd_var) = 1;
+ DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
+ /* Force output similar to constant bounds.
+ See chkp_make_static_const_bounds. */
+ varpool_node::get_create (bnd_var)->force_output = 1;
+ /* Mark symbol as requiring bounds initialization. */
+ varpool_node::get_create (bnd_var)->need_bounds_init = 1;
+ varpool_node::finalize_decl (bnd_var);
+
+ /* Add created var to the map to use it for other references
+ to obj. */
+ if (!chkp_static_var_bounds)
+ chkp_static_var_bounds = new hash_map<tree, tree>;
+
+ chkp_static_var_bounds->put (obj, bnd_var);
+
+ return bnd_var;
+}
+
+/* When VAR has an incomplete type we cannot get its size to
+ compute bounds. In such cases we use a checker
+ builtin call which determines the object size at runtime. */
+static tree
+chkp_generate_extern_var_bounds (tree var)
+{
+ tree bounds, size_reloc, lb, size, max_size, cond;
+ gimple_stmt_iterator gsi;
+ gimple_seq seq = NULL;
+ gimple stmt;
+
+ /* If instrumentation is not enabled for vars having
+ incomplete type then just return zero bounds to avoid
+ checks for this var. */
+ if (!flag_chkp_incomplete_type)
+ return chkp_get_zero_bounds ();
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Generating bounds for extern symbol '");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "'\n");
+ }
+
+ stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
+
+ size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
+ gimple_call_set_lhs (stmt, size_reloc);
+
+ gimple_seq_add_stmt (&seq, stmt);
+
+ lb = chkp_build_addr_expr (var);
+ size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
+
+ if (flag_chkp_zero_dynamic_size_as_infinite)
+ {
+ /* We should check that size relocation was resolved.
+ If it was not then use maximum possible size for the var. */
+ max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
+ fold_convert (chkp_uintptr_type, lb));
+ max_size = chkp_force_gimple_call_op (max_size, &seq);
+
+ cond = build2 (NE_EXPR, boolean_type_node, size_reloc, integer_zero_node);
+ stmt = gimple_build_assign_with_ops (COND_EXPR, size,
+ cond, size_reloc, max_size);
+ gimple_seq_add_stmt (&seq, stmt);
+ }
+ else
+ {
+ stmt = gimple_build_assign (size, size_reloc);
+ gimple_seq_add_stmt (&seq, stmt);
+ }
+
+ gsi = gsi_start_bb (chkp_get_entry_block ());
+ gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
+
+ bounds = chkp_make_bounds (lb, size, &gsi, true);
+
+ return bounds;
+}
+
/* Return 1 if TYPE has fields with zero size or fields
marked with chkp_variable_size attribute. */
bool
@@ -525,4 +1289,255 @@ chkp_variable_size_type (tree type)
return res;
}
+/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
+ return the result. If ITER is not NULL then code is inserted
+ before the position pointed by ITER. Otherwise code is added to
+ the entry block. */
+static tree
+chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
+{
+ if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
+ return bounds2 ? bounds2 : bounds1;
+ else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
+ return bounds1;
+ else
+ {
+ gimple_seq seq;
+ gimple stmt;
+ tree bounds;
+
+ seq = NULL;
+
+ stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
+ chkp_mark_stmt (stmt);
+
+ bounds = chkp_get_tmp_reg (stmt);
+ gimple_call_set_lhs (stmt, bounds);
+
+ gimple_seq_add_stmt (&seq, stmt);
+
+ /* We are probably doing narrowing for a constant expression.
+ In such a case ITER may be undefined. */
+ if (!iter)
+ {
+ gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
+ iter = &gsi;
+ gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
+ }
+ else
+ gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Bounds intersection: ");
+ print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+ fprintf (dump_file, " inserted before statement: ");
+ print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
+ TDF_VOPS|TDF_MEMSYMS);
+ }
+
+ return bounds;
+ }
+}
+
+/* Return 1 if we are allowed to narrow bounds for addressed FIELD
+ and 0 otherwise. */
+static bool
+chkp_may_narrow_to_field (tree field)
+{
+ return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
+ && tree_to_uhwi (DECL_SIZE (field)) != 0
+ && (!DECL_FIELD_OFFSET (field)
+ || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
+ && (!DECL_FIELD_BIT_OFFSET (field)
+ || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
+ && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
+ && !chkp_variable_size_type (TREE_TYPE (field));
+}
+
+/* Return 1 if bounds for FIELD should be narrowed to
+ field's own size. */
+static bool
+chkp_narrow_bounds_for_field (tree field)
+{
+ HOST_WIDE_INT offs;
+ HOST_WIDE_INT bit_offs;
+
+ if (!chkp_may_narrow_to_field (field))
+ return false;
+
+ /* Accesses to compiler-generated fields should not cause
+ bounds narrowing. */
+ if (DECL_ARTIFICIAL (field))
+ return false;
+
+ offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
+ bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
+
+ return (flag_chkp_narrow_bounds
+ && (flag_chkp_first_field_has_own_bounds
+ || offs
+ || bit_offs));
+}
+
+/* Perform narrowing for BOUNDS using bounds computed for field
+ access COMPONENT. ITER meaning is the same as for
+ chkp_intersect_bounds. */
+static tree
+chkp_narrow_bounds_to_field (tree bounds, tree component,
+ gimple_stmt_iterator *iter)
+{
+ tree field = TREE_OPERAND (component, 1);
+ tree size = DECL_SIZE_UNIT (field);
+ tree field_ptr = chkp_build_addr_expr (component);
+ tree field_bounds;
+
+ field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
+
+ return chkp_intersect_bounds (field_bounds, bounds, iter);
+}
+
+/* Helper function which checks the type of RHS and finds all pointers in
+ it. For each found pointer we build its accesses in LHS and RHS
+ objects and then call HANDLER for them. The function is used to copy
+ or initialize bounds for a copied object. */
+static void
+chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
+ assign_handler handler)
+{
+ tree type = TREE_TYPE (lhs);
+
+ /* We have nothing to do with clobbers. */
+ if (TREE_CLOBBER_P (rhs))
+ return;
+
+ if (BOUNDED_TYPE_P (type))
+ handler (lhs, rhs, arg);
+ else if (RECORD_OR_UNION_TYPE_P (type))
+ {
+ tree field;
+
+ if (TREE_CODE (rhs) == CONSTRUCTOR)
+ {
+ unsigned HOST_WIDE_INT cnt;
+ tree val;
+
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
+ {
+ if (chkp_type_has_pointer (TREE_TYPE (field)))
+ {
+ tree lhs_field = chkp_build_component_ref (lhs, field);
+ chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
+ }
+ }
+ }
+ else
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
+ if (TREE_CODE (field) == FIELD_DECL
+ && chkp_type_has_pointer (TREE_TYPE (field)))
+ {
+ tree rhs_field = chkp_build_component_ref (rhs, field);
+ tree lhs_field = chkp_build_component_ref (lhs, field);
+ chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
+ }
+ }
+ else if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ unsigned HOST_WIDE_INT cur = 0;
+ tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
+ tree etype = TREE_TYPE (type);
+ tree esize = TYPE_SIZE (etype);
+
+ if (TREE_CODE (rhs) == CONSTRUCTOR)
+ {
+ unsigned HOST_WIDE_INT cnt;
+ tree purp, val, lhs_elem;
+
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
+ {
+ if (purp && TREE_CODE (purp) == RANGE_EXPR)
+ {
+ tree lo_index = TREE_OPERAND (purp, 0);
+ tree hi_index = TREE_OPERAND (purp, 1);
+
+ for (cur = (unsigned)tree_to_uhwi (lo_index);
+ cur <= (unsigned)tree_to_uhwi (hi_index);
+ cur++)
+ {
+ lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
+ chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
+ }
+ }
+ else
+ {
+ if (purp)
+ {
+ gcc_assert (TREE_CODE (purp) == INTEGER_CST);
+ cur = tree_to_uhwi (purp);
+ }
+
+ lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
+
+ chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
+ }
+ }
+ }
+ /* Copy array only when size is known. */
+ else if (maxval && !integer_minus_onep (maxval))
+ for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
+ {
+ tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
+ tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
+ chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
+ }
+ }
+ else
+ internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
+ get_tree_code_name (TREE_CODE (type)));
+}
+
+/* Initialize pass. */
+static void
+chkp_init (void)
+{
+ basic_block bb;
+ gimple_stmt_iterator i;
+
+ in_chkp_pass = true;
+
+ for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
+ for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
+ chkp_unmark_stmt (gsi_stmt (i));
+
+ chkp_invalid_bounds = new hash_set<tree>;
+ chkp_completed_bounds_set = new hash_set<tree>;
+ delete chkp_reg_bounds;
+ chkp_reg_bounds = new hash_map<tree, tree>;
+ delete chkp_bound_vars;
+ chkp_bound_vars = new hash_map<tree, tree>;
+ chkp_reg_addr_bounds = new hash_map<tree, tree>;
+ chkp_incomplete_bounds_map = new hash_map<tree, tree>;
+ delete chkp_bounds_map;
+ chkp_bounds_map = new hash_map<tree, tree>;
+ chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
+
+ entry_block = NULL;
+ zero_bounds = NULL_TREE;
+ none_bounds = NULL_TREE;
+ incomplete_bounds = integer_zero_node;
+ tmp_var = NULL_TREE;
+ size_tmp_var = NULL_TREE;
+
+ chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
+
+ /* We create these constant bounds once for each object file.
+ These symbols go to comdat section and result in single copy
+ of each one in the final binary. */
+ chkp_get_zero_bounds_var ();
+ chkp_get_none_bounds_var ();
+
+ calculate_dominance_info (CDI_DOMINATORS);
+ calculate_dominance_info (CDI_POST_DOMINATORS);
+}
+
#include "gt-tree-chkp.h"
This patch introduces helper functions used by instrumentation. These helpers are used to access local structures, generate typical expressions, vars and statements. I also included local data initializer into this patch.
Thanks,
Ilya
--
2014-10-08 Ilya Enkovich <***@intel.com>
* tree-chkp.c (assign_handler): New.
(chkp_get_zero_bounds): New.
(chkp_uintptr_type): New.
(chkp_none_bounds_var): New.
(entry_block): New.
(zero_bounds): New.
(none_bounds): New.
(incomplete_bounds): New.
(tmp_var): New.
(size_tmp_var): New.
(chkp_abnormal_copies): New.
(chkp_invalid_bounds): New.
(chkp_completed_bounds_set): New.
(chkp_reg_bounds): New.
(chkp_bound_vars): New.
(chkp_reg_addr_bounds): New.
(chkp_incomplete_bounds_map): New.
(chkp_static_var_bounds): New.
(in_chkp_pass): New.
(CHKP_BOUND_TMP_NAME): New.
(CHKP_SIZE_TMP_NAME): New.
(CHKP_BOUNDS_OF_SYMBOL_PREFIX): New.
(CHKP_STRING_BOUNDS_PREFIX): New.
(CHKP_VAR_BOUNDS_PREFIX): New.
(CHKP_NONE_BOUNDS_VAR_NAME): New.
(chkp_get_tmp_var): New.
(chkp_get_tmp_reg): New.
(chkp_get_size_tmp_var): New.
(chkp_register_addr_bounds): New.
(chkp_get_registered_addr_bounds): New.
(chkp_mark_completed_bounds): New.
(chkp_completed_bounds): New.
(chkp_erase_completed_bounds): New.
(chkp_register_incomplete_bounds): New.
(chkp_incomplete_bounds): New.
(chkp_erase_incomplete_bounds): New.
(chkp_mark_invalid_bounds): New.
(chkp_valid_bounds): New.
(chkp_mark_invalid_bounds_walker): New.
(chkp_build_addr_expr): New.
(chkp_get_entry_block): New.
(chkp_get_bounds_var): New.
(chkp_get_registered_bounds): New.
(chkp_check_lower): New.
(chkp_check_upper): New.
(chkp_check_mem_access): New.
(chkp_build_component_ref): New.
(chkp_build_array_ref): New.
(chkp_can_be_shared): New.
(chkp_make_bounds): New.
(chkp_get_none_bounds_var): New.
(chkp_get_zero_bounds): New.
(chkp_get_none_bounds): New.
(chkp_get_invalid_op_bounds): New.
(chkp_get_nonpointer_load_bounds): New.
(chkp_get_next_bounds_parm): New.
(chkp_build_bndldx): New.
(chkp_make_static_bounds): New.
(chkp_generate_extern_var_bounds): New.
(chkp_intersect_bounds): New.
(chkp_may_narrow_to_field): New.
(chkp_narrow_bounds_for_field): New.
(chkp_narrow_bounds_to_field): New.
(chkp_walk_pointer_assignments): New.
(chkp_init): New.
diff --git a/gcc/tree-chkp.c b/gcc/tree-chkp.c
index 4ab8de6..c65334c 100644
--- a/gcc/tree-chkp.c
+++ b/gcc/tree-chkp.c
@@ -65,6 +65,10 @@ along with GCC; see the file COPYING3. If not see
#include "rtl.h" /* For MEM_P, assign_temp. */
#include "tree-dfa.h"
+typedef void (*assign_handler)(tree, tree, void *);
+
+static tree chkp_get_zero_bounds ();
+
#define chkp_bndldx_fndecl \
(targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
#define chkp_bndstx_fndecl \
@@ -88,11 +92,37 @@ along with GCC; see the file COPYING3. If not see
#define chkp_extract_upper_fndecl \
(targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
-static GTY (()) tree chkp_zero_bounds_var;
+static GTY (()) tree chkp_uintptr_type;
+static GTY (()) tree chkp_zero_bounds_var;
+static GTY (()) tree chkp_none_bounds_var;
+
+static GTY (()) basic_block entry_block;
+static GTY (()) tree zero_bounds;
+static GTY (()) tree none_bounds;
+static GTY (()) tree incomplete_bounds;
+static GTY (()) tree tmp_var;
+static GTY (()) tree size_tmp_var;
+static GTY (()) bitmap chkp_abnormal_copies;
+
+struct hash_set<tree> *chkp_invalid_bounds;
+struct hash_set<tree> *chkp_completed_bounds_set;
+struct hash_map<tree, tree> *chkp_reg_bounds;
+struct hash_map<tree, tree> *chkp_bound_vars;
+struct hash_map<tree, tree> *chkp_reg_addr_bounds;
+struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
struct hash_map<tree, tree> *chkp_bounds_map;
+struct hash_map<tree, tree> *chkp_static_var_bounds;
+
+static bool in_chkp_pass;
+#define CHKP_BOUND_TMP_NAME "__bound_tmp"
+#define CHKP_SIZE_TMP_NAME "__size_tmp"
+#define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
+#define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
+#define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
#define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
+#define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
/* Return 1 if function FNDECL is instrumented by Pointer
Bounds Checker. */
@@ -172,6 +202,130 @@ chkp_marked_stmt_p (gimple s)
return gimple_plf (s, GF_PLF_1);
}
+/* Get var to be used for bound temps. */
+static tree
+chkp_get_tmp_var (void)
+{
+ if (!tmp_var)
+ tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
+
+ return tmp_var;
+}
+
+/* Get SSA_NAME to be used as temp. */
+static tree
+chkp_get_tmp_reg (gimple stmt)
+{
+ if (in_chkp_pass)
+ return make_ssa_name (chkp_get_tmp_var (), stmt);
+
+ return make_temp_ssa_name (pointer_bounds_type_node, stmt,
+ CHKP_BOUND_TMP_NAME);
+}
+
+/* Get var to be used for size temps. */
+static tree
+chkp_get_size_tmp_var (void)
+{
+ if (!size_tmp_var)
+ size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
+
+ return size_tmp_var;
+}
+
+/* Register bounds BND for address of OBJ. */
+static void
+chkp_register_addr_bounds (tree obj, tree bnd)
+{
+ if (bnd == incomplete_bounds)
+ return;
+
+ chkp_reg_addr_bounds->put (obj, bnd);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Regsitered bound ");
+ print_generic_expr (dump_file, bnd, 0);
+ fprintf (dump_file, " for address of ");
+ print_generic_expr (dump_file, obj, 0);
+ fprintf (dump_file, "\n");
+ }
+}
+
+/* Return bounds registered for address of OBJ. */
+static tree
+chkp_get_registered_addr_bounds (tree obj)
+{
+ tree *slot = chkp_reg_addr_bounds->get (obj);
+ return slot ? *slot : NULL_TREE;
+}
+
+/* Mark BOUNDS as completed. */
+static void
+chkp_mark_completed_bounds (tree bounds)
+{
+ chkp_completed_bounds_set->add (bounds);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Marked bounds ");
+ print_generic_expr (dump_file, bounds, 0);
+ fprintf (dump_file, " as completed\n");
+ }
+}
+
+/* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
+static bool
+chkp_completed_bounds (tree bounds)
+{
+ return chkp_completed_bounds_set->contains (bounds);
+}
+
+/* Clear comleted bound marks. */
+static void
+chkp_erase_completed_bounds (void)
+{
+ delete chkp_completed_bounds_set;
+ chkp_completed_bounds_set = new hash_set<tree>;
+}
+
+/* Mark BOUNDS associated with PTR as incomplete. */
+static void
+chkp_register_incomplete_bounds (tree bounds, tree ptr)
+{
+ chkp_incomplete_bounds_map->put (bounds, ptr);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Regsitered incomplete bounds ");
+ print_generic_expr (dump_file, bounds, 0);
+ fprintf (dump_file, " for ");
+ print_generic_expr (dump_file, ptr, 0);
+ fprintf (dump_file, "\n");
+ }
+}
+
+/* Return 1 if BOUNDS are incomplete and 0 otherwise. */
+static bool
+chkp_incomplete_bounds (tree bounds)
+{
+ if (bounds == incomplete_bounds)
+ return true;
+
+ if (chkp_completed_bounds (bounds))
+ return false;
+
+ return chkp_incomplete_bounds_map->get (bounds) != NULL;
+}
+
+/* Clear incomleted bound marks. */
+static void
+chkp_erase_incomplete_bounds (void)
+{
+ delete chkp_incomplete_bounds_map;
+ chkp_incomplete_bounds_map = new hash_map<tree, tree>;
+}
+
/* Build and return bndmk call which creates bounds for structure
pointed by PTR. Structure should have complete type. */
tree
@@ -191,6 +345,45 @@ chkp_make_bounds_for_struct_addr (tree ptr)
2, ptr, size);
}
+/* Mark BOUNDS as invalid. */
+static void
+chkp_mark_invalid_bounds (tree bounds)
+{
+ chkp_invalid_bounds->add (bounds);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Marked bounds ");
+ print_generic_expr (dump_file, bounds, 0);
+ fprintf (dump_file, " as invalid\n");
+ }
+}
+
+/* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
+static bool
+chkp_valid_bounds (tree bounds)
+{
+ if (bounds == zero_bounds || bounds == none_bounds)
+ return false;
+
+ return !chkp_invalid_bounds->contains (bounds);
+}
+
+/* Helper function for chkp_finish_incomplete_bounds.
+ Marks all incompleted bounds as invalid. */
+bool
+chkp_mark_invalid_bounds_walker (tree const &bounds,
+ tree *slot ATTRIBUTE_UNUSED,
+ void *res ATTRIBUTE_UNUSED)
+{
+ if (!chkp_completed_bounds (bounds))
+ {
+ chkp_mark_invalid_bounds (bounds);
+ chkp_mark_completed_bounds (bounds);
+ }
+ return true;
+}
+
/* Return 1 if type TYPE is a pointer type or a
structure having a pointer type as one of its fields.
Otherwise return 0. */
@@ -258,6 +451,59 @@ chkp_set_bounds (tree node, tree val)
chkp_bounds_map->put (node, val);
}
+/* Build and return ADDR_EXPR for specified object OBJ. */
+static tree
+chkp_build_addr_expr (tree obj)
+{
+ return TREE_CODE (obj) == TARGET_MEM_REF
+ ? tree_mem_ref_addr (ptr_type_node, obj)
+ : build_fold_addr_expr (obj);
+}
+
+/* Return entry block to be used for checker initilization code.
+ Create new block if required. */
+static basic_block
+chkp_get_entry_block (void)
+{
+ if (!entry_block)
+ entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
+
+ return entry_block;
+}
+
+/* Return a bounds var to be used for pointer var PTR_VAR. */
+static tree
+chkp_get_bounds_var (tree ptr_var)
+{
+ tree bnd_var;
+ tree *slot;
+
+ slot = chkp_bound_vars->get (ptr_var);
+ if (slot)
+ bnd_var = *slot;
+ else
+ {
+ bnd_var = create_tmp_reg (pointer_bounds_type_node,
+ CHKP_BOUND_TMP_NAME);
+ chkp_bound_vars->put (ptr_var, bnd_var);
+ }
+
+ return bnd_var;
+}
+
+/* Get bounds registered for object PTR in global bounds table. */
+static tree
+chkp_get_registered_bounds (tree ptr)
+{
+ tree *slot;
+
+ if (!chkp_reg_bounds)
+ return NULL_TREE;
+
+ slot = chkp_reg_bounds->get (ptr);
+ return slot ? *slot : NULL_TREE;
+}
+
/* Force OP to be suitable for using as an argument for call.
New statements (if any) go to SEQ. */
static tree
@@ -276,6 +522,188 @@ chkp_force_gimple_call_op (tree op, gimple_seq *seq)
return op;
}
+/* Generate lower bound check for memory access by ADDR.
+ Check is inserted before the position pointed by ITER.
+ DIRFLAG indicates whether memory access is load or store. */
+static void
+chkp_check_lower (tree addr, tree bounds,
+ gimple_stmt_iterator iter,
+ location_t location ATTRIBUTE_UNUSED,
+ tree dirflag)
+{
+ gimple_seq seq;
+ gimple check;
+ tree node;
+
+ if (bounds == chkp_get_zero_bounds ())
+ return;
+
+ if (dirflag == integer_zero_node
+ && !flag_chkp_check_read)
+ return;
+
+ if (dirflag == integer_one_node
+ && !flag_chkp_check_write)
+ return;
+
+ seq = NULL;
+
+ node = chkp_force_gimple_call_op (addr, &seq);
+
+ check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
+ chkp_mark_stmt (check);
+ gimple_call_set_with_bounds (check, true);
+ gimple_seq_add_stmt (&seq, check);
+
+ gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ gimple before = gsi_stmt (iter);
+ fprintf (dump_file, "Generated lower bound check for statement ");
+ print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
+ fprintf (dump_file, " ");
+ print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
+ }
+}
+
+/* Generate upper bound check for memory access by ADDR.
+ Check is inserted before the position pointed by ITER.
+ DIRFLAG indicates whether memory access is load or store. */
+static void
+chkp_check_upper (tree addr, tree bounds,
+ gimple_stmt_iterator iter,
+ location_t location ATTRIBUTE_UNUSED,
+ tree dirflag)
+{
+ gimple_seq seq;
+ gimple check;
+ tree node;
+
+ if (bounds == chkp_get_zero_bounds ())
+ return;
+
+ if (dirflag == integer_zero_node
+ && !flag_chkp_check_read)
+ return;
+
+ if (dirflag == integer_one_node
+ && !flag_chkp_check_write)
+ return;
+
+ seq = NULL;
+
+ node = chkp_force_gimple_call_op (addr, &seq);
+
+ check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
+ chkp_mark_stmt (check);
+ gimple_call_set_with_bounds (check, true);
+ gimple_seq_add_stmt (&seq, check);
+
+ gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ gimple before = gsi_stmt (iter);
+ fprintf (dump_file, "Generated upper bound check for statement ");
+ print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
+ fprintf (dump_file, " ");
+ print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
+ }
+}
+
+/* Generate lower and upper bound checks for memory access
+ to memory slot [FIRST, LAST] againsr BOUNDS. Checks
+ are inserted before the position pointed by ITER.
+ DIRFLAG indicates whether memory access is load or store. */
+static void
+chkp_check_mem_access (tree first, tree last, tree bounds,
+ gimple_stmt_iterator iter,
+ location_t location,
+ tree dirflag)
+{
+ chkp_check_lower (first, bounds, iter, location, dirflag);
+ chkp_check_upper (last, bounds, iter, location, dirflag);
+}
+
+/* Return COMPONENT_REF accessing FIELD in OBJ. */
+static tree
+chkp_build_component_ref (tree obj, tree field)
+{
+ tree res;
+
+ /* If object is TMR then we do not use component_ref but
+ add offset instead. We need it to be able to get addr
+ of the reasult later. */
+ if (TREE_CODE (obj) == TARGET_MEM_REF)
+ {
+ tree offs = TMR_OFFSET (obj);
+ offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
+ offs, DECL_FIELD_OFFSET (field));
+
+ gcc_assert (offs);
+
+ res = copy_node (obj);
+ TREE_TYPE (res) = TREE_TYPE (field);
+ TMR_OFFSET (res) = offs;
+ }
+ else
+ res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
+
+ return res;
+}
+
+/* Return ARRAY_REF for array ARR and index IDX with
+ specified element type ETYPE and element size ESIZE. */
+static tree
+chkp_build_array_ref (tree arr, tree etype, tree esize,
+ unsigned HOST_WIDE_INT idx)
+{
+ tree index = build_int_cst (size_type_node, idx);
+ tree res;
+
+ /* If object is TMR then we do not use array_ref but
+ add offset instead. We need it to be able to get addr
+ of the reasult later. */
+ if (TREE_CODE (arr) == TARGET_MEM_REF)
+ {
+ tree offs = TMR_OFFSET (arr);
+
+ esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
+ esize, index);
+ gcc_assert(esize);
+
+ offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
+ offs, esize);
+ gcc_assert (offs);
+
+ res = copy_node (arr);
+ TREE_TYPE (res) = etype;
+ TMR_OFFSET (res) = offs;
+ }
+ else
+ res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
+
+ return res;
+}
+
+/* Return true when T can be shared. */
+
+static bool
+chkp_can_be_shared (tree t)
+{
+ if (IS_TYPE_OR_DECL_P (t)
+ || is_gimple_min_invariant (t)
+ || TREE_CODE (t) == SSA_NAME
+ || t == error_mark_node
+ || TREE_CODE (t) == IDENTIFIER_NODE
+ || TREE_CODE (t) == CASE_LABEL_EXPR
+ || DECL_P (t))
+ return true;
+
+ return false;
+}
+
/* Fill HAVE_BOUND output bitmap with information about
bounds requred for object of type TYPE.
@@ -400,6 +828,59 @@ chkp_make_static_const_bounds (HOST_WIDE_INT lb,
return var;
}
+/* Generate code to make bounds with specified lower bound LB and SIZE.
+ if AFTER is 1 then code is inserted after position pointed by ITER
+ otherwise code is inserted before position pointed by ITER.
+ If ITER is NULL then code is added to entry block. */
+static tree
+chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
+{
+ gimple_seq seq;
+ gimple_stmt_iterator gsi;
+ gimple stmt;
+ tree bounds;
+
+ if (iter)
+ gsi = *iter;
+ else
+ gsi = gsi_start_bb (chkp_get_entry_block ());
+
+ seq = NULL;
+
+ lb = chkp_force_gimple_call_op (lb, &seq);
+ size = chkp_force_gimple_call_op (size, &seq);
+
+ stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
+ chkp_mark_stmt (stmt);
+
+ bounds = chkp_get_tmp_reg (stmt);
+ gimple_call_set_lhs (stmt, bounds);
+
+ gimple_seq_add_stmt (&seq, stmt);
+
+ if (iter && after)
+ gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
+ else
+ gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Made bounds: ");
+ print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+ if (iter)
+ {
+ fprintf (dump_file, " inserted before statement: ");
+ print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
+ }
+ else
+ fprintf (dump_file, " at function entry\n");
+ }
+
+ /* update_stmt (stmt); */
+
+ return bounds;
+}
+
/* Return var holding zero bounds. */
tree
chkp_get_zero_bounds_var (void)
@@ -411,6 +892,91 @@ chkp_get_zero_bounds_var (void)
return chkp_zero_bounds_var;
}
+/* Return var holding none bounds. */
+static tree
+chkp_get_none_bounds_var (void)
+{
+ if (!chkp_none_bounds_var)
+ chkp_none_bounds_var
+ = chkp_make_static_const_bounds (-1, 0,
+ CHKP_NONE_BOUNDS_VAR_NAME);
+ return chkp_none_bounds_var;
+}
+
+/* Return SSA_NAME used to represent zero bounds. */
+static tree
+chkp_get_zero_bounds (void)
+{
+ if (zero_bounds)
+ return zero_bounds;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Creating zero bounds...");
+
+ if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
+ || flag_chkp_use_static_const_bounds > 0)
+ {
+ gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
+ gimple stmt;
+
+ zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
+ stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
+ gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ }
+ else
+ zero_bounds = chkp_make_bounds (integer_zero_node,
+ integer_zero_node,
+ NULL,
+ false);
+
+ return zero_bounds;
+}
+
+/* Return SSA_NAME used to represent none bounds. */
+static tree
+chkp_get_none_bounds (void)
+{
+ if (none_bounds)
+ return none_bounds;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Creating none bounds...");
+
+
+ if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
+ || flag_chkp_use_static_const_bounds > 0)
+ {
+ gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
+ gimple stmt;
+
+ none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
+ stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
+ gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ }
+ else
+ none_bounds = chkp_make_bounds (integer_minus_one_node,
+ build_int_cst (size_type_node, 2),
+ NULL,
+ false);
+
+ return none_bounds;
+}
+
+/* Return bounds to be used as a result of operation which
+ should not create poiunter (e.g. MULT_EXPR). */
+static tree
+chkp_get_invalid_op_bounds (void)
+{
+ return chkp_get_zero_bounds ();
+}
+
+/* Return bounds to be used for loads of non-pointer values. */
+static tree
+chkp_get_nonpointer_load_bounds (void)
+{
+ return chkp_get_zero_bounds ();
+}
+
/* Return bounds used as returned by call
which produced SSA name VAL. */
gimple
@@ -431,6 +997,22 @@ chkp_retbnd_call_by_val (tree val)
return NULL;
}
+/* Check the next parameter for the given PARM is bounds
+ and return it's default SSA_NAME (create if required). */
+static tree
+chkp_get_next_bounds_parm (tree parm)
+{
+ tree bounds = TREE_CHAIN (parm);
+ gcc_assert (POINTER_BOUNDS_P (bounds));
+ bounds = ssa_default_def (cfun, bounds);
+ if (!bounds)
+ {
+ bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
+ set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
+ }
+ return bounds;
+}
+
/* Build and return CALL_EXPR for bndstx builtin with specified
arguments. */
tree
@@ -445,6 +1027,41 @@ chkp_build_bndldx_call (tree addr, tree ptr)
return call;
}
+/* Insert code to load bounds for PTR located by ADDR.
+ Code is inserted after position pointed by GSI.
+ Loaded bounds are returned. */
+static tree
+chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
+{
+ gimple_seq seq;
+ gimple stmt;
+ tree bounds;
+
+ seq = NULL;
+
+ addr = chkp_force_gimple_call_op (addr, &seq);
+ ptr = chkp_force_gimple_call_op (ptr, &seq);
+
+ stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
+ chkp_mark_stmt (stmt);
+ bounds = chkp_get_tmp_reg (stmt);
+ gimple_call_set_lhs (stmt, bounds);
+
+ gimple_seq_add_stmt (&seq, stmt);
+
+ gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Generated bndldx for pointer ");
+ print_generic_expr (dump_file, ptr, 0);
+ fprintf (dump_file, ": ");
+ print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+ }
+
+ return bounds;
+}
+
/* Build and return CALL_EXPR for bndstx builtin with specified
arguments. */
tree
@@ -501,6 +1118,153 @@ chkp_build_make_bounds_call (tree lower_bound, tree size)
call, 2, lower_bound, size);
}
+/* Create static bounds var of specfified OBJ which is
+ is either VAR_DECL or string constant. */
+static tree
+chkp_make_static_bounds (tree obj)
+{
+ static int string_id = 1;
+ static int var_id = 1;
+ tree *slot;
+ const char *var_name;
+ char *bnd_var_name;
+ tree bnd_var;
+
+ /* First check if we already have required var. */
+ if (chkp_static_var_bounds)
+ {
+ slot = chkp_static_var_bounds->get (obj);
+ if (slot)
+ return *slot;
+ }
+
+ /* Build decl for bounds var. */
+ if (TREE_CODE (obj) == VAR_DECL)
+ {
+ if (DECL_IGNORED_P (obj))
+ {
+ bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
+ sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
+ }
+ else
+ {
+ var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
+
+ /* For hidden symbols we want to skip first '*' char. */
+ if (*var_name == '*')
+ var_name++;
+
+ bnd_var_name = (char *) xmalloc (strlen (var_name)
+ + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
+ strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
+ strcat (bnd_var_name, var_name);
+ }
+
+ bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier (bnd_var_name),
+ pointer_bounds_type_node);
+
+ /* Address of the obj will be used as lower bound. */
+ TREE_ADDRESSABLE (obj) = 1;
+ }
+ else
+ {
+ bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
+ sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
+
+ bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier (bnd_var_name),
+ pointer_bounds_type_node);
+ }
+
+ TREE_PUBLIC (bnd_var) = 0;
+ TREE_USED (bnd_var) = 1;
+ TREE_READONLY (bnd_var) = 0;
+ TREE_STATIC (bnd_var) = 1;
+ TREE_ADDRESSABLE (bnd_var) = 0;
+ DECL_ARTIFICIAL (bnd_var) = 1;
+ DECL_COMMON (bnd_var) = 1;
+ DECL_COMDAT (bnd_var) = 1;
+ DECL_READ_P (bnd_var) = 1;
+ DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
+ /* Force output similar to constant bounds.
+ See chkp_make_static_const_bounds. */
+ varpool_node::get_create (bnd_var)->force_output = 1;
+ /* Mark symbol as requiring bounds initialization. */
+ varpool_node::get_create (bnd_var)->need_bounds_init = 1;
+ varpool_node::finalize_decl (bnd_var);
+
+ /* Add created var to the map to use it for other references
+ to obj. */
+ if (!chkp_static_var_bounds)
+ chkp_static_var_bounds = new hash_map<tree, tree>;
+
+ chkp_static_var_bounds->put (obj, bnd_var);
+
+ return bnd_var;
+}
+
+/* When var has incomplete type we cannot get size to
+ compute its bounds. In such cases we use checker
+ builtin call which determines object size at runtime. */
+static tree
+chkp_generate_extern_var_bounds (tree var)
+{
+ tree bounds, size_reloc, lb, size, max_size, cond;
+ gimple_stmt_iterator gsi;
+ gimple_seq seq = NULL;
+ gimple stmt;
+
+ /* If instrumentation is not enabled for vars having
+ incomplete type then just return zero bounds to avoid
+ checks for this var. */
+ if (!flag_chkp_incomplete_type)
+ return chkp_get_zero_bounds ();
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Generating bounds for extern symbol '");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "'\n");
+ }
+
+ stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
+
+ size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
+ gimple_call_set_lhs (stmt, size_reloc);
+
+ gimple_seq_add_stmt (&seq, stmt);
+
+ lb = chkp_build_addr_expr (var);
+ size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());
+
+ if (flag_chkp_zero_dynamic_size_as_infinite)
+ {
+ /* We should check that size relocation was resolved.
+ If it was not then use maximum possible size for the var. */
+ max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
+ fold_convert (chkp_uintptr_type, lb));
+ max_size = chkp_force_gimple_call_op (max_size, &seq);
+
+ cond = build2 (NE_EXPR, boolean_type_node, size_reloc, integer_zero_node);
+ stmt = gimple_build_assign_with_ops (COND_EXPR, size,
+ cond, size_reloc, max_size);
+ gimple_seq_add_stmt (&seq, stmt);
+ }
+ else
+ {
+ stmt = gimple_build_assign (size, size_reloc);
+ gimple_seq_add_stmt (&seq, stmt);
+ }
+
+ gsi = gsi_start_bb (chkp_get_entry_block ());
+ gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
+
+ bounds = chkp_make_bounds (lb, size, &gsi, true);
+
+ return bounds;
+}
+
/* Return 1 if TYPE has fields with zero size or fields
marked with chkp_variable_size attribute. */
bool
@@ -525,4 +1289,255 @@ chkp_variable_size_type (tree type)
return res;
}
+/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
+ return the result. If ITER is not NULL then code is inserted
+ before the position pointed to by ITER. Otherwise code is
+ added to the entry block. */
+static tree
+chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
+{
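+ /* Zero bounds cover the whole address space and thus act
+ as a neutral element for intersection. */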
+ if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
+ return bounds2 ? bounds2 : bounds1;
+ else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
+ return bounds1;
+ else
+ {
+ gimple_seq seq;
+ gimple stmt;
+ tree bounds;
+
+ seq = NULL;
+
+ stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
+ chkp_mark_stmt (stmt);
+
+ bounds = chkp_get_tmp_reg (stmt);
+ gimple_call_set_lhs (stmt, bounds);
+
+ gimple_seq_add_stmt (&seq, stmt);
+
+ /* We are probably doing narrowing for a constant expression.
+ In such a case ITER may be undefined. */
+ if (!iter)
+ {
+ gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
+ iter = &gsi;
+ gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
+ }
+ else
+ gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Bounds intersection: ");
+ print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+ fprintf (dump_file, " inserted before statement: ");
+ print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
+ TDF_VOPS|TDF_MEMSYMS);
+ }
+
+ return bounds;
+ }
+}
+
+/* Return 1 if we are allowed to narrow bounds for the addressed
+ FIELD and 0 otherwise. */
+static bool
+chkp_may_narrow_to_field (tree field)
+{
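+ /* Narrowing requires a constant non-zero field size and
+ constant field offsets; variable-sized fields keep the
+ bounds of the enclosing object. */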
+ return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
+ && tree_to_uhwi (DECL_SIZE (field)) != 0
+ && (!DECL_FIELD_OFFSET (field)
+ || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
+ && (!DECL_FIELD_BIT_OFFSET (field)
+ || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
+ && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
+ && !chkp_variable_size_type (TREE_TYPE (field));
+}
+
+/* Return 1 if bounds for FIELD should be narrowed to
+ field's own size. */
+static bool
+chkp_narrow_bounds_for_field (tree field)
+{
+ HOST_WIDE_INT offs;
+ HOST_WIDE_INT bit_offs;
+
+ if (!chkp_may_narrow_to_field (field))
+ return false;
+
+ /* Accesses to compiler-generated fields should not cause
+ bounds narrowing. */
+ if (DECL_ARTIFICIAL (field))
+ return false;
+
+ offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
+ bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
+
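+ /* A field at offset zero shares its address with the
+ enclosing object, so it gets its own bounds only when
+ -fchkp-first-field-has-own-bounds is given. */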
+ return (flag_chkp_narrow_bounds
+ && (flag_chkp_first_field_has_own_bounds
+ || offs
+ || bit_offs));
+}
+
+/* Perform narrowing for BOUNDS using bounds computed for field
+ access COMPONENT. ITER meaning is the same as for
+ chkp_intersect_bounds. */
+static tree
+chkp_narrow_bounds_to_field (tree bounds, tree component,
+ gimple_stmt_iterator *iter)
+{
+ tree field = TREE_OPERAND (component, 1);
+ tree size = DECL_SIZE_UNIT (field);
+ tree field_ptr = chkp_build_addr_expr (component);
+ tree field_bounds;
+
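+ /* E.g. for an access to 's.f' the narrowed bounds are
+ [&s.f, &s.f + sizeof (s.f)) intersected with the bounds
+ computed for 's' itself. */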
+ field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
+
+ return chkp_intersect_bounds (field_bounds, bounds, iter);
+}
+
+/* Helper function which checks the type of RHS and finds all
+ pointers in it. For each pointer found we build its accesses
+ in the LHS and RHS objects and then call HANDLER for them.
+ The function is used to copy or initialize bounds for a
+ copied object. */
+static void
+chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
+ assign_handler handler)
+{
+ tree type = TREE_TYPE (lhs);
+
+ /* There is nothing to do for clobbers. */
+ if (TREE_CLOBBER_P (rhs))
+ return;
+
+ if (BOUNDED_TYPE_P (type))
+ handler (lhs, rhs, arg);
+ else if (RECORD_OR_UNION_TYPE_P (type))
+ {
+ tree field;
+
+ if (TREE_CODE (rhs) == CONSTRUCTOR)
+ {
+ unsigned HOST_WIDE_INT cnt;
+ tree val;
+
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
+ {
+ if (chkp_type_has_pointer (TREE_TYPE (field)))
+ {
+ tree lhs_field = chkp_build_component_ref (lhs, field);
+ chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
+ }
+ }
+ }
+ else
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
+ if (TREE_CODE (field) == FIELD_DECL
+ && chkp_type_has_pointer (TREE_TYPE (field)))
+ {
+ tree rhs_field = chkp_build_component_ref (rhs, field);
+ tree lhs_field = chkp_build_component_ref (lhs, field);
+ chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
+ }
+ }
+ else if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ unsigned HOST_WIDE_INT cur = 0;
+ tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
+ tree etype = TREE_TYPE (type);
+ tree esize = TYPE_SIZE (etype);
+
+ if (TREE_CODE (rhs) == CONSTRUCTOR)
+ {
+ unsigned HOST_WIDE_INT cnt;
+ tree purp, val, lhs_elem;
+
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
+ {
+ if (purp && TREE_CODE (purp) == RANGE_EXPR)
+ {
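+ /* A RANGE_EXPR purpose initializes all elements in the
+ [lo_index, hi_index] range with the same value. */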
+ tree lo_index = TREE_OPERAND (purp, 0);
+ tree hi_index = TREE_OPERAND (purp, 1);
+
+ for (cur = (unsigned)tree_to_uhwi (lo_index);
+ cur <= (unsigned)tree_to_uhwi (hi_index);
+ cur++)
+ {
+ lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
+ chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
+ }
+ }
+ else
+ {
+ if (purp)
+ {
+ gcc_assert (TREE_CODE (purp) == INTEGER_CST);
+ cur = tree_to_uhwi (purp);
+ }
+
+ lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
+
+ chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
+ }
+ }
+ }
+ /* Copy the array only when its size is known. A maximum
+ index of -1 denotes a zero-length array. */
+ else if (maxval && !integer_minus_onep (maxval))
+ for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
+ {
+ tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
+ tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
+ chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
+ }
+ }
+ else
+ internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
+ get_tree_code_name (TREE_CODE (type)));
+}
+
+/* Initialize pass. */
+static void
+chkp_init (void)
+{
+ basic_block bb;
+ gimple_stmt_iterator i;
+
+ in_chkp_pass = true;
+
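+ /* Make sure no statement carries a stale instrumentation
+ mark from earlier processing. */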
+ for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
+ for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
+ chkp_unmark_stmt (gsi_stmt (i));
+
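+ /* (Re)create per-function state; maps surviving from a
+ previously instrumented function are released first. */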
+ chkp_invalid_bounds = new hash_set<tree>;
+ chkp_completed_bounds_set = new hash_set<tree>;
+ delete chkp_reg_bounds;
+ chkp_reg_bounds = new hash_map<tree, tree>;
+ delete chkp_bound_vars;
+ chkp_bound_vars = new hash_map<tree, tree>;
+ chkp_reg_addr_bounds = new hash_map<tree, tree>;
+ chkp_incomplete_bounds_map = new hash_map<tree, tree>;
+ delete chkp_bounds_map;
+ chkp_bounds_map = new hash_map<tree, tree>;
+ chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
+
+ entry_block = NULL;
+ zero_bounds = NULL_TREE;
+ none_bounds = NULL_TREE;
+ incomplete_bounds = integer_zero_node;
+ tmp_var = NULL_TREE;
+ size_tmp_var = NULL_TREE;
+
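+ /* An unsigned integer type with the width of a pointer,
+ used for address computations on bounds. */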
+ chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
+
+ /* We create these constant bounds once for each object file.
+ These symbols go to a comdat section and result in a single
+ copy of each one in the final binary. */
+ chkp_get_zero_bounds_var ();
+ chkp_get_none_bounds_var ();
+
+ calculate_dominance_info (CDI_DOMINATORS);
+ calculate_dominance_info (CDI_POST_DOMINATORS);
+}
+
#include "gt-tree-chkp.h"