1999-08-26 09:30:50 +00:00
|
|
|
|
/* Implements exception handling.
|
2002-02-01 18:16:02 +00:00
|
|
|
|
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
|
|
|
|
|
1999, 2000, 2001, 2002 Free Software Foundation, Inc.
|
1999-08-26 09:30:50 +00:00
|
|
|
|
Contributed by Mike Stump <mrs@cygnus.com>.
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
This file is part of GCC.
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
GCC is free software; you can redistribute it and/or modify it under
|
|
|
|
|
the terms of the GNU General Public License as published by the Free
|
|
|
|
|
Software Foundation; either version 2, or (at your option) any later
|
|
|
|
|
version.
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
|
|
|
WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
|
|
|
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
|
|
|
for more details.
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
2002-02-01 18:16:02 +00:00
|
|
|
|
along with GCC; see the file COPYING. If not, write to the Free
|
|
|
|
|
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
|
|
|
|
02111-1307, USA. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/* An exception is an event that can be signaled from within a
|
|
|
|
|
function. This event can then be "caught" or "trapped" by the
|
|
|
|
|
callers of this function. This potentially allows program flow to
|
|
|
|
|
be transferred to any arbitrary code associated with a function call
|
|
|
|
|
several levels up the stack.
|
|
|
|
|
|
|
|
|
|
The intended use for this mechanism is for signaling "exceptional
|
|
|
|
|
events" in an out-of-band fashion, hence its name. The C++ language
|
|
|
|
|
(and many other OO-styled or functional languages) practically
|
|
|
|
|
requires such a mechanism, as otherwise it becomes very difficult
|
|
|
|
|
or even impossible to signal failure conditions in complex
|
|
|
|
|
situations. The traditional C++ example is when an error occurs in
|
|
|
|
|
the process of constructing an object; without such a mechanism, it
|
|
|
|
|
is impossible to signal that the error occurs without adding global
|
|
|
|
|
state variables and error checks around every object construction.
|
|
|
|
|
|
|
|
|
|
The act of causing this event to occur is referred to as "throwing
|
|
|
|
|
an exception". (Alternate terms include "raising an exception" or
|
|
|
|
|
"signaling an exception".) The term "throw" is used because control
|
|
|
|
|
is returned to the callers of the function that is signaling the
|
|
|
|
|
exception, and thus there is the concept of "throwing" the
|
|
|
|
|
exception up the call stack.
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
[ Add updated documentation on how to use this. ] */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#include "config.h"
|
|
|
|
|
#include "system.h"
|
|
|
|
|
#include "rtl.h"
|
|
|
|
|
#include "tree.h"
|
|
|
|
|
#include "flags.h"
|
|
|
|
|
#include "function.h"
|
|
|
|
|
#include "expr.h"
|
2002-02-01 18:16:02 +00:00
|
|
|
|
#include "libfuncs.h"
|
1999-08-26 09:30:50 +00:00
|
|
|
|
#include "insn-config.h"
|
2002-02-01 18:16:02 +00:00
|
|
|
|
#include "except.h"
|
|
|
|
|
#include "integrate.h"
|
|
|
|
|
#include "hard-reg-set.h"
|
|
|
|
|
#include "basic-block.h"
|
1999-08-26 09:30:50 +00:00
|
|
|
|
#include "output.h"
|
2002-02-01 18:16:02 +00:00
|
|
|
|
#include "dwarf2asm.h"
|
|
|
|
|
#include "dwarf2out.h"
|
|
|
|
|
#include "dwarf2.h"
|
1999-08-26 09:30:50 +00:00
|
|
|
|
#include "toplev.h"
|
2002-02-01 18:16:02 +00:00
|
|
|
|
#include "hashtab.h"
|
1999-10-16 06:09:09 +00:00
|
|
|
|
#include "intl.h"
|
2002-02-01 18:16:02 +00:00
|
|
|
|
#include "ggc.h"
|
|
|
|
|
#include "tm_p.h"
|
|
|
|
|
#include "target.h"
|
|
|
|
|
|
|
|
|
|
/* Provide defaults for stuff that may not be defined when using
|
|
|
|
|
sjlj exceptions. */
|
|
|
|
|
#ifndef EH_RETURN_STACKADJ_RTX
|
|
|
|
|
#define EH_RETURN_STACKADJ_RTX 0
|
|
|
|
|
#endif
|
|
|
|
|
#ifndef EH_RETURN_HANDLER_RTX
|
|
|
|
|
#define EH_RETURN_HANDLER_RTX 0
|
|
|
|
|
#endif
|
|
|
|
|
#ifndef EH_RETURN_DATA_REGNO
|
|
|
|
|
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
|
|
|
|
|
#endif
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Nonzero means enable synchronous exceptions for non-call instructions. */
|
|
|
|
|
int flag_non_call_exceptions;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Protect cleanup actions with must-not-throw regions, with a call
|
|
|
|
|
to the given failure handler. */
|
|
|
|
|
tree (*lang_protect_cleanup_actions) PARAMS ((void));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Return true if type A catches type B. */
|
|
|
|
|
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Map a type to a runtime object to match type. */
|
|
|
|
|
tree (*lang_eh_runtime_type) PARAMS ((tree));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
/* A hash table mapping an exception handler label to its region.
   One entry per handler label; the table itself is the static
   exception_handler_label_map below.  */

struct ehl_map_entry
{
  /* The CODE_LABEL for the handler's entry point.  */
  rtx label;

  /* The region that owns LABEL.  */
  struct eh_region *region;
};
|
|
|
|
|
|
|
|
|
|
static htab_t exception_handler_label_map;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static int call_site_base;
|
|
|
|
|
static unsigned int sjlj_funcdef_number;
|
|
|
|
|
static htab_t type_to_runtime_map;
|
|
|
|
|
|
|
|
|
|
/* Describe the SjLj_Function_Context structure. */
|
|
|
|
|
static tree sjlj_fc_type_node;
|
|
|
|
|
static int sjlj_fc_call_site_ofs;
|
|
|
|
|
static int sjlj_fc_data_ofs;
|
|
|
|
|
static int sjlj_fc_personality_ofs;
|
|
|
|
|
static int sjlj_fc_lsda_ofs;
|
|
|
|
|
static int sjlj_fc_jbuf_ofs;
|
|
|
|
|
|
|
|
|
|
/* Describes one exception region.  Regions form a tree (via the
   outer/inner/next_peer links) mirroring the nesting of EH constructs
   in the source.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  INNER is the head of
     the list; siblings are chained through NEXT_PEER.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Used to save exception status for each function.  Hangs off of
   struct function (cfun->eh).  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array, indexed by region
     number (region_array[i]->region_number == i for canonical
     entries).  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  /* Pseudos communicating the exception filter value and exception
     pointer to handler code.  NOTE(review): inferred from the
     get_exception_filter / get_exception_pointer save/restore code in
     expand_eh_region_end_cleanup -- confirm.  */
  rtx filter;
  rtx exc_ptr;

  /* Non-zero once landing pads have been constructed.  */
  int built_landing_pads;
  /* Highest region number assigned so far.  */
  int last_region_number;

  /* Data accumulated for the exception tables.  */
  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  /* Growable array of call-site records.  */
  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  /* State for expanding __builtin_eh_return.  */
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  /* State for sjlj exceptions: the function context object and the
     insertion point for the exit-time unregister call.  */
  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
static void mark_eh_region PARAMS ((struct eh_region *));
|
2002-05-09 20:02:13 +00:00
|
|
|
|
static int mark_ehl_map_entry PARAMS ((PTR *, PTR));
|
|
|
|
|
static void mark_ehl_map PARAMS ((void *));
|
|
|
|
|
|
|
|
|
|
static void free_region PARAMS ((struct eh_region *));
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
static int t2r_eq PARAMS ((const PTR,
|
|
|
|
|
const PTR));
|
|
|
|
|
static hashval_t t2r_hash PARAMS ((const PTR));
|
|
|
|
|
static int t2r_mark_1 PARAMS ((PTR *, PTR));
|
|
|
|
|
static void t2r_mark PARAMS ((PTR));
|
|
|
|
|
static void add_type_for_runtime PARAMS ((tree));
|
|
|
|
|
static tree lookup_type_for_runtime PARAMS ((tree));
|
|
|
|
|
|
|
|
|
|
static struct eh_region *expand_eh_region_end PARAMS ((void));
|
|
|
|
|
|
|
|
|
|
static rtx get_exception_filter PARAMS ((struct function *));
|
|
|
|
|
|
|
|
|
|
static void collect_eh_region_array PARAMS ((void));
|
|
|
|
|
static void resolve_fixup_regions PARAMS ((void));
|
|
|
|
|
static void remove_fixup_regions PARAMS ((void));
|
|
|
|
|
static void remove_unreachable_regions PARAMS ((rtx));
|
|
|
|
|
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
|
|
|
|
|
|
|
|
|
|
static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
|
|
|
|
|
struct inline_remap *));
|
|
|
|
|
static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
|
|
|
|
|
struct eh_region **));
|
|
|
|
|
static int ttypes_filter_eq PARAMS ((const PTR,
|
|
|
|
|
const PTR));
|
|
|
|
|
static hashval_t ttypes_filter_hash PARAMS ((const PTR));
|
|
|
|
|
static int ehspec_filter_eq PARAMS ((const PTR,
|
|
|
|
|
const PTR));
|
|
|
|
|
static hashval_t ehspec_filter_hash PARAMS ((const PTR));
|
|
|
|
|
static int add_ttypes_entry PARAMS ((htab_t, tree));
|
|
|
|
|
static int add_ehspec_entry PARAMS ((htab_t, htab_t,
|
|
|
|
|
tree));
|
|
|
|
|
static void assign_filter_values PARAMS ((void));
|
|
|
|
|
static void build_post_landing_pads PARAMS ((void));
|
|
|
|
|
static void connect_post_landing_pads PARAMS ((void));
|
|
|
|
|
static void dw2_build_landing_pads PARAMS ((void));
|
|
|
|
|
|
|
|
|
|
struct sjlj_lp_info;
|
|
|
|
|
static bool sjlj_find_directly_reachable_regions
|
|
|
|
|
PARAMS ((struct sjlj_lp_info *));
|
|
|
|
|
static void sjlj_assign_call_site_values
|
|
|
|
|
PARAMS ((rtx, struct sjlj_lp_info *));
|
|
|
|
|
static void sjlj_mark_call_sites
|
|
|
|
|
PARAMS ((struct sjlj_lp_info *));
|
|
|
|
|
static void sjlj_emit_function_enter PARAMS ((rtx));
|
|
|
|
|
static void sjlj_emit_function_exit PARAMS ((void));
|
|
|
|
|
static void sjlj_emit_dispatch_table
|
|
|
|
|
PARAMS ((rtx, struct sjlj_lp_info *));
|
|
|
|
|
static void sjlj_build_landing_pads PARAMS ((void));
|
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
static hashval_t ehl_hash PARAMS ((const PTR));
|
|
|
|
|
static int ehl_eq PARAMS ((const PTR,
|
|
|
|
|
const PTR));
|
|
|
|
|
static void ehl_free PARAMS ((PTR));
|
|
|
|
|
static void add_ehl_entry PARAMS ((rtx,
|
|
|
|
|
struct eh_region *));
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static void remove_exception_handler_label PARAMS ((rtx));
|
|
|
|
|
static void remove_eh_handler PARAMS ((struct eh_region *));
|
2002-05-09 20:02:13 +00:00
|
|
|
|
static int for_each_eh_label_1 PARAMS ((PTR *, PTR));
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
struct reachable_info;
|
|
|
|
|
|
|
|
|
|
/* The return value of reachable_next_level. */
|
|
|
|
|
enum reachable_code
|
|
|
|
|
{
|
|
|
|
|
/* The given exception is not processed by the given region. */
|
|
|
|
|
RNL_NOT_CAUGHT,
|
|
|
|
|
/* The given exception may need processing by the given region. */
|
|
|
|
|
RNL_MAYBE_CAUGHT,
|
|
|
|
|
/* The given exception is completely processed by the given region. */
|
|
|
|
|
RNL_CAUGHT,
|
|
|
|
|
/* The given exception is completely processed by the runtime. */
|
|
|
|
|
RNL_BLOCKED
|
|
|
|
|
};
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static int check_handled PARAMS ((tree, tree));
|
|
|
|
|
static void add_reachable_handler
|
|
|
|
|
PARAMS ((struct reachable_info *, struct eh_region *,
|
|
|
|
|
struct eh_region *));
|
|
|
|
|
static enum reachable_code reachable_next_level
|
|
|
|
|
PARAMS ((struct eh_region *, tree, struct reachable_info *));
|
|
|
|
|
|
|
|
|
|
static int action_record_eq PARAMS ((const PTR,
|
|
|
|
|
const PTR));
|
|
|
|
|
static hashval_t action_record_hash PARAMS ((const PTR));
|
|
|
|
|
static int add_action_record PARAMS ((htab_t, int, int));
|
|
|
|
|
static int collect_one_action_chain PARAMS ((htab_t,
|
|
|
|
|
struct eh_region *));
|
|
|
|
|
static int add_call_site PARAMS ((rtx, int));
|
|
|
|
|
|
|
|
|
|
static void push_uleb128 PARAMS ((varray_type *,
|
|
|
|
|
unsigned int));
|
|
|
|
|
static void push_sleb128 PARAMS ((varray_type *, int));
|
|
|
|
|
#ifndef HAVE_AS_LEB128
|
|
|
|
|
static int dw2_size_of_call_site_table PARAMS ((void));
|
|
|
|
|
static int sjlj_size_of_call_site_table PARAMS ((void));
|
|
|
|
|
#endif
|
|
|
|
|
static void dw2_output_call_site_table PARAMS ((void));
|
|
|
|
|
static void sjlj_output_call_site_table PARAMS ((void));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* Routine to see if exception handling is turned on.
|
|
|
|
|
DO_WARN is non-zero if we want to inform the user that exception
|
|
|
|
|
handling is turned off.
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
This is used to ensure that -fexceptions has been specified if the
|
|
|
|
|
compiler tries to use any exception-specific functions. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
int
|
|
|
|
|
doing_eh (do_warn)
|
|
|
|
|
int do_warn;
|
|
|
|
|
{
|
|
|
|
|
if (! flag_exceptions)
|
|
|
|
|
{
|
|
|
|
|
static int warned = 0;
|
|
|
|
|
if (! warned && do_warn)
|
|
|
|
|
{
|
|
|
|
|
error ("exception handling disabled, use -fexceptions to enable");
|
|
|
|
|
warned = 1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* One-time initialization of the exception handling machinery:
   register GC roots and, when setjmp/longjmp exceptions are in use,
   build and lay out the SjLj_Function_Context record type.  */
void
init_eh ()
{
  /* The label map must survive GC even when EH is disabled.  */
  ggc_add_root (&exception_handler_label_map, 1, 1, mark_ehl_map);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      /* struct SjLj_Function_Context *__prev;  */
      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* int __call_site;  */
      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* word __data[4];  */
      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* void *__personality;  */
      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* void *__lsda;  */
      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      /* void *__jbuf[N];  */
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields in declaration order and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
|
|
|
|
|
|
1999-08-26 09:30:50 +00:00
|
|
|
|
/* Allocate and attach a zero-initialized eh_status record to the
   function currently being compiled (cfun).  */
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Mark one eh_region for GC: the trees/rtxs hanging off the
   type-specific union member, plus the per-region rtxs.  The tree
   links (outer/inner/next_peer) are NOT followed here; callers walk
   the region tree themselves (see mark_eh_status).  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_UNKNOWN:
      /* This can happen if a nested function is inside the body of a region
	 and we do a GC as part of processing it.  */
      break;
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type_list);
      ggc_mark_tree (region->u.catch.filter_list);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      /* Nothing type-specific to mark.  */
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  /* These exist for every region type.  */
  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}
|
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
/* htab_traverse callback: mark the label of one ehl_map_entry for GC.
   Always returns 1 so the traversal continues over all entries.  */
static int
mark_ehl_map_entry (pentry, data)
     PTR *pentry;
     PTR data ATTRIBUTE_UNUSED;
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
  ggc_mark_rtx (entry->label);
  return 1;
}
|
|
|
|
|
|
|
|
|
|
static void
|
|
|
|
|
mark_ehl_map (pp)
|
|
|
|
|
void *pp;
|
|
|
|
|
{
|
|
|
|
|
htab_t map = *(htab_t *) pp;
|
|
|
|
|
if (map)
|
|
|
|
|
htab_traverse (map, mark_ehl_map_entry, NULL);
|
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Mark a function's entire EH state for GC: every region plus the
   auxiliary rtxs and trees in the eh_status record.  EH may be null
   (no exception state), in which case this is a no-op.  */
void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Skip alias slots: only mark a region at its own index.  */
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      /* Depth-first, non-recursive walk: descend into INNER first,
	 then NEXT_PEER, backing out through OUTER when both are
	 exhausted.  */
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}
|
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
/* Release the storage for one eh_region.  */
static inline void
free_region (r)
     struct eh_region *r;
{
  /* Note that the aka bitmap is freed by regset_release_memory.  But if
     we ever replace with a non-obstack implementation, this would be
     the place to do it.  */
  free (r);
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Release all memory held by F's exception status and detach it from
   F.  Also tears down the global exception handler label map.  */
void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free_region (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      /* Depth-first walk, freeing each region after its children;
	 NEXT is always captured before the current node is freed.  */
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free_region (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free_region (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free_region (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;

  if (exception_handler_label_map)
    {
      htab_delete (exception_handler_label_map);
      exception_handler_label_map = NULL;
    }
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* Start an exception handling region. All instructions emitted
|
|
|
|
|
after this point are considered to be part of the region until
|
|
|
|
|
expand_eh_region_end is invoked. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
expand_eh_region_start ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct eh_region *new_region;
|
|
|
|
|
struct eh_region *cur_region;
|
|
|
|
|
rtx note;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
if (! doing_eh (0))
|
|
|
|
|
return;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Insert a new blank region as a leaf in the tree. */
|
|
|
|
|
new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
|
|
|
|
|
cur_region = cfun->eh->cur_region;
|
|
|
|
|
new_region->outer = cur_region;
|
|
|
|
|
if (cur_region)
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
new_region->next_peer = cur_region->inner;
|
|
|
|
|
cur_region->inner = new_region;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
new_region->next_peer = cfun->eh->region_tree;
|
|
|
|
|
cfun->eh->region_tree = new_region;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
2002-02-01 18:16:02 +00:00
|
|
|
|
cfun->eh->cur_region = new_region;
|
|
|
|
|
|
|
|
|
|
/* Create a note marking the start of this region. */
|
|
|
|
|
new_region->region_number = ++cfun->eh->last_region_number;
|
|
|
|
|
note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
|
|
|
|
|
NOTE_EH_HANDLER (note) = new_region->region_number;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Common code to end a region. Returns the region just ended. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static struct eh_region *
|
|
|
|
|
expand_eh_region_end ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct eh_region *cur_region = cfun->eh->cur_region;
|
|
|
|
|
rtx note;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Create a note marking the end of this region. */
|
|
|
|
|
note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
|
|
|
|
|
NOTE_EH_HANDLER (note) = cur_region->region_number;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Pop. */
|
|
|
|
|
cfun->eh->cur_region = cur_region->outer;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
return cur_region;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  /* Close the pending region and re-type it as a cleanup, recording
     the cleanup expression for later expansion as a handler.  */
  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  /* Normal (non-EH) control flow skips over the handler code.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  /* Expand the cleanup code itself.  */
  expand_expr (handler, const0_rtx, VOIDmode, 0);

  /* Restore the EH data registers the cleanup may have clobbered.  */
  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End an exception handling region for a try block, and prepares
|
|
|
|
|
for subsequent calls to expand_start_catch. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
expand_start_all_catch ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct eh_region *region;
|
|
|
|
|
|
|
|
|
|
if (! doing_eh (1))
|
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
region = expand_eh_region_end ();
|
|
|
|
|
region->type = ERT_TRY;
|
|
|
|
|
region->u.try.prev_try = cfun->eh->try_region;
|
|
|
|
|
region->u.try.continue_label = gen_label_rtx ();
|
|
|
|
|
|
|
|
|
|
cfun->eh->try_region = region;
|
|
|
|
|
|
|
|
|
|
emit_jump (region->u.try.continue_label);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list enables to
   associate the catch region with potentially several exception types, which
   is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure to always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  /* Open a new region for the catch body; it is typed ERT_CATCH rather
     than left to a matching expand_eh_region_end.  */
  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  /* Append this catch to the enclosing try's doubly-linked list of
     catch handlers.  */
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End a catch clause. Control will resume after the try/catch block. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
expand_end_catch ()
|
|
|
|
|
{
|
|
|
|
|
struct eh_region *try_region, *catch_region;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
if (! doing_eh (0))
|
|
|
|
|
return;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
catch_region = expand_eh_region_end ();
|
|
|
|
|
try_region = cfun->eh->try_region;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
emit_jump (try_region->u.try.continue_label);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End a sequence of catch handlers for a try block. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
expand_end_all_catch ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct eh_region *try_region;
|
|
|
|
|
|
|
|
|
|
if (! doing_eh (0))
|
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
try_region = cfun->eh->try_region;
|
|
|
|
|
cfun->eh->try_region = try_region->u.try.prev_try;
|
|
|
|
|
|
|
|
|
|
emit_label (try_region->u.try.continue_label);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  /* Close the pending region and re-type it as an allowed-exceptions
     filter region.  */
  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  /* Register each allowed type with the runtime type map.  */
  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, that it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End an exception region for a must-not-throw filter. FAILURE is an
|
|
|
|
|
expression invoke if an uncaught exception propagates this far.
|
|
|
|
|
|
|
|
|
|
This is conceptually identical to expand_eh_region_end_allowed with
|
|
|
|
|
an empty allowed list (if you passed "std::terminate" instead of
|
|
|
|
|
"__cxa_call_unexpected"), but they are represented differently in
|
|
|
|
|
the C++ LSDA. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
expand_eh_region_end_must_not_throw (failure)
|
|
|
|
|
tree failure;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct eh_region *region;
|
|
|
|
|
rtx around_label;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
if (! doing_eh (0))
|
|
|
|
|
return;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
region = expand_eh_region_end ();
|
|
|
|
|
region->type = ERT_MUST_NOT_THROW;
|
|
|
|
|
region->label = gen_label_rtx ();
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* We must emit the call to FAILURE here, so that if this function
|
|
|
|
|
throws a different exception, that it will be processed by the
|
|
|
|
|
correct region. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
around_label = gen_label_rtx ();
|
|
|
|
|
emit_jump (around_label);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
emit_label (region->label);
|
|
|
|
|
expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
emit_label (around_label);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End an exception region for a throw. No handling goes on here,
|
|
|
|
|
but it's the easiest way for the front-end to indicate what type
|
|
|
|
|
is being thrown. */
|
1999-10-16 06:09:09 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
expand_eh_region_end_throw (type)
|
|
|
|
|
tree type;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct eh_region *region;
|
|
|
|
|
|
|
|
|
|
if (! doing_eh (0))
|
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
region = expand_eh_region_end ();
|
|
|
|
|
region->type = ERT_THROW;
|
|
|
|
|
region->u.throw.type = type;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End a fixup region. Within this region the cleanups for the immediately
|
|
|
|
|
enclosing region are _not_ run. This is used for goto cleanup to avoid
|
|
|
|
|
destroying an object twice.
|
1999-10-16 06:09:09 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
This would be an extraordinarily simple prospect, were it not for the
|
|
|
|
|
fact that we don't actually know what the immediately enclosing region
|
|
|
|
|
is. This surprising fact is because expand_cleanups is currently
|
|
|
|
|
generating a sequence that it will insert somewhere else. We collect
|
|
|
|
|
the proper notion of "enclosing" in convert_from_eh_region_ranges. */
|
|
|
|
|
|
|
|
|
|
void
|
|
|
|
|
expand_eh_region_end_fixup (handler)
|
|
|
|
|
tree handler;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct eh_region *fixup;
|
|
|
|
|
|
|
|
|
|
if (! doing_eh (0))
|
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
fixup = expand_eh_region_end ();
|
|
|
|
|
fixup->type = ERT_FIXUP;
|
|
|
|
|
fixup->u.fixup.cleanup_exp = handler;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Return an rtl expression for a pointer to the exception object
|
|
|
|
|
within a handler. */
|
|
|
|
|
|
|
|
|
|
rtx
|
|
|
|
|
get_exception_pointer (fun)
|
|
|
|
|
struct function *fun;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
rtx exc_ptr = fun->eh->exc_ptr;
|
|
|
|
|
if (fun == cfun && ! exc_ptr)
|
1999-10-16 06:09:09 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
exc_ptr = gen_reg_rtx (Pmode);
|
|
|
|
|
fun->eh->exc_ptr = exc_ptr;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
}
|
2002-02-01 18:16:02 +00:00
|
|
|
|
return exc_ptr;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Return an rtl expression for the exception dispatch filter
|
|
|
|
|
within a handler. */
|
|
|
|
|
|
|
|
|
|
static rtx
|
|
|
|
|
get_exception_filter (fun)
|
|
|
|
|
struct function *fun;
|
|
|
|
|
{
|
|
|
|
|
rtx filter = fun->eh->filter;
|
|
|
|
|
if (fun == cfun && ! filter)
|
|
|
|
|
{
|
|
|
|
|
filter = gen_reg_rtx (word_mode);
|
|
|
|
|
fun->eh->filter = filter;
|
|
|
|
|
}
|
|
|
|
|
return filter;
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Begin a region that will contain entries created with
|
|
|
|
|
add_partial_entry. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
begin_protect_partials ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Push room for a new list. */
|
|
|
|
|
cfun->eh->protect_list
|
|
|
|
|
= tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/* Start a new exception region for a region of code that has a
|
|
|
|
|
cleanup action and push the HANDLER for the region onto
|
|
|
|
|
protect_list. All of the regions created with add_partial_entry
|
2002-02-01 18:16:02 +00:00
|
|
|
|
will be ended when end_protect_partials is invoked.
|
|
|
|
|
|
|
|
|
|
??? The only difference between this purpose and that of
|
|
|
|
|
expand_decl_cleanup is that in this case, we only want the cleanup to
|
|
|
|
|
run if an exception is thrown. This should also be handled using
|
|
|
|
|
binding levels. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
|
|
|
|
void
|
|
|
|
|
add_partial_entry (handler)
|
|
|
|
|
tree handler;
|
|
|
|
|
{
|
|
|
|
|
expand_eh_region_start ();
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Add this entry to the front of the list. */
|
|
|
|
|
TREE_VALUE (cfun->eh->protect_list)
|
|
|
|
|
= tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End all the pending exception regions on protect_list. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
end_protect_partials ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
tree t;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Pop the topmost entry. */
|
|
|
|
|
t = TREE_VALUE (cfun->eh->protect_list);
|
|
|
|
|
cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* End all the exception regions. */
|
|
|
|
|
for (; t; t = TREE_CHAIN (t))
|
|
|
|
|
expand_eh_region_end_cleanup (TREE_VALUE (t));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* This section is for the exception handling specific optimization pass. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  /* Region numbers are 1-based, so slot 0 stays NULL; xcalloc leaves
     NULL any slot whose region number was never assigned.  */
  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  /* Iterative depth-first walk over the region tree, indexing each
     region by its region_number.  */
  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static void
|
|
|
|
|
resolve_fixup_regions ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
int i, j, n = cfun->eh->last_region_number;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
for (i = 1; i <= n; ++i)
|
|
|
|
|
{
|
|
|
|
|
struct eh_region *fixup = cfun->eh->region_array[i];
|
|
|
|
|
struct eh_region *cleanup = 0;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
if (! fixup || fixup->type != ERT_FIXUP)
|
|
|
|
|
continue;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
for (j = 1; j <= n; ++j)
|
|
|
|
|
{
|
|
|
|
|
cleanup = cfun->eh->region_array[j];
|
|
|
|
|
if (cleanup->type == ERT_CLEANUP
|
|
|
|
|
&& cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
if (j > n)
|
|
|
|
|
abort ();
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
fixup->u.fixup.real_region = cleanup->outer;
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	/* Retarget the note at the real enclosing region, or drop it
	   entirely when the fixup is outermost.  */
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      /* Finally delete the fixup region itself.  */
      remove_eh_handler (fixup);
    }
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  /* Map insn UIDs to the region whose label/resume/continue insn they
     are; map region numbers to a reachability flag.  */
  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  /* Record, for each region, the UIDs of the insns that anchor it.
     Each UID must belong to at most one region.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  /* A region is reachable iff one of its anchor insns still appears in
     the insn chain.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  PINSNS points to the head of
   the insn chain to process; ORIG_SP is the base of the region-number
   stack; CUR is the region number in effect on entry.  Region notes
   are deleted as they are consumed.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  /* Push the enclosing region and switch to the new one.  */
		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      /* Fixups stand in for their real region.  */
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      /* Throws inside a catch body belong to the region
			 enclosing the try, not the catch itself.  */
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  /* Recurse into the three insn sequences of a CALL_PLACEHOLDER,
	     carrying the current region number into each.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  /* Every region begin must have been matched by an end.  */
  if (sp != orig_sp)
    abort ();
}
|
|
|
|
|
|
|
|
|
|
void
|
2002-02-01 18:16:02 +00:00
|
|
|
|
convert_from_eh_region_ranges ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
int *stack;
|
|
|
|
|
rtx insns;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
collect_eh_region_array ();
|
|
|
|
|
resolve_fixup_regions ();
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
|
|
|
|
|
insns = get_insns ();
|
|
|
|
|
convert_from_eh_region_ranges_1 (&insns, stack, 0);
|
|
|
|
|
free (stack);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
remove_fixup_regions ();
|
|
|
|
|
remove_unreachable_regions (insns);
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
/* Record LABEL as the handler label for REGION in the
   exception_handler_label_map, marking the label so it is not deleted.
   REGION may be null (used for the sjlj return label).  */

static void
add_ehl_entry (label, region)
     rtx label;
     struct eh_region *region;
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = (struct ehl_map_entry *) xmalloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();
  /* NOTE(review): when *slot is non-null after landing pad creation the
     old entry appears to be overwritten without being freed — presumably
     acceptable here; confirm against the table's lifetime.  */

  *slot = entry;
}
|
|
|
|
|
|
|
|
|
|
static void
|
|
|
|
|
ehl_free (pentry)
|
|
|
|
|
PTR pentry;
|
|
|
|
|
{
|
|
|
|
|
struct ehl_map_entry *entry = (struct ehl_map_entry *)pentry;
|
|
|
|
|
LABEL_PRESERVE_P (entry->label) = 0;
|
|
|
|
|
free (entry);
|
|
|
|
|
}
|
|
|
|
|
|
1999-08-26 09:30:50 +00:00
|
|
|
|
/* Rebuild exception_handler_label_map from the current region array,
   mapping each live handler label (or landing pad, once those are
   built) to its region.  */

void
find_exception_handler_labels ()
{
  int i;

  if (exception_handler_label_map)
    htab_empty (exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      exception_handler_label_map
        = htab_create (cfun->eh->last_region_number * 3 / 2,
		       ehl_hash, ehl_eq, ehl_free);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      /* Skip empty slots and duplicate entries.  */
      if (! region || region->region_number != i)
	continue;
      /* After landing pad creation the landing pad, not the handler
	 label, is the insn that must stay live.  */
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
|
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
bool
|
|
|
|
|
current_function_has_exception_handlers ()
|
|
|
|
|
{
|
|
|
|
|
int i;
|
|
|
|
|
|
|
|
|
|
for (i = cfun->eh->last_region_number; i > 0; --i)
|
|
|
|
|
{
|
|
|
|
|
struct eh_region *region = cfun->eh->region_array[i];
|
|
|
|
|
|
|
|
|
|
if (! region || region->region_number != i)
|
|
|
|
|
continue;
|
|
|
|
|
if (region->type != ERT_THROW)
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return false;
|
|
|
|
|
}
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
static struct eh_region *
|
|
|
|
|
duplicate_eh_region_1 (o, map)
|
|
|
|
|
struct eh_region *o;
|
|
|
|
|
struct inline_remap *map;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct eh_region *n
|
|
|
|
|
= (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
n->region_number = o->region_number + cfun->eh->last_region_number;
|
|
|
|
|
n->type = o->type;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
switch (n->type)
|
|
|
|
|
{
|
|
|
|
|
case ERT_CLEANUP:
|
|
|
|
|
case ERT_MUST_NOT_THROW:
|
|
|
|
|
break;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
case ERT_TRY:
|
|
|
|
|
if (o->u.try.continue_label)
|
|
|
|
|
n->u.try.continue_label
|
|
|
|
|
= get_label_from_map (map,
|
|
|
|
|
CODE_LABEL_NUMBER (o->u.try.continue_label));
|
|
|
|
|
break;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
case ERT_CATCH:
|
|
|
|
|
n->u.catch.type_list = o->u.catch.type_list;
|
|
|
|
|
break;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
case ERT_ALLOWED_EXCEPTIONS:
|
|
|
|
|
n->u.allowed.type_list = o->u.allowed.type_list;
|
|
|
|
|
break;
|
|
|
|
|
|
|
|
|
|
case ERT_THROW:
|
|
|
|
|
n->u.throw.type = o->u.throw.type;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
default:
|
|
|
|
|
abort ();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (o->label)
|
|
|
|
|
n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
|
|
|
|
|
if (o->resume)
|
|
|
|
|
{
|
|
|
|
|
n->resume = map->insn_map[INSN_UID (o->resume)];
|
|
|
|
|
if (n->resume == NULL)
|
|
|
|
|
abort ();
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
return n;
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Second pass of EH region duplication: fix up the inter-region
   pointers of the copy of O found in N_ARRAY (indexed by the
   ORIGINAL region numbers).  Must run after every region has been
   copied by duplicate_eh_region_1, since it dereferences the copies
   of neighboring regions.  */

static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  /* The copy corresponding to O.  */
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      /* Redirect the try's first/last catch links to the copies.  */
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      /* Relink the doubly-linked catch chain; either end may be NULL.  */
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  /* Fix up the tree structure links (parent, first child, sibling).  */
  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Duplicate all EH regions of inlined function IFUN into the current
   function (cfun), remapping labels/insns through MAP.  The copies are
   spliced into cfun's region tree under the current region (or at the
   top level if there is none).  Returns the offset that was added to
   IFUN's region numbers (0 if IFUN has no regions), so callers can
   translate old region numbers to new ones.  */

int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  /* Map from IFUN's region numbers to the new region copies.
     Slot 0 is unused; region numbers are 1-based.  */
  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  /* Pass 1: copy each region.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      /* Skip empty slots and duplicate entries for the same region.  */
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  /* Pass 2: fix up inter-region pointers in the copies.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  /* Copy of the root of IFUN's region tree.  */
  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      /* Append ROOT as the last child of the current region.  */
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      /* Copies that had no parent in IFUN become children of CUR.  */
      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      /* No current region: append ROOT as the last top-level peer.  */
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  /* Bump cfun's region count and return the numbering offset that
     duplicate_eh_region_1 applied to the copies.  */
  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
static int
|
|
|
|
|
t2r_eq (pentry, pdata)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
const PTR pdata;
|
|
|
|
|
{
|
|
|
|
|
tree entry = (tree) pentry;
|
|
|
|
|
tree data = (tree) pdata;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
return TREE_PURPOSE (entry) == data;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static hashval_t
|
|
|
|
|
t2r_hash (pentry)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
{
|
|
|
|
|
tree entry = (tree) pentry;
|
|
|
|
|
return TYPE_HASH (TREE_PURPOSE (entry));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static int
|
|
|
|
|
t2r_mark_1 (slot, data)
|
|
|
|
|
PTR *slot;
|
|
|
|
|
PTR data ATTRIBUTE_UNUSED;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
tree contents = (tree) *slot;
|
|
|
|
|
ggc_mark_tree (contents);
|
|
|
|
|
return 1;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static void
|
|
|
|
|
t2r_mark (addr)
|
|
|
|
|
PTR addr;
|
|
|
|
|
{
|
|
|
|
|
htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Ensure TYPE has an entry in type_to_runtime_map, creating one by
   asking the language front end for the runtime representation the
   first time TYPE is seen.  Entries are TREE_LIST nodes with the
   type as TREE_PURPOSE and the runtime object as TREE_VALUE.  */

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      /* First time: let the front end build the runtime type object.  */
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Return the runtime type object previously recorded for TYPE by
   add_type_for_runtime.  NOTE(review): uses NO_INSERT and then
   dereferences the slot unconditionally -- crashes if TYPE was never
   added; the comment below documents that invariant.  */

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  /* The type (for catches) or type list (for exception specs).  */
  tree t;
  /* The assigned filter value: 1-based ttype index for single types,
     negative byte index into the uleb128 buffer for spec lists (see
     add_ttypes_entry / add_ehspec_entry).  */
  int filter;
};
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
|
|
|
|
|
(a tree) for a @TTypes type node we are thinking about adding. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static int
|
|
|
|
|
ttypes_filter_eq (pentry, pdata)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
const PTR pdata;
|
|
|
|
|
{
|
|
|
|
|
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
|
|
|
|
|
tree data = (tree) pdata;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
return entry->t == data;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static hashval_t
|
|
|
|
|
ttypes_filter_hash (pentry)
|
|
|
|
|
const PTR pentry;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
|
|
|
|
|
return TYPE_HASH (entry->t);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
|
|
|
|
|
exception specification list we are thinking about adding. */
|
|
|
|
|
/* ??? Currently we use the type lists in the order given. Someone
|
|
|
|
|
should put these in some canonical order. */
|
|
|
|
|
|
|
|
|
|
static int
|
|
|
|
|
ehspec_filter_eq (pentry, pdata)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
const PTR pdata;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
|
|
|
|
|
const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
return type_list_equal (entry->t, data->t);
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Hash function for exception specification lists. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static hashval_t
|
|
|
|
|
ehspec_filter_hash (pentry)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
{
|
|
|
|
|
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
|
|
|
|
|
hashval_t h = 0;
|
|
|
|
|
tree list;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
for (list = entry->t; list ; list = TREE_CHAIN (list))
|
|
|
|
|
h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
|
|
|
|
|
return h;
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      /* New type: create a hash entry owned by the table (freed by
	 the table's del function) and append to ttype_data.  */
      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  /* Stack-allocated key used only for the lookup; a heap copy is
     made if the list turns out to be new.  */
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  /* Temporary dedup tables, discarded once all filters are assigned.  */
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  /* Entries are xmalloc'd ttypes_filter structs; free on delete.  */
  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* For each EH region that needs one, create the post-landing-pad
   label and emit the dispatch code that runs there: type-filter
   compare-and-jump chains for TRY regions, a single filter compare
   for ALLOWED_EXCEPTIONS, falling back to a RESX marker that later
   becomes a rethrow (see connect_post_landing_pads).  */

static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type_list == NULL)
		  /* A catch-all: jump unconditionally.  */
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught. Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    /* Walk both lists in lockstep.  */
		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* Jump to the handler iff the runtime filter matches.  */
	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* No dispatch needed: the region's label is the pad.  */
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	/* An enclosing region can handle the rethrow locally.  */
	emit_jump (outer->post_landing_pad);
      else
	/* No enclosing handler: continue unwinding out of the
	   function via the runtime.  */
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      /* Splice in the replacement and remove the RESX marker.  */
      emit_insns_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* Build the dwarf2-style landing pad for each region that needs one:
   emit a label, the target's receiver pattern if any, clobbers for
   call-saved EH data registers, and moves of the exception pointer
   and filter value out of the EH return registers.  The pad is placed
   just before the region's post-landing pad.  */

static void
dw2_build_landing_pads ()
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* Only these region kinds receive control from the unwinder.  */
      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
      if (HAVE_nonlocal_goto_receiver)
	emit_insn (gen_nonlocal_goto_receiver ());
      else
#endif
	{ /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      /* Capture the exception object pointer and the selected filter
	 from the EH return data registers into pseudos.  */
      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insns_before (seq, region->post_landing_pad);
    }
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* Per-region scratch data used while building setjmp/longjmp EH
   tables, indexed by region number.  */
struct sjlj_lp_info
{
  /* Nonzero if some insn can transfer control into this region
     (set by sjlj_find_directly_reachable_regions).  */
  int directly_reachable;
  /* Action record index; -1 = no action, -2 = must-not-throw
     (consumed by sjlj_assign_call_site_values).  */
  int action_index;
  /* Index used to dispatch from the common sjlj landing pad to this
     region's post-landing pad.  */
  int dispatch_index;
  /* Call-site value recorded for insns in this region
     (0 = must-not-throw, -1 = no action).  */
  int call_site_index;
};
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Scan every insn for REG_EH_REGION notes and, for each throwing
   insn, mark in LP_INFO the innermost region that might catch the
   exception.  Returns true if any region was marked reachable.  */

static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      /* Region numbers <= 0 mean the insn cannot throw here.  */
      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      /* A throw marker records the thrown type; the search starts
	 from its enclosing region.  */
      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Fill in the action, dispatch and call-site indices in LP_INFO for
   every directly reachable region, pointing each such region's
   landing pad at the common DISPATCH_LABEL.  Builds the action
   record table as a side effect.  */

static void
sjlj_assign_call_site_values (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	/* All sjlj regions share one landing pad.  */
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  If dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Walk the entire insn stream and, before each insn that can throw (or,
   with -fnon-call-exceptions, trap), emit a store of that insn's
   call-site index into the sjlj function context.  At runtime the
   dispatcher reads this slot to learn which region was active.
   Consecutive insns with the same call-site index share one store;
   tracking is reset at labels since control may enter there with an
   unknown previous value.  LP_INFO is indexed by region number and
   supplies the call-site index for each region.  */

static void
sjlj_mark_call_sites (lp_info)
     struct sjlj_lp_info *lp_info;
{
  /* -2 is used as "unknown": it matches neither a real call-site index
     nor the -1 "no action" marker.  */
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      /* The context slot already holds the right value; skip the store.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      /* Build the store in a detached sequence, then splice it in
	 ahead of the (possibly adjusted) insertion point.  */
      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insns_before (p, before);
      last_call_site = this_call_site;
    }
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Construct the SjLj_Function_Context.  */

/* Emit, at the start of the function, the code that initializes the
   sjlj function context (personality routine, LSDA pointer), performs
   the setjmp whose longjmp target is DISPATCH_LABEL, and registers the
   context with the runtime unwinder.  The code is generated in a
   detached sequence and inserted after the NOTE_INSN_FUNCTION_BEG
   note.  */

static void
sjlj_emit_function_enter (dispatch_label)
     rtx dispatch_label;
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  /* Store the LSDA address if this function has one; zero otherwise.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
      emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    /* Call the library setjmp on the jmp_buf embedded in the function
       context; a nonzero return means we arrived via longjmp and must
       jump to the dispatcher.  */
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    /* Hint to the optimizers that the common case is a zero return
       (the direct, non-exceptional path).  */
    note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
      break;
  emit_insns_after (seq, fn_begin);
}
|
|
|
|
|
|
|
|
|
|
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  Simply
   records AFTER; sjlj_emit_function_exit does the actual emission
   later.  */

void
sjlj_emit_function_exit_after (after)
     rtx after;
{
  cfun->eh->sjlj_exit_after = after;
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static void
|
|
|
|
|
sjlj_emit_function_exit ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
rtx seq;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
start_sequence ();
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
|
|
|
|
|
1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
seq = get_insns ();
|
|
|
|
|
end_sequence ();
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* ??? Really this can be done in any block at loop level 0 that
|
|
|
|
|
post-dominates all can_throw_internal instructions. This is
|
|
|
|
|
the last possible moment. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
emit_insns_after (seq, cfun->eh->sjlj_exit_after);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Emit, just before the first reachable post-landing pad, the common
   sjlj dispatcher: the code at DISPATCH_LABEL that every longjmp-style
   unwind lands on.  It reloads the call-site index, exception pointer
   and filter value from the function context, then compares the index
   against each directly reachable region (per LP_INFO) and jumps to
   that region's post-landing pad; the first reachable region is the
   fall-through default and needs no compare.  */

static void
sjlj_emit_dispatch_table (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != Pmode)
    {
      /* The runtime stores the exception pointer in a word-mode slot;
	 convert to Pmode when they differ.  */
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (Pmode, mem);
#else
      mem = convert_to_mode (Pmode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  /* The filter value lives in the next word of the data array.  */
  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      /* Remember the first reachable region; it receives control by
	 falling through, so no comparison is emitted for it.  */
      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  /* Place the dispatcher immediately before the fall-through target.  */
  emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
			   ->post_landing_pad));
}
|
|
|
|
|
|
|
|
|
|
static void
|
|
|
|
|
sjlj_build_landing_pads ()
|
|
|
|
|
{
|
|
|
|
|
struct sjlj_lp_info *lp_info;
|
|
|
|
|
|
|
|
|
|
lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
|
|
|
|
|
sizeof (struct sjlj_lp_info));
|
|
|
|
|
|
|
|
|
|
if (sjlj_find_directly_reachable_regions (lp_info))
|
|
|
|
|
{
|
|
|
|
|
rtx dispatch_label = gen_label_rtx ();
|
|
|
|
|
|
|
|
|
|
cfun->eh->sjlj_fc
|
|
|
|
|
= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
|
|
|
|
|
int_size_in_bytes (sjlj_fc_type_node),
|
|
|
|
|
TYPE_ALIGN (sjlj_fc_type_node));
|
|
|
|
|
|
|
|
|
|
sjlj_assign_call_site_values (dispatch_label, lp_info);
|
|
|
|
|
sjlj_mark_call_sites (lp_info);
|
|
|
|
|
|
|
|
|
|
sjlj_emit_function_enter (dispatch_label);
|
|
|
|
|
sjlj_emit_dispatch_table (dispatch_label, lp_info);
|
|
|
|
|
sjlj_emit_function_exit ();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
free (lp_info);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/* Final pass of exception handling generation: clean up the CFG using
   the precise per-handler reachability information, build the landing
   pads (dwarf2 or sjlj flavor), and then rebuild the CFG from scratch
   since landing pad generation rewires control flow.  */

void
finish_eh_generation ()
{
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP);

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP);
}
|
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
static hashval_t
|
|
|
|
|
ehl_hash (pentry)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
{
|
|
|
|
|
struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
|
|
|
|
|
|
|
|
|
|
/* 2^32 * ((sqrt(5) - 1) / 2) */
|
|
|
|
|
const hashval_t scaled_golden_ratio = 0x9e3779b9;
|
|
|
|
|
return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
static int
|
|
|
|
|
ehl_eq (pentry, pdata)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
const PTR pdata;
|
|
|
|
|
{
|
|
|
|
|
struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
|
|
|
|
|
struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
|
|
|
|
|
|
|
|
|
|
return entry->label == data->label;
|
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* This section handles removing dead code for flow. */
|
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
/* Remove LABEL from exception_handler_label_map. */
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
static void
|
|
|
|
|
remove_exception_handler_label (label)
|
|
|
|
|
rtx label;
|
|
|
|
|
{
|
2002-05-09 20:02:13 +00:00
|
|
|
|
struct ehl_map_entry **slot, tmp;
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
/* If exception_handler_label_map was not built yet,
|
2002-02-01 18:16:02 +00:00
|
|
|
|
there is nothing to do. */
|
2002-05-09 20:02:13 +00:00
|
|
|
|
if (exception_handler_label_map == NULL)
|
2002-02-01 18:16:02 +00:00
|
|
|
|
return;
|
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
tmp.label = label;
|
|
|
|
|
slot = (struct ehl_map_entry **)
|
|
|
|
|
htab_find_slot (exception_handler_label_map, &tmp, NO_INSERT);
|
|
|
|
|
if (! slot)
|
|
|
|
|
abort ();
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
htab_clear_slot (exception_handler_label_map, (void **) slot);
|
2002-02-01 18:16:02 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/* Splice REGION from the region tree etc. */
|
|
|
|
|
|
|
|
|
|
/* Splice REGION from the region tree etc.  Re-parents REGION's inner
   regions to its outer region, updates the region_array aliasing
   information, removes the region's label from the label map, and for
   ERT_CATCH regions unlinks the catch from its controlling try's
   catch chain (recursively removing the try if it loses its last
   catch).  Finishes by freeing REGION.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      int i;
      /* Every alias of this region now refers to the outer region.  */
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
				{ cfun->eh->region_array[i] = outer; });
    }

  if (outer)
    {
      /* The outer region inherits this region's number and aliases.  */
      if (!outer->aka)
	outer->aka = BITMAP_XMALLOC ();
      if (region->aka)
	bitmap_a_or_b (outer->aka, outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  /* After landing pad generation, the landing pad label is the one
     registered in the label map; before, it is the handler label.  */
  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  /* Unlink REGION from its peer list (either the outer region's inner
     list or the root list of the region tree).  */
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  /* Re-parent REGION's children onto OUTER and splice them into the
     head of OUTER's peer list.  */
  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = outer;
      /* The loop stops at the last child; fix its parent too, then use
	 it to link the whole child list onto the peer list.  */
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      /* The controlling ERT_TRY follows the run of catches in the peer
	 list.  */
      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      if (try->type != ERT_TRY)
	abort ();

      /* Unlink this catch from the try's doubly-linked catch chain.  */
      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  /* A try region with no remaining catches is itself dead.  */
	  if (! next)
	    remove_eh_handler (try);
	}
    }

  free_region (region);
}
|
|
|
|
|
|
|
|
|
|
/* LABEL heads a basic block that is about to be deleted. If this
|
|
|
|
|
label corresponds to an exception region, we may be able to
|
|
|
|
|
delete the region. */
|
|
|
|
|
|
|
|
|
|
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  /* Look up the region associated with LABEL; nothing to do if the
     label is not an EH label or its region is already gone.  */
  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      /* Keep the region but drop its (now dead) label.  */
      htab_clear_slot (exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-05-09 20:02:13 +00:00
|
|
|
|
/* Invokes CALLBACK for every exception handler label. Only used by old
|
|
|
|
|
loop hackery; should not be used by new code. */
|
|
|
|
|
|
|
|
|
|
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  Iteration order is
   whatever htab_traverse produces — callers must not rely on it.  */

void
for_each_eh_label (callback)
     void (*callback) PARAMS ((rtx));
{
  htab_traverse (exception_handler_label_map, for_each_eh_label_1,
		 (void *)callback);
}
|
|
|
|
|
|
|
|
|
|
static int
|
|
|
|
|
for_each_eh_label_1 (pentry, data)
|
|
|
|
|
PTR *pentry;
|
|
|
|
|
PTR data;
|
|
|
|
|
{
|
|
|
|
|
struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
|
|
|
|
|
void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
|
|
|
|
|
|
|
|
|
|
(*callback) (entry->label);
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* This section describes CFG exception edges for flow. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  /* Accumulated list of types for which a catch has already been
     recorded on this search path (used to prune shadowed catches).  */
  tree types_caught;
  /* Accumulated list of allowed-exception type lists encountered
     (used to detect when a catch may act as a catch-all).  */
  tree types_allowed;
  /* INSN_LIST of handler (or landing pad) labels collected so far.  */
  rtx handlers;
};
|
|
|
|
|
|
|
|
|
|
/* A subroutine of reachable_next_level. Return true if TYPE, or a
|
|
|
|
|
base class of TYPE, is in HANDLED. */
|
|
|
|
|
|
|
|
|
|
static int
|
|
|
|
|
check_handled (handled, type)
|
|
|
|
|
tree handled, type;
|
|
|
|
|
{
|
|
|
|
|
tree t;
|
|
|
|
|
|
|
|
|
|
/* We can check for exact matches without front-end help. */
|
|
|
|
|
if (! lang_eh_type_covers)
|
|
|
|
|
{
|
|
|
|
|
for (t = handled; t ; t = TREE_CHAIN (t))
|
|
|
|
|
if (TREE_VALUE (t) == type)
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
else
|
|
|
|
|
{
|
|
|
|
|
for (t = handled; t ; t = TREE_CHAIN (t))
|
|
|
|
|
if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/* A subroutine of reachable_next_level. If we are collecting a list
|
|
|
|
|
of handlers, add one. After landing pad generation, reference
|
|
|
|
|
it instead of the handlers themselves. Further, the handlers are
|
|
|
|
|
all wired together, so by referencing one, we've got them all.
|
|
|
|
|
Before landing pad generation we reference each handler individually.
|
|
|
|
|
|
|
|
|
|
LP_REGION contains the landing pad; REGION is the handler. */
|
|
|
|
|
|
|
|
|
|
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  /* A null INFO means the caller only wants the reachability verdict,
     not the handler list.  */
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      /* One landing pad reference suffices for all handlers.  */
      if (! info->handlers)
	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
|
|
|
|
|
|
|
|
|
|
/* Process one level of exception regions for reachability.
|
|
|
|
|
If TYPE_THROWN is non-null, then it is the *exact* type being
|
|
|
|
|
propagated. If INFO is non-null, then collect handler labels
|
|
|
|
|
and caught/allowed type information between invocations. */
|
|
|
|
|
|
|
|
|
|
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.

   Returns a reachable_code classifying how REGION relates to the
   propagating exception: definitely caught, possibly caught, not
   caught, or blocked (propagation cannot continue).  Aborts on
   region types that should never appear on a propagation path.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	/* Walk this try's catch chain in order.  */
	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    /* ??? _Unwind_ForcedUnwind will want outer cleanups
	       to be run as well.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previous, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Retrieve a list of labels of exception handlers which can be
|
|
|
|
|
reached by a given insn. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  Returns an INSN_LIST of handler (or
   landing pad) labels, or NULL if INSN cannot reach any handler in
   this function.  Handles both REG_EH_REGION-annotated insns and
   RESX jumps, which leave a region rather than entering one.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  /* A RESX pattern encodes its region number directly; other insns
     carry it in a REG_EH_REGION note.  */
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      /* No note, or a non-positive region number, means the insn
	 cannot throw to a handler in this function.  */
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      /* An ERT_THROW region records the exact thrown type; start the
	 search from its containing region.  */
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* Walk outward, collecting handlers, until the exception is
     definitely caught or blocked.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
      break;

  return info.handlers;
}
|
|
|
|
|
|
|
|
|
|
/* Determine if the given INSN can throw an exception that is caught
|
|
|
|
|
within the function. */
|
|
|
|
|
|
|
|
|
|
/* Determine if the given INSN can throw an exception that is caught
   within the function.  Looks through SEQUENCEs and recurses into
   the alternatives of a CALL_PLACEHOLDER.  */

bool
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  /* For a delay-slot SEQUENCE, the throwing insn is the first
     element.  */
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      /* Check each of the placeholder's three insn sequences; any
	 throwing insn in any of them makes the whole answer true.  */
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_internal (sub))
	      return true;
	}
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      /* ERT_THROW records the exact thrown type; the search starts at
	 its containing region.  */
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
|
|
|
|
|
|
|
|
|
|
/* Determine if the given INSN can throw an exception that is
|
|
|
|
|
visible outside the function. */
|
|
|
|
|
|
|
|
|
|
bool
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  /* Non-insns cannot throw.  */
  if (! INSN_P (insn))
    return false;

  /* For a delay-slot SEQUENCE, examine the insn that controls it.  */
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* A CALL_PLACEHOLDER holds up to three alternative insn chains;
     recurse into each of them.  */
  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_external (sub))
	      return true;
	}
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  /* A non-positive region number marks an insn known not to throw.  */
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  /* For a THROW region, note the type thrown and start the walk at
     the enclosing region.  */
  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
|
|
|
|
|
|
|
|
|
|
/* True if nothing in this function can throw outside this function. */
|
|
|
|
|
|
|
|
|
|
bool
|
|
|
|
|
nothrow_function_p ()
|
|
|
|
|
{
|
|
|
|
|
rtx insn;
|
|
|
|
|
|
|
|
|
|
if (! flag_exceptions)
|
|
|
|
|
return true;
|
|
|
|
|
|
|
|
|
|
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
|
|
|
|
if (can_throw_external (insn))
|
|
|
|
|
return false;
|
|
|
|
|
for (insn = current_function_epilogue_delay_list; insn;
|
|
|
|
|
insn = XEXP (insn, 1))
|
|
|
|
|
if (can_throw_external (insn))
|
|
|
|
|
return false;
|
|
|
|
|
|
|
|
|
|
return true;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Various hooks for unwind library. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
|
|
|
|
/* Do any necessary initialization to access arbitrary stack frames.
|
|
|
|
|
On the SPARC, this means flushing the register windows. */
|
|
|
|
|
|
|
|
|
|
void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  /* Target hook for extra setup, e.g. flushing register windows.  */
  SETUP_FRAME_ADDRESSES ();
#endif
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Expand __builtin_eh_return_data_regno: map the abstract EH data
   register index in ARGLIST to the target's debug-info register
   number, as an rtx constant.  Returns constm1_rtx on error or if
   the index is out of range for this target.  */

rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  /* The argument must be a compile-time integer constant.  */
  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used in
     unwind/debug info.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
|
|
|
|
|
|
1999-08-26 09:30:50 +00:00
|
|
|
|
/* Given a value extracted from the return address register or stack slot,
|
|
|
|
|
return the actual address encoded in that value. */
|
|
|
|
|
|
|
|
|
|
rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
|
|
|
|
|
|
|
|
|
|
/* Given an actual address in addr_tree, do any necessary encoding
|
|
|
|
|
and return the value to be stored in the return address register or
|
|
|
|
|
stack slot so the epilogue will return to that address. */
|
|
|
|
|
|
|
|
|
|
rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  /* On targets where pointers are narrower than Pmode, widen the
     value before doing Pmode arithmetic on it.  */
#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

  /* Undo the adjustment applied by expand_builtin_extract_return_addr.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Set up the epilogue with the magic bits we'll need to return to the
|
|
|
|
|
exception handler. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  /* Evaluate the two operands, targeting any pseudos chosen by a
     previous __builtin_eh_return in this function.  */
  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (stackadj) != Pmode)
    stackadj = convert_memory_address (Pmode, stackadj);

  if (GET_MODE (handler) != Pmode)
    handler = convert_memory_address (Pmode, handler);
#endif

  /* The first use sets up the shared pseudos and the label that
     expand_eh_return will emit the magic epilogue bits after.
     Subsequent uses just store into the same pseudos.  */
  if (! cfun->eh->ehr_label)
    {
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      if (stackadj != cfun->eh->ehr_stackadj)
	emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
	emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
|
|
|
|
|
|
|
|
|
|
/* Emit, just before the epilogue, the code that consumes the values
   stored by expand_builtin_eh_return: set the stack adjustment and
   handler address so the epilogue returns into the EH landing pad.
   On the normal (non-EH) path the stack adjustment is zeroed.  */

void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  /* Nothing to do if __builtin_eh_return was never used.  */
  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  /* Normal path: clear the stack adjustment and skip the EH setup.  */
  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      /* Fallback: move the values into the target-specified locations
	 directly.  */
      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
	{
	  error ("__builtin_eh_return not supported on this target");
	  ra = gen_reg_rtx (Pmode);
	}

      emit_move_insn (sa, cfun->eh->ehr_stackadj);
      emit_move_insn (ra, cfun->eh->ehr_handler);
    }

  emit_label (around_label);
}
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
/* In the following functions, we represent entries in the action table
|
|
|
|
|
as 1-based indices. Special cases are:
|
|
|
|
|
|
|
|
|
|
0: null action record, non-null landing pad; implies cleanups
|
|
|
|
|
-1: null action record, null landing pad; implies no action
|
|
|
|
|
-2: no call-site entry; implies must_not_throw
|
|
|
|
|
-3: we have yet to process outer regions
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
Further, no special cases apply to the "next" field of the record.
|
|
|
|
|
For next, 0 means end of list. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct action_record
{
  /* 1-based byte offset of this record in action_record_data.  */
  int offset;
  /* Type filter value; 0 denotes a cleanup.  */
  int filter;
  /* 1-based index of the next record in the chain; 0 = end of list.  */
  int next;
};
|
|
|
|
|
|
|
|
|
|
static int
|
|
|
|
|
action_record_eq (pentry, pdata)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
const PTR pdata;
|
|
|
|
|
{
|
|
|
|
|
const struct action_record *entry = (const struct action_record *) pentry;
|
|
|
|
|
const struct action_record *data = (const struct action_record *) pdata;
|
|
|
|
|
return entry->filter == data->filter && entry->next == data->next;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static hashval_t
|
|
|
|
|
action_record_hash (pentry)
|
|
|
|
|
const PTR pentry;
|
|
|
|
|
{
|
|
|
|
|
const struct action_record *entry = (const struct action_record *) pentry;
|
|
|
|
|
return entry->next * 1009 + entry->filter;
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Enter (FILTER, NEXT) into the action table, reusing an identical
   existing record if one is present in AR_HASH.  Returns the record's
   1-based offset in action_record_data.  */

static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Build the chain of action records for REGION and all of its outer
   regions, adding entries to AR_HASH as needed.  Returns the 1-based
   index of the head record, or one of the special values documented
   above (-1 no action, -2 must_not_throw, 0 cleanup-only).  */

static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      /* Catch-all: this record terminates the chain.  */
	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
				next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
|
|
|
|
|
|
|
|
|
|
static int
|
|
|
|
|
add_call_site (landing_pad, action)
|
|
|
|
|
rtx landing_pad;
|
|
|
|
|
int action;
|
|
|
|
|
{
|
|
|
|
|
struct call_site_record *data = cfun->eh->call_site_data;
|
|
|
|
|
int used = cfun->eh->call_site_data_used;
|
|
|
|
|
int size = cfun->eh->call_site_data_size;
|
|
|
|
|
|
|
|
|
|
if (used >= size)
|
1999-10-16 06:09:09 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
size = (size ? size * 2 : 64);
|
|
|
|
|
data = (struct call_site_record *)
|
|
|
|
|
xrealloc (data, sizeof (*data) * size);
|
|
|
|
|
cfun->eh->call_site_data = data;
|
|
|
|
|
cfun->eh->call_site_data_size = size;
|
1999-10-16 06:09:09 +00:00
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
data[used].landing_pad = landing_pad;
|
|
|
|
|
data[used].action = action;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
cfun->eh->call_site_data_used = used + 1;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
return used + call_site_base;
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
|
|
|
|
|
The new note numbers will not refer to region numbers, but
|
|
|
|
|
instead to call site entries. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  /* Action state: -3 none seen yet, -2 must-not-throw, -1 no action,
     >= 0 an action record index.  */
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	/* For a delay-slot SEQUENCE, look at the controlling insn.  */
	insn = iter;
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    /* Noteless calls/trapping insns get the "no action" state;
	       everything else is ignored.  */
	    if (! (GET_CODE (insn) == CALL_INSN
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  /* Close any region left open at the end of the insn stream.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static void
|
|
|
|
|
push_uleb128 (data_area, value)
|
|
|
|
|
varray_type *data_area;
|
|
|
|
|
unsigned int value;
|
|
|
|
|
{
|
|
|
|
|
do
|
|
|
|
|
{
|
|
|
|
|
unsigned char byte = value & 0x7f;
|
|
|
|
|
value >>= 7;
|
|
|
|
|
if (value)
|
|
|
|
|
byte |= 0x80;
|
|
|
|
|
VARRAY_PUSH_UCHAR (*data_area, byte);
|
|
|
|
|
}
|
|
|
|
|
while (value);
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static void
|
|
|
|
|
push_sleb128 (data_area, value)
|
|
|
|
|
varray_type *data_area;
|
|
|
|
|
int value;
|
|
|
|
|
{
|
|
|
|
|
unsigned char byte;
|
|
|
|
|
int more;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
do
|
|
|
|
|
{
|
|
|
|
|
byte = value & 0x7f;
|
|
|
|
|
value >>= 7;
|
|
|
|
|
more = ! ((value == 0 && (byte & 0x40) == 0)
|
|
|
|
|
|| (value == -1 && (byte & 0x40) != 0));
|
|
|
|
|
if (more)
|
|
|
|
|
byte |= 0x80;
|
|
|
|
|
VARRAY_PUSH_UCHAR (*data_area, byte);
|
|
|
|
|
}
|
|
|
|
|
while (more);
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
#ifndef HAVE_AS_LEB128
|
|
|
|
|
static int
|
|
|
|
|
dw2_size_of_call_site_table ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
int n = cfun->eh->call_site_data_used;
|
|
|
|
|
int size = n * (4 + 4 + 4);
|
|
|
|
|
int i;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
for (i = 0; i < n; ++i)
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
struct call_site_record *cs = &cfun->eh->call_site_data[i];
|
|
|
|
|
size += size_of_uleb128 (cs->action);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
2002-02-01 18:16:02 +00:00
|
|
|
|
|
|
|
|
|
return size;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static int
|
|
|
|
|
sjlj_size_of_call_site_table ()
|
|
|
|
|
{
|
|
|
|
|
int n = cfun->eh->call_site_data_used;
|
|
|
|
|
int size = 0;
|
|
|
|
|
int i;
|
|
|
|
|
|
|
|
|
|
for (i = 0; i < n; ++i)
|
|
|
|
|
{
|
|
|
|
|
struct call_site_record *cs = &cfun->eh->call_site_data[i];
|
|
|
|
|
size += size_of_uleb128 (INTVAL (cs->landing_pad));
|
|
|
|
|
size += size_of_uleb128 (cs->action);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return size;
|
|
|
|
|
}
|
|
|
|
|
#endif
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Emit the DWARF2 call-site table to the assembly file: for each
   call site, the region start/length (relative to the function
   start), the landing pad, and the action index.  */

static void
dw2_output_call_site_table ()
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  /* Keep call-site numbering unique across functions.  */
  call_site_base += n;
}
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
static void
|
|
|
|
|
sjlj_output_call_site_table ()
|
|
|
|
|
{
|
|
|
|
|
int n = cfun->eh->call_site_data_used;
|
|
|
|
|
int i;
|
|
|
|
|
|
|
|
|
|
for (i = 0; i < n; ++i)
|
|
|
|
|
{
|
|
|
|
|
struct call_site_record *cs = &cfun->eh->call_site_data[i];
|
|
|
|
|
|
|
|
|
|
dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
|
|
|
|
|
"region %d landing pad", i);
|
|
|
|
|
dw2_asm_output_data_uleb128 (cs->action, "action");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
call_site_base += n;
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
void
|
|
|
|
|
output_function_exception_table ()
|
1999-08-26 09:30:50 +00:00
|
|
|
|
{
|
2002-02-01 18:16:02 +00:00
|
|
|
|
int tt_format, cs_format, lp_format, i, n;
|
|
|
|
|
#ifdef HAVE_AS_LEB128
|
|
|
|
|
char ttype_label[32];
|
|
|
|
|
char cs_after_size_label[32];
|
|
|
|
|
char cs_end_label[32];
|
|
|
|
|
#else
|
|
|
|
|
int call_site_len;
|
|
|
|
|
#endif
|
|
|
|
|
int have_tt_data;
|
|
|
|
|
int funcdef_number;
|
|
|
|
|
int tt_format_size = 0;
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Not all functions need anything. */
|
|
|
|
|
if (! cfun->uses_eh_lsda)
|
1999-08-26 09:30:50 +00:00
|
|
|
|
return;
|
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
funcdef_number = (USING_SJLJ_EXCEPTIONS
|
|
|
|
|
? sjlj_funcdef_number
|
|
|
|
|
: current_funcdef_number);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
#ifdef IA64_UNWIND_INFO
|
|
|
|
|
fputs ("\t.personality\t", asm_out_file);
|
|
|
|
|
output_addr_const (asm_out_file, eh_personality_libfunc);
|
|
|
|
|
fputs ("\n\t.handlerdata\n", asm_out_file);
|
|
|
|
|
/* Note that varasm still thinks we're in the function's code section.
|
|
|
|
|
The ".endp" directive that will immediately follow will take us back. */
|
|
|
|
|
#else
|
|
|
|
|
(*targetm.asm_out.exception_section) ();
|
|
|
|
|
#endif
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
|
|
|
|
|
|| VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Indicate the format of the @TType entries. */
|
|
|
|
|
if (! have_tt_data)
|
|
|
|
|
tt_format = DW_EH_PE_omit;
|
|
|
|
|
else
|
|
|
|
|
{
|
|
|
|
|
tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
|
|
|
|
|
#ifdef HAVE_AS_LEB128
|
|
|
|
|
ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
|
|
|
|
|
#endif
|
|
|
|
|
tt_format_size = size_of_encoded_value (tt_format);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
assemble_align (tt_format_size * BITS_PER_UNIT);
|
|
|
|
|
}
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* The LSDA header. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* Indicate the format of the landing pad start pointer. An omitted
|
|
|
|
|
field implies @LPStart == @Start. */
|
|
|
|
|
/* Currently we always put @LPStart == @Start. This field would
|
|
|
|
|
be most useful in moving the landing pads completely out of
|
|
|
|
|
line to another section, but it could also be used to minimize
|
|
|
|
|
the size of uleb128 landing pad offsets. */
|
|
|
|
|
lp_format = DW_EH_PE_omit;
|
|
|
|
|
dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
|
|
|
|
|
eh_data_format_name (lp_format));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* @LPStart pointer would go here. */
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
dw2_asm_output_data (1, tt_format, "@TType format (%s)",
|
|
|
|
|
eh_data_format_name (tt_format));
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
#ifndef HAVE_AS_LEB128
|
|
|
|
|
if (USING_SJLJ_EXCEPTIONS)
|
|
|
|
|
call_site_len = sjlj_size_of_call_site_table ();
|
|
|
|
|
else
|
|
|
|
|
call_site_len = dw2_size_of_call_site_table ();
|
|
|
|
|
#endif
|
1999-08-26 09:30:50 +00:00
|
|
|
|
|
2002-02-01 18:16:02 +00:00
|
|
|
|
/* A pc-relative 4-byte displacement to the @TType data. */
|
|
|
|
|
if (have_tt_data)
|
|
|
|
|
{
|
|
|
|
|
#ifdef HAVE_AS_LEB128
|
|
|
|
|
char ttype_after_disp_label[32];
|
|
|
|
|
ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
|
|
|
|
|
funcdef_number);
|
|
|
|
|
dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
|
|
|
|
|
"@TType base offset");
|
|
|
|
|
ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
|
|
|
|
|
#else
|
|
|
|
|
/* Ug. Alignment queers things. */
|
|
|
|
|
unsigned int before_disp, after_disp, last_disp, disp;
|
|
|
|
|
|
|
|
|
|
before_disp = 1 + 1;
|
|
|
|
|
after_disp = (1 + size_of_uleb128 (call_site_len)
|
|
|
|
|
+ call_site_len
|
|
|
|
|
+ VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
|
|
|
|
|
+ (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
|
|
|
|
|
* tt_format_size));
|
|
|
|
|
|
|
|
|
|
disp = after_disp;
|
|
|
|
|
do
|
|
|
|
|
{
|
|
|
|
|
unsigned int disp_size, pad;
|
|
|
|
|
|
|
|
|
|
last_disp = disp;
|
|
|
|
|
disp_size = size_of_uleb128 (disp);
|
|
|
|
|
pad = before_disp + disp_size + after_disp;
|
|
|
|
|
if (pad % tt_format_size)
|
|
|
|
|
pad = tt_format_size - (pad % tt_format_size);
|
|
|
|
|
else
|
|
|
|
|
pad = 0;
|
|
|
|
|
disp = after_disp + pad;
|
|
|
|
|
}
|
|
|
|
|
while (disp != last_disp);
|
|
|
|
|
|
|
|
|
|
dw2_asm_output_data_uleb128 (disp, "@TType base offset");
|
|
|
|
|
#endif
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/* Indicate the format of the call-site offsets. */
|
|
|
|
|
#ifdef HAVE_AS_LEB128
|
|
|
|
|
cs_format = DW_EH_PE_uleb128;
|
|
|
|
|
#else
|
|
|
|
|
cs_format = DW_EH_PE_udata4;
|
|
|
|
|
#endif
|
|
|
|
|
dw2_asm_output_data (1, cs_format, "call-site format (%s)",
|
|
|
|
|
eh_data_format_name (cs_format));
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_AS_LEB128
|
|
|
|
|
ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
|
|
|
|
|
funcdef_number);
|
|
|
|
|
ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
|
|
|
|
|
funcdef_number);
|
|
|
|
|
dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
|
|
|
|
|
"Call-site table length");
|
|
|
|
|
ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
|
|
|
|
|
if (USING_SJLJ_EXCEPTIONS)
|
|
|
|
|
sjlj_output_call_site_table ();
|
|
|
|
|
else
|
|
|
|
|
dw2_output_call_site_table ();
|
|
|
|
|
ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
|
|
|
|
|
#else
|
|
|
|
|
dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
|
|
|
|
|
if (USING_SJLJ_EXCEPTIONS)
|
|
|
|
|
sjlj_output_call_site_table ();
|
|
|
|
|
else
|
|
|
|
|
dw2_output_call_site_table ();
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
/* ??? Decode and interpret the data for flag_debug_asm. */
|
|
|
|
|
n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
|
|
|
|
|
for (i = 0; i < n; ++i)
|
|
|
|
|
dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
|
|
|
|
|
(i ? NULL : "Action record table"));
|
|
|
|
|
|
|
|
|
|
if (have_tt_data)
|
|
|
|
|
assemble_align (tt_format_size * BITS_PER_UNIT);
|
|
|
|
|
|
|
|
|
|
i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
|
|
|
|
|
while (i-- > 0)
|
|
|
|
|
{
|
|
|
|
|
tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
|
|
|
|
|
rtx value;
|
|
|
|
|
|
|
|
|
|
if (type == NULL_TREE)
|
|
|
|
|
type = integer_zero_node;
|
|
|
|
|
else
|
|
|
|
|
type = lookup_type_for_runtime (type);
|
|
|
|
|
|
|
|
|
|
value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
|
|
|
|
|
if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
|
|
|
|
|
assemble_integer (value, tt_format_size,
|
|
|
|
|
tt_format_size * BITS_PER_UNIT, 1);
|
|
|
|
|
else
|
|
|
|
|
dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_AS_LEB128
|
|
|
|
|
if (have_tt_data)
|
|
|
|
|
ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
|
|
|
|
|
#endif
|
|
|
|
|
|
|
|
|
|
/* ??? Decode and interpret the data for flag_debug_asm. */
|
|
|
|
|
n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
|
|
|
|
|
for (i = 0; i < n; ++i)
|
|
|
|
|
dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
|
|
|
|
|
(i ? NULL : "Exception specification table"));
|
|
|
|
|
|
|
|
|
|
function_section (current_function_decl);
|
|
|
|
|
|
|
|
|
|
if (USING_SJLJ_EXCEPTIONS)
|
|
|
|
|
sjlj_funcdef_number += 1;
|
|
|
|
|
}
|